ignore: remove .hgignore from ignore list if nonexistent...
Durham Goode
r25163:3139900f default
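
This changeset makes dirstate._ignore skip the .hgignore path when that file does not exist on disk, so .hgignore is only handed to ignore.ignore() when it is actually present. The snippet below is a minimal standalone sketch of the new list-building logic, not Mercurial's own code: collect_ignorefiles and its ui argument (assumed to expose configitems() like Mercurial's ui object) are hypothetical names, and os.path.expanduser/expandvars stand in for util.expandpath.

import os

def collect_ignorefiles(rootdir, ui):
    """Sketch of the list-building step in the patched dirstate._ignore:
    include .hgignore only when it actually exists on disk, then append
    any ui.ignore / ui.ignore.* paths from the configuration."""
    files = []
    hgignore = os.path.join(rootdir, '.hgignore')
    # new behaviour in this commit: a nonexistent .hgignore is no longer listed
    if os.path.exists(hgignore):
        files.append(hgignore)
    for name, path in ui.configitems("ui"):
        if name == 'ignore' or name.startswith('ignore.'):
            # the path is arbitrary and user-specified; expanduser/expandvars
            # stand in for Mercurial's util.expandpath here
            expanded = os.path.expandvars(os.path.expanduser(path))
            files.append(os.path.join(rootdir, expanded))
    return files
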
@@ -1,979 +1,981 @@
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import nullid
8 from node import nullid
9 from i18n import _
9 from i18n import _
10 import scmutil, util, ignore, osutil, parsers, encoding, pathutil
10 import scmutil, util, ignore, osutil, parsers, encoding, pathutil
11 import os, stat, errno
11 import os, stat, errno
12
12
13 propertycache = util.propertycache
13 propertycache = util.propertycache
14 filecache = scmutil.filecache
14 filecache = scmutil.filecache
15 _rangemask = 0x7fffffff
15 _rangemask = 0x7fffffff
16
16
17 dirstatetuple = parsers.dirstatetuple
17 dirstatetuple = parsers.dirstatetuple
18
18
19 class repocache(filecache):
19 class repocache(filecache):
20 """filecache for files in .hg/"""
20 """filecache for files in .hg/"""
21 def join(self, obj, fname):
21 def join(self, obj, fname):
22 return obj._opener.join(fname)
22 return obj._opener.join(fname)
23
23
24 class rootcache(filecache):
24 class rootcache(filecache):
25 """filecache for files in the repository root"""
25 """filecache for files in the repository root"""
26 def join(self, obj, fname):
26 def join(self, obj, fname):
27 return obj._join(fname)
27 return obj._join(fname)
28
28
29 class dirstate(object):
29 class dirstate(object):
30
30
31 def __init__(self, opener, ui, root, validate):
31 def __init__(self, opener, ui, root, validate):
32 '''Create a new dirstate object.
32 '''Create a new dirstate object.
33
33
34 opener is an open()-like callable that can be used to open the
34 opener is an open()-like callable that can be used to open the
35 dirstate file; root is the root of the directory tracked by
35 dirstate file; root is the root of the directory tracked by
36 the dirstate.
36 the dirstate.
37 '''
37 '''
38 self._opener = opener
38 self._opener = opener
39 self._validate = validate
39 self._validate = validate
40 self._root = root
40 self._root = root
41 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
41 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
42 # UNC path pointing to root share (issue4557)
42 # UNC path pointing to root share (issue4557)
43 self._rootdir = pathutil.normasprefix(root)
43 self._rootdir = pathutil.normasprefix(root)
44 self._dirty = False
44 self._dirty = False
45 self._dirtypl = False
45 self._dirtypl = False
46 self._lastnormaltime = 0
46 self._lastnormaltime = 0
47 self._ui = ui
47 self._ui = ui
48 self._filecache = {}
48 self._filecache = {}
49 self._parentwriters = 0
49 self._parentwriters = 0
50
50
51 def beginparentchange(self):
51 def beginparentchange(self):
52 '''Marks the beginning of a set of changes that involve changing
52 '''Marks the beginning of a set of changes that involve changing
53 the dirstate parents. If there is an exception during this time,
53 the dirstate parents. If there is an exception during this time,
54 the dirstate will not be written when the wlock is released. This
54 the dirstate will not be written when the wlock is released. This
55 prevents writing an incoherent dirstate where the parent doesn't
55 prevents writing an incoherent dirstate where the parent doesn't
56 match the contents.
56 match the contents.
57 '''
57 '''
58 self._parentwriters += 1
58 self._parentwriters += 1
59
59
60 def endparentchange(self):
60 def endparentchange(self):
61 '''Marks the end of a set of changes that involve changing the
61 '''Marks the end of a set of changes that involve changing the
62 dirstate parents. Once all parent changes have been marked done,
62 dirstate parents. Once all parent changes have been marked done,
63 the wlock will be free to write the dirstate on release.
63 the wlock will be free to write the dirstate on release.
64 '''
64 '''
65 if self._parentwriters > 0:
65 if self._parentwriters > 0:
66 self._parentwriters -= 1
66 self._parentwriters -= 1
67
67
68 def pendingparentchange(self):
68 def pendingparentchange(self):
69 '''Returns true if the dirstate is in the middle of a set of changes
69 '''Returns true if the dirstate is in the middle of a set of changes
70 that modify the dirstate parent.
70 that modify the dirstate parent.
71 '''
71 '''
72 return self._parentwriters > 0
72 return self._parentwriters > 0
73
73
74 @propertycache
74 @propertycache
75 def _map(self):
75 def _map(self):
76 '''Return the dirstate contents as a map from filename to
76 '''Return the dirstate contents as a map from filename to
77 (state, mode, size, time).'''
77 (state, mode, size, time).'''
78 self._read()
78 self._read()
79 return self._map
79 return self._map
80
80
81 @propertycache
81 @propertycache
82 def _copymap(self):
82 def _copymap(self):
83 self._read()
83 self._read()
84 return self._copymap
84 return self._copymap
85
85
86 @propertycache
86 @propertycache
87 def _filefoldmap(self):
87 def _filefoldmap(self):
88 try:
88 try:
89 makefilefoldmap = parsers.make_file_foldmap
89 makefilefoldmap = parsers.make_file_foldmap
90 except AttributeError:
90 except AttributeError:
91 pass
91 pass
92 else:
92 else:
93 return makefilefoldmap(self._map, util.normcasespec,
93 return makefilefoldmap(self._map, util.normcasespec,
94 util.normcasefallback)
94 util.normcasefallback)
95
95
96 f = {}
96 f = {}
97 normcase = util.normcase
97 normcase = util.normcase
98 for name, s in self._map.iteritems():
98 for name, s in self._map.iteritems():
99 if s[0] != 'r':
99 if s[0] != 'r':
100 f[normcase(name)] = name
100 f[normcase(name)] = name
101 f['.'] = '.' # prevents useless util.fspath() invocation
101 f['.'] = '.' # prevents useless util.fspath() invocation
102 return f
102 return f
103
103
104 @propertycache
104 @propertycache
105 def _dirfoldmap(self):
105 def _dirfoldmap(self):
106 f = {}
106 f = {}
107 normcase = util.normcase
107 normcase = util.normcase
108 for name in self._dirs:
108 for name in self._dirs:
109 f[normcase(name)] = name
109 f[normcase(name)] = name
110 return f
110 return f
111
111
112 @repocache('branch')
112 @repocache('branch')
113 def _branch(self):
113 def _branch(self):
114 try:
114 try:
115 return self._opener.read("branch").strip() or "default"
115 return self._opener.read("branch").strip() or "default"
116 except IOError, inst:
116 except IOError, inst:
117 if inst.errno != errno.ENOENT:
117 if inst.errno != errno.ENOENT:
118 raise
118 raise
119 return "default"
119 return "default"
120
120
121 @propertycache
121 @propertycache
122 def _pl(self):
122 def _pl(self):
123 try:
123 try:
124 fp = self._opener("dirstate")
124 fp = self._opener("dirstate")
125 st = fp.read(40)
125 st = fp.read(40)
126 fp.close()
126 fp.close()
127 l = len(st)
127 l = len(st)
128 if l == 40:
128 if l == 40:
129 return st[:20], st[20:40]
129 return st[:20], st[20:40]
130 elif l > 0 and l < 40:
130 elif l > 0 and l < 40:
131 raise util.Abort(_('working directory state appears damaged!'))
131 raise util.Abort(_('working directory state appears damaged!'))
132 except IOError, err:
132 except IOError, err:
133 if err.errno != errno.ENOENT:
133 if err.errno != errno.ENOENT:
134 raise
134 raise
135 return [nullid, nullid]
135 return [nullid, nullid]
136
136
137 @propertycache
137 @propertycache
138 def _dirs(self):
138 def _dirs(self):
139 return util.dirs(self._map, 'r')
139 return util.dirs(self._map, 'r')
140
140
141 def dirs(self):
141 def dirs(self):
142 return self._dirs
142 return self._dirs
143
143
144 @rootcache('.hgignore')
144 @rootcache('.hgignore')
145 def _ignore(self):
145 def _ignore(self):
146 files = [self._join('.hgignore')]
146 files = []
147 if os.path.exists(self._join('.hgignore')):
148 files.append(self._join('.hgignore'))
147 for name, path in self._ui.configitems("ui"):
149 for name, path in self._ui.configitems("ui"):
148 if name == 'ignore' or name.startswith('ignore.'):
150 if name == 'ignore' or name.startswith('ignore.'):
149 # we need to use os.path.join here rather than self._join
151 # we need to use os.path.join here rather than self._join
150 # because path is arbitrary and user-specified
152 # because path is arbitrary and user-specified
151 files.append(os.path.join(self._rootdir, util.expandpath(path)))
153 files.append(os.path.join(self._rootdir, util.expandpath(path)))
152 return ignore.ignore(self._root, files, self._ui.warn)
154 return ignore.ignore(self._root, files, self._ui.warn)
153
155
154 @propertycache
156 @propertycache
155 def _slash(self):
157 def _slash(self):
156 return self._ui.configbool('ui', 'slash') and os.sep != '/'
158 return self._ui.configbool('ui', 'slash') and os.sep != '/'
157
159
158 @propertycache
160 @propertycache
159 def _checklink(self):
161 def _checklink(self):
160 return util.checklink(self._root)
162 return util.checklink(self._root)
161
163
162 @propertycache
164 @propertycache
163 def _checkexec(self):
165 def _checkexec(self):
164 return util.checkexec(self._root)
166 return util.checkexec(self._root)
165
167
166 @propertycache
168 @propertycache
167 def _checkcase(self):
169 def _checkcase(self):
168 return not util.checkcase(self._join('.hg'))
170 return not util.checkcase(self._join('.hg'))
169
171
170 def _join(self, f):
172 def _join(self, f):
171 # much faster than os.path.join()
173 # much faster than os.path.join()
172 # it's safe because f is always a relative path
174 # it's safe because f is always a relative path
173 return self._rootdir + f
175 return self._rootdir + f
174
176
175 def flagfunc(self, buildfallback):
177 def flagfunc(self, buildfallback):
176 if self._checklink and self._checkexec:
178 if self._checklink and self._checkexec:
177 def f(x):
179 def f(x):
178 try:
180 try:
179 st = os.lstat(self._join(x))
181 st = os.lstat(self._join(x))
180 if util.statislink(st):
182 if util.statislink(st):
181 return 'l'
183 return 'l'
182 if util.statisexec(st):
184 if util.statisexec(st):
183 return 'x'
185 return 'x'
184 except OSError:
186 except OSError:
185 pass
187 pass
186 return ''
188 return ''
187 return f
189 return f
188
190
189 fallback = buildfallback()
191 fallback = buildfallback()
190 if self._checklink:
192 if self._checklink:
191 def f(x):
193 def f(x):
192 if os.path.islink(self._join(x)):
194 if os.path.islink(self._join(x)):
193 return 'l'
195 return 'l'
194 if 'x' in fallback(x):
196 if 'x' in fallback(x):
195 return 'x'
197 return 'x'
196 return ''
198 return ''
197 return f
199 return f
198 if self._checkexec:
200 if self._checkexec:
199 def f(x):
201 def f(x):
200 if 'l' in fallback(x):
202 if 'l' in fallback(x):
201 return 'l'
203 return 'l'
202 if util.isexec(self._join(x)):
204 if util.isexec(self._join(x)):
203 return 'x'
205 return 'x'
204 return ''
206 return ''
205 return f
207 return f
206 else:
208 else:
207 return fallback
209 return fallback
208
210
209 @propertycache
211 @propertycache
210 def _cwd(self):
212 def _cwd(self):
211 return os.getcwd()
213 return os.getcwd()
212
214
213 def getcwd(self):
215 def getcwd(self):
214 cwd = self._cwd
216 cwd = self._cwd
215 if cwd == self._root:
217 if cwd == self._root:
216 return ''
218 return ''
217 # self._root ends with a path separator if self._root is '/' or 'C:\'
219 # self._root ends with a path separator if self._root is '/' or 'C:\'
218 rootsep = self._root
220 rootsep = self._root
219 if not util.endswithsep(rootsep):
221 if not util.endswithsep(rootsep):
220 rootsep += os.sep
222 rootsep += os.sep
221 if cwd.startswith(rootsep):
223 if cwd.startswith(rootsep):
222 return cwd[len(rootsep):]
224 return cwd[len(rootsep):]
223 else:
225 else:
224 # we're outside the repo. return an absolute path.
226 # we're outside the repo. return an absolute path.
225 return cwd
227 return cwd
226
228
227 def pathto(self, f, cwd=None):
229 def pathto(self, f, cwd=None):
228 if cwd is None:
230 if cwd is None:
229 cwd = self.getcwd()
231 cwd = self.getcwd()
230 path = util.pathto(self._root, cwd, f)
232 path = util.pathto(self._root, cwd, f)
231 if self._slash:
233 if self._slash:
232 return util.pconvert(path)
234 return util.pconvert(path)
233 return path
235 return path
234
236
235 def __getitem__(self, key):
237 def __getitem__(self, key):
236 '''Return the current state of key (a filename) in the dirstate.
238 '''Return the current state of key (a filename) in the dirstate.
237
239
238 States are:
240 States are:
239 n normal
241 n normal
240 m needs merging
242 m needs merging
241 r marked for removal
243 r marked for removal
242 a marked for addition
244 a marked for addition
243 ? not tracked
245 ? not tracked
244 '''
246 '''
245 return self._map.get(key, ("?",))[0]
247 return self._map.get(key, ("?",))[0]
246
248
247 def __contains__(self, key):
249 def __contains__(self, key):
248 return key in self._map
250 return key in self._map
249
251
250 def __iter__(self):
252 def __iter__(self):
251 for x in sorted(self._map):
253 for x in sorted(self._map):
252 yield x
254 yield x
253
255
254 def iteritems(self):
256 def iteritems(self):
255 return self._map.iteritems()
257 return self._map.iteritems()
256
258
257 def parents(self):
259 def parents(self):
258 return [self._validate(p) for p in self._pl]
260 return [self._validate(p) for p in self._pl]
259
261
260 def p1(self):
262 def p1(self):
261 return self._validate(self._pl[0])
263 return self._validate(self._pl[0])
262
264
263 def p2(self):
265 def p2(self):
264 return self._validate(self._pl[1])
266 return self._validate(self._pl[1])
265
267
266 def branch(self):
268 def branch(self):
267 return encoding.tolocal(self._branch)
269 return encoding.tolocal(self._branch)
268
270
269 def setparents(self, p1, p2=nullid):
271 def setparents(self, p1, p2=nullid):
270 """Set dirstate parents to p1 and p2.
272 """Set dirstate parents to p1 and p2.
271
273
272 When moving from two parents to one, 'm' merged entries are
274 When moving from two parents to one, 'm' merged entries are
273 adjusted to normal and previous copy records are discarded and
275 adjusted to normal and previous copy records are discarded and
274 returned by the call.
276 returned by the call.
275
277
276 See localrepo.setparents()
278 See localrepo.setparents()
277 """
279 """
278 if self._parentwriters == 0:
280 if self._parentwriters == 0:
279 raise ValueError("cannot set dirstate parent without "
281 raise ValueError("cannot set dirstate parent without "
280 "calling dirstate.beginparentchange")
282 "calling dirstate.beginparentchange")
281
283
282 self._dirty = self._dirtypl = True
284 self._dirty = self._dirtypl = True
283 oldp2 = self._pl[1]
285 oldp2 = self._pl[1]
284 self._pl = p1, p2
286 self._pl = p1, p2
285 copies = {}
287 copies = {}
286 if oldp2 != nullid and p2 == nullid:
288 if oldp2 != nullid and p2 == nullid:
287 for f, s in self._map.iteritems():
289 for f, s in self._map.iteritems():
288 # Discard 'm' markers when moving away from a merge state
290 # Discard 'm' markers when moving away from a merge state
289 if s[0] == 'm':
291 if s[0] == 'm':
290 if f in self._copymap:
292 if f in self._copymap:
291 copies[f] = self._copymap[f]
293 copies[f] = self._copymap[f]
292 self.normallookup(f)
294 self.normallookup(f)
293 # Also fix up otherparent markers
295 # Also fix up otherparent markers
294 elif s[0] == 'n' and s[2] == -2:
296 elif s[0] == 'n' and s[2] == -2:
295 if f in self._copymap:
297 if f in self._copymap:
296 copies[f] = self._copymap[f]
298 copies[f] = self._copymap[f]
297 self.add(f)
299 self.add(f)
298 return copies
300 return copies
299
301
300 def setbranch(self, branch):
302 def setbranch(self, branch):
301 self._branch = encoding.fromlocal(branch)
303 self._branch = encoding.fromlocal(branch)
302 f = self._opener('branch', 'w', atomictemp=True)
304 f = self._opener('branch', 'w', atomictemp=True)
303 try:
305 try:
304 f.write(self._branch + '\n')
306 f.write(self._branch + '\n')
305 f.close()
307 f.close()
306
308
307 # make sure filecache has the correct stat info for _branch after
309 # make sure filecache has the correct stat info for _branch after
308 # replacing the underlying file
310 # replacing the underlying file
309 ce = self._filecache['_branch']
311 ce = self._filecache['_branch']
310 if ce:
312 if ce:
311 ce.refresh()
313 ce.refresh()
312 except: # re-raises
314 except: # re-raises
313 f.discard()
315 f.discard()
314 raise
316 raise
315
317
316 def _read(self):
318 def _read(self):
317 self._map = {}
319 self._map = {}
318 self._copymap = {}
320 self._copymap = {}
319 try:
321 try:
320 st = self._opener.read("dirstate")
322 st = self._opener.read("dirstate")
321 except IOError, err:
323 except IOError, err:
322 if err.errno != errno.ENOENT:
324 if err.errno != errno.ENOENT:
323 raise
325 raise
324 return
326 return
325 if not st:
327 if not st:
326 return
328 return
327
329
328 # Python's garbage collector triggers a GC each time a certain number
330 # Python's garbage collector triggers a GC each time a certain number
329 # of container objects (the number being defined by
331 # of container objects (the number being defined by
330 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
332 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
331 # for each file in the dirstate. The C version then immediately marks
333 # for each file in the dirstate. The C version then immediately marks
332 # them as not to be tracked by the collector. However, this has no
334 # them as not to be tracked by the collector. However, this has no
333 # effect on when GCs are triggered, only on what objects the GC looks
335 # effect on when GCs are triggered, only on what objects the GC looks
334 # into. This means that O(number of files) GCs are unavoidable.
336 # into. This means that O(number of files) GCs are unavoidable.
335 # Depending on when in the process's lifetime the dirstate is parsed,
337 # Depending on when in the process's lifetime the dirstate is parsed,
336 # this can get very expensive. As a workaround, disable GC while
338 # this can get very expensive. As a workaround, disable GC while
337 # parsing the dirstate.
339 # parsing the dirstate.
338 #
340 #
339 # (we cannot decorate the function directly since it is in a C module)
341 # (we cannot decorate the function directly since it is in a C module)
340 parse_dirstate = util.nogc(parsers.parse_dirstate)
342 parse_dirstate = util.nogc(parsers.parse_dirstate)
341 p = parse_dirstate(self._map, self._copymap, st)
343 p = parse_dirstate(self._map, self._copymap, st)
342 if not self._dirtypl:
344 if not self._dirtypl:
343 self._pl = p
345 self._pl = p
344
346
345 def invalidate(self):
347 def invalidate(self):
346 for a in ("_map", "_copymap", "_filefoldmap", "_dirfoldmap", "_branch",
348 for a in ("_map", "_copymap", "_filefoldmap", "_dirfoldmap", "_branch",
347 "_pl", "_dirs", "_ignore"):
349 "_pl", "_dirs", "_ignore"):
348 if a in self.__dict__:
350 if a in self.__dict__:
349 delattr(self, a)
351 delattr(self, a)
350 self._lastnormaltime = 0
352 self._lastnormaltime = 0
351 self._dirty = False
353 self._dirty = False
352 self._parentwriters = 0
354 self._parentwriters = 0
353
355
354 def copy(self, source, dest):
356 def copy(self, source, dest):
355 """Mark dest as a copy of source. Unmark dest if source is None."""
357 """Mark dest as a copy of source. Unmark dest if source is None."""
356 if source == dest:
358 if source == dest:
357 return
359 return
358 self._dirty = True
360 self._dirty = True
359 if source is not None:
361 if source is not None:
360 self._copymap[dest] = source
362 self._copymap[dest] = source
361 elif dest in self._copymap:
363 elif dest in self._copymap:
362 del self._copymap[dest]
364 del self._copymap[dest]
363
365
364 def copied(self, file):
366 def copied(self, file):
365 return self._copymap.get(file, None)
367 return self._copymap.get(file, None)
366
368
367 def copies(self):
369 def copies(self):
368 return self._copymap
370 return self._copymap
369
371
370 def _droppath(self, f):
372 def _droppath(self, f):
371 if self[f] not in "?r" and "_dirs" in self.__dict__:
373 if self[f] not in "?r" and "_dirs" in self.__dict__:
372 self._dirs.delpath(f)
374 self._dirs.delpath(f)
373
375
374 def _addpath(self, f, state, mode, size, mtime):
376 def _addpath(self, f, state, mode, size, mtime):
375 oldstate = self[f]
377 oldstate = self[f]
376 if state == 'a' or oldstate == 'r':
378 if state == 'a' or oldstate == 'r':
377 scmutil.checkfilename(f)
379 scmutil.checkfilename(f)
378 if f in self._dirs:
380 if f in self._dirs:
379 raise util.Abort(_('directory %r already in dirstate') % f)
381 raise util.Abort(_('directory %r already in dirstate') % f)
380 # shadows
382 # shadows
381 for d in util.finddirs(f):
383 for d in util.finddirs(f):
382 if d in self._dirs:
384 if d in self._dirs:
383 break
385 break
384 if d in self._map and self[d] != 'r':
386 if d in self._map and self[d] != 'r':
385 raise util.Abort(
387 raise util.Abort(
386 _('file %r in dirstate clashes with %r') % (d, f))
388 _('file %r in dirstate clashes with %r') % (d, f))
387 if oldstate in "?r" and "_dirs" in self.__dict__:
389 if oldstate in "?r" and "_dirs" in self.__dict__:
388 self._dirs.addpath(f)
390 self._dirs.addpath(f)
389 self._dirty = True
391 self._dirty = True
390 self._map[f] = dirstatetuple(state, mode, size, mtime)
392 self._map[f] = dirstatetuple(state, mode, size, mtime)
391
393
392 def normal(self, f):
394 def normal(self, f):
393 '''Mark a file normal and clean.'''
395 '''Mark a file normal and clean.'''
394 s = os.lstat(self._join(f))
396 s = os.lstat(self._join(f))
395 mtime = int(s.st_mtime)
397 mtime = int(s.st_mtime)
396 self._addpath(f, 'n', s.st_mode,
398 self._addpath(f, 'n', s.st_mode,
397 s.st_size & _rangemask, mtime & _rangemask)
399 s.st_size & _rangemask, mtime & _rangemask)
398 if f in self._copymap:
400 if f in self._copymap:
399 del self._copymap[f]
401 del self._copymap[f]
400 if mtime > self._lastnormaltime:
402 if mtime > self._lastnormaltime:
401 # Remember the most recent modification timeslot for status(),
403 # Remember the most recent modification timeslot for status(),
402 # to make sure we won't miss future size-preserving file content
404 # to make sure we won't miss future size-preserving file content
403 # modifications that happen within the same timeslot.
405 # modifications that happen within the same timeslot.
404 self._lastnormaltime = mtime
406 self._lastnormaltime = mtime
405
407
406 def normallookup(self, f):
408 def normallookup(self, f):
407 '''Mark a file normal, but possibly dirty.'''
409 '''Mark a file normal, but possibly dirty.'''
408 if self._pl[1] != nullid and f in self._map:
410 if self._pl[1] != nullid and f in self._map:
409 # if there is a merge going on and the file was either
411 # if there is a merge going on and the file was either
410 # in state 'm' (-1) or coming from other parent (-2) before
412 # in state 'm' (-1) or coming from other parent (-2) before
411 # being removed, restore that state.
413 # being removed, restore that state.
412 entry = self._map[f]
414 entry = self._map[f]
413 if entry[0] == 'r' and entry[2] in (-1, -2):
415 if entry[0] == 'r' and entry[2] in (-1, -2):
414 source = self._copymap.get(f)
416 source = self._copymap.get(f)
415 if entry[2] == -1:
417 if entry[2] == -1:
416 self.merge(f)
418 self.merge(f)
417 elif entry[2] == -2:
419 elif entry[2] == -2:
418 self.otherparent(f)
420 self.otherparent(f)
419 if source:
421 if source:
420 self.copy(source, f)
422 self.copy(source, f)
421 return
423 return
422 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
424 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
423 return
425 return
424 self._addpath(f, 'n', 0, -1, -1)
426 self._addpath(f, 'n', 0, -1, -1)
425 if f in self._copymap:
427 if f in self._copymap:
426 del self._copymap[f]
428 del self._copymap[f]
427
429
428 def otherparent(self, f):
430 def otherparent(self, f):
429 '''Mark as coming from the other parent, always dirty.'''
431 '''Mark as coming from the other parent, always dirty.'''
430 if self._pl[1] == nullid:
432 if self._pl[1] == nullid:
431 raise util.Abort(_("setting %r to other parent "
433 raise util.Abort(_("setting %r to other parent "
432 "only allowed in merges") % f)
434 "only allowed in merges") % f)
433 if f in self and self[f] == 'n':
435 if f in self and self[f] == 'n':
434 # merge-like
436 # merge-like
435 self._addpath(f, 'm', 0, -2, -1)
437 self._addpath(f, 'm', 0, -2, -1)
436 else:
438 else:
437 # add-like
439 # add-like
438 self._addpath(f, 'n', 0, -2, -1)
440 self._addpath(f, 'n', 0, -2, -1)
439
441
440 if f in self._copymap:
442 if f in self._copymap:
441 del self._copymap[f]
443 del self._copymap[f]
442
444
443 def add(self, f):
445 def add(self, f):
444 '''Mark a file added.'''
446 '''Mark a file added.'''
445 self._addpath(f, 'a', 0, -1, -1)
447 self._addpath(f, 'a', 0, -1, -1)
446 if f in self._copymap:
448 if f in self._copymap:
447 del self._copymap[f]
449 del self._copymap[f]
448
450
449 def remove(self, f):
451 def remove(self, f):
450 '''Mark a file removed.'''
452 '''Mark a file removed.'''
451 self._dirty = True
453 self._dirty = True
452 self._droppath(f)
454 self._droppath(f)
453 size = 0
455 size = 0
454 if self._pl[1] != nullid and f in self._map:
456 if self._pl[1] != nullid and f in self._map:
455 # backup the previous state
457 # backup the previous state
456 entry = self._map[f]
458 entry = self._map[f]
457 if entry[0] == 'm': # merge
459 if entry[0] == 'm': # merge
458 size = -1
460 size = -1
459 elif entry[0] == 'n' and entry[2] == -2: # other parent
461 elif entry[0] == 'n' and entry[2] == -2: # other parent
460 size = -2
462 size = -2
461 self._map[f] = dirstatetuple('r', 0, size, 0)
463 self._map[f] = dirstatetuple('r', 0, size, 0)
462 if size == 0 and f in self._copymap:
464 if size == 0 and f in self._copymap:
463 del self._copymap[f]
465 del self._copymap[f]
464
466
465 def merge(self, f):
467 def merge(self, f):
466 '''Mark a file merged.'''
468 '''Mark a file merged.'''
467 if self._pl[1] == nullid:
469 if self._pl[1] == nullid:
468 return self.normallookup(f)
470 return self.normallookup(f)
469 return self.otherparent(f)
471 return self.otherparent(f)
470
472
471 def drop(self, f):
473 def drop(self, f):
472 '''Drop a file from the dirstate'''
474 '''Drop a file from the dirstate'''
473 if f in self._map:
475 if f in self._map:
474 self._dirty = True
476 self._dirty = True
475 self._droppath(f)
477 self._droppath(f)
476 del self._map[f]
478 del self._map[f]
477
479
478 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
480 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
479 if exists is None:
481 if exists is None:
480 exists = os.path.lexists(os.path.join(self._root, path))
482 exists = os.path.lexists(os.path.join(self._root, path))
481 if not exists:
483 if not exists:
482 # Maybe a path component exists
484 # Maybe a path component exists
483 if not ignoremissing and '/' in path:
485 if not ignoremissing and '/' in path:
484 d, f = path.rsplit('/', 1)
486 d, f = path.rsplit('/', 1)
485 d = self._normalize(d, False, ignoremissing, None)
487 d = self._normalize(d, False, ignoremissing, None)
486 folded = d + "/" + f
488 folded = d + "/" + f
487 else:
489 else:
488 # No path components, preserve original case
490 # No path components, preserve original case
489 folded = path
491 folded = path
490 else:
492 else:
491 # recursively normalize leading directory components
493 # recursively normalize leading directory components
492 # against dirstate
494 # against dirstate
493 if '/' in normed:
495 if '/' in normed:
494 d, f = normed.rsplit('/', 1)
496 d, f = normed.rsplit('/', 1)
495 d = self._normalize(d, False, ignoremissing, True)
497 d = self._normalize(d, False, ignoremissing, True)
496 r = self._root + "/" + d
498 r = self._root + "/" + d
497 folded = d + "/" + util.fspath(f, r)
499 folded = d + "/" + util.fspath(f, r)
498 else:
500 else:
499 folded = util.fspath(normed, self._root)
501 folded = util.fspath(normed, self._root)
500 storemap[normed] = folded
502 storemap[normed] = folded
501
503
502 return folded
504 return folded
503
505
504 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
506 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
505 normed = util.normcase(path)
507 normed = util.normcase(path)
506 folded = self._filefoldmap.get(normed, None)
508 folded = self._filefoldmap.get(normed, None)
507 if folded is None:
509 if folded is None:
508 if isknown:
510 if isknown:
509 folded = path
511 folded = path
510 else:
512 else:
511 folded = self._discoverpath(path, normed, ignoremissing, exists,
513 folded = self._discoverpath(path, normed, ignoremissing, exists,
512 self._filefoldmap)
514 self._filefoldmap)
513 return folded
515 return folded
514
516
515 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
517 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
516 normed = util.normcase(path)
518 normed = util.normcase(path)
517 folded = self._filefoldmap.get(normed, None)
519 folded = self._filefoldmap.get(normed, None)
518 if folded is None:
520 if folded is None:
519 folded = self._dirfoldmap.get(normed, None)
521 folded = self._dirfoldmap.get(normed, None)
520 if folded is None:
522 if folded is None:
521 if isknown:
523 if isknown:
522 folded = path
524 folded = path
523 else:
525 else:
524 # store discovered result in dirfoldmap so that future
526 # store discovered result in dirfoldmap so that future
525 # normalizefile calls don't start matching directories
527 # normalizefile calls don't start matching directories
526 folded = self._discoverpath(path, normed, ignoremissing, exists,
528 folded = self._discoverpath(path, normed, ignoremissing, exists,
527 self._dirfoldmap)
529 self._dirfoldmap)
528 return folded
530 return folded
529
531
530 def normalize(self, path, isknown=False, ignoremissing=False):
532 def normalize(self, path, isknown=False, ignoremissing=False):
531 '''
533 '''
532 normalize the case of a pathname when on a casefolding filesystem
534 normalize the case of a pathname when on a casefolding filesystem
533
535
534 isknown specifies whether the filename came from walking the
536 isknown specifies whether the filename came from walking the
535 disk, to avoid extra filesystem access.
537 disk, to avoid extra filesystem access.
536
538
537 If ignoremissing is True, missing paths are returned
539 If ignoremissing is True, missing paths are returned
538 unchanged. Otherwise, we try harder to normalize possibly
540 unchanged. Otherwise, we try harder to normalize possibly
539 existing path components.
541 existing path components.
540
542
541 The normalized case is determined based on the following precedence:
543 The normalized case is determined based on the following precedence:
542
544
543 - version of name already stored in the dirstate
545 - version of name already stored in the dirstate
544 - version of name stored on disk
546 - version of name stored on disk
545 - version provided via command arguments
547 - version provided via command arguments
546 '''
548 '''
547
549
548 if self._checkcase:
550 if self._checkcase:
549 return self._normalize(path, isknown, ignoremissing)
551 return self._normalize(path, isknown, ignoremissing)
550 return path
552 return path
551
553
552 def clear(self):
554 def clear(self):
553 self._map = {}
555 self._map = {}
554 if "_dirs" in self.__dict__:
556 if "_dirs" in self.__dict__:
555 delattr(self, "_dirs")
557 delattr(self, "_dirs")
556 self._copymap = {}
558 self._copymap = {}
557 self._pl = [nullid, nullid]
559 self._pl = [nullid, nullid]
558 self._lastnormaltime = 0
560 self._lastnormaltime = 0
559 self._dirty = True
561 self._dirty = True
560
562
561 def rebuild(self, parent, allfiles, changedfiles=None):
563 def rebuild(self, parent, allfiles, changedfiles=None):
562 changedfiles = changedfiles or allfiles
564 changedfiles = changedfiles or allfiles
563 oldmap = self._map
565 oldmap = self._map
564 self.clear()
566 self.clear()
565 for f in allfiles:
567 for f in allfiles:
566 if f not in changedfiles:
568 if f not in changedfiles:
567 self._map[f] = oldmap[f]
569 self._map[f] = oldmap[f]
568 else:
570 else:
569 if 'x' in allfiles.flags(f):
571 if 'x' in allfiles.flags(f):
570 self._map[f] = dirstatetuple('n', 0777, -1, 0)
572 self._map[f] = dirstatetuple('n', 0777, -1, 0)
571 else:
573 else:
572 self._map[f] = dirstatetuple('n', 0666, -1, 0)
574 self._map[f] = dirstatetuple('n', 0666, -1, 0)
573 self._pl = (parent, nullid)
575 self._pl = (parent, nullid)
574 self._dirty = True
576 self._dirty = True
575
577
576 def write(self):
578 def write(self):
577 if not self._dirty:
579 if not self._dirty:
578 return
580 return
579
581
580 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
582 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
581 # timestamp of each entry in dirstate, because of 'now > mtime'
583 # timestamp of each entry in dirstate, because of 'now > mtime'
582 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite', 0)
584 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite', 0)
583 if delaywrite > 0:
585 if delaywrite > 0:
584 import time # to avoid useless import
586 import time # to avoid useless import
585 time.sleep(delaywrite)
587 time.sleep(delaywrite)
586
588
587 st = self._opener("dirstate", "w", atomictemp=True)
589 st = self._opener("dirstate", "w", atomictemp=True)
588 # use the modification time of the newly created temporary file as the
590 # use the modification time of the newly created temporary file as the
589 # filesystem's notion of 'now'
591 # filesystem's notion of 'now'
590 now = util.fstat(st).st_mtime
592 now = util.fstat(st).st_mtime
591 st.write(parsers.pack_dirstate(self._map, self._copymap, self._pl, now))
593 st.write(parsers.pack_dirstate(self._map, self._copymap, self._pl, now))
592 st.close()
594 st.close()
593 self._lastnormaltime = 0
595 self._lastnormaltime = 0
594 self._dirty = self._dirtypl = False
596 self._dirty = self._dirtypl = False
595
597
596 def _dirignore(self, f):
598 def _dirignore(self, f):
597 if f == '.':
599 if f == '.':
598 return False
600 return False
599 if self._ignore(f):
601 if self._ignore(f):
600 return True
602 return True
601 for p in util.finddirs(f):
603 for p in util.finddirs(f):
602 if self._ignore(p):
604 if self._ignore(p):
603 return True
605 return True
604 return False
606 return False
605
607
606 def _walkexplicit(self, match, subrepos):
608 def _walkexplicit(self, match, subrepos):
607 '''Get stat data about the files explicitly specified by match.
609 '''Get stat data about the files explicitly specified by match.
608
610
609 Return a triple (results, dirsfound, dirsnotfound).
611 Return a triple (results, dirsfound, dirsnotfound).
610 - results is a mapping from filename to stat result. It also contains
612 - results is a mapping from filename to stat result. It also contains
611 listings mapping subrepos and .hg to None.
613 listings mapping subrepos and .hg to None.
612 - dirsfound is a list of files found to be directories.
614 - dirsfound is a list of files found to be directories.
613 - dirsnotfound is a list of files that the dirstate thinks are
615 - dirsnotfound is a list of files that the dirstate thinks are
614 directories and that were not found.'''
616 directories and that were not found.'''
615
617
616 def badtype(mode):
618 def badtype(mode):
617 kind = _('unknown')
619 kind = _('unknown')
618 if stat.S_ISCHR(mode):
620 if stat.S_ISCHR(mode):
619 kind = _('character device')
621 kind = _('character device')
620 elif stat.S_ISBLK(mode):
622 elif stat.S_ISBLK(mode):
621 kind = _('block device')
623 kind = _('block device')
622 elif stat.S_ISFIFO(mode):
624 elif stat.S_ISFIFO(mode):
623 kind = _('fifo')
625 kind = _('fifo')
624 elif stat.S_ISSOCK(mode):
626 elif stat.S_ISSOCK(mode):
625 kind = _('socket')
627 kind = _('socket')
626 elif stat.S_ISDIR(mode):
628 elif stat.S_ISDIR(mode):
627 kind = _('directory')
629 kind = _('directory')
628 return _('unsupported file type (type is %s)') % kind
630 return _('unsupported file type (type is %s)') % kind
629
631
630 matchedir = match.explicitdir
632 matchedir = match.explicitdir
631 badfn = match.bad
633 badfn = match.bad
632 dmap = self._map
634 dmap = self._map
633 lstat = os.lstat
635 lstat = os.lstat
634 getkind = stat.S_IFMT
636 getkind = stat.S_IFMT
635 dirkind = stat.S_IFDIR
637 dirkind = stat.S_IFDIR
636 regkind = stat.S_IFREG
638 regkind = stat.S_IFREG
637 lnkkind = stat.S_IFLNK
639 lnkkind = stat.S_IFLNK
638 join = self._join
640 join = self._join
639 dirsfound = []
641 dirsfound = []
640 foundadd = dirsfound.append
642 foundadd = dirsfound.append
641 dirsnotfound = []
643 dirsnotfound = []
642 notfoundadd = dirsnotfound.append
644 notfoundadd = dirsnotfound.append
643
645
644 if not match.isexact() and self._checkcase:
646 if not match.isexact() and self._checkcase:
645 normalize = self._normalize
647 normalize = self._normalize
646 else:
648 else:
647 normalize = None
649 normalize = None
648
650
649 files = sorted(match.files())
651 files = sorted(match.files())
650 subrepos.sort()
652 subrepos.sort()
651 i, j = 0, 0
653 i, j = 0, 0
652 while i < len(files) and j < len(subrepos):
654 while i < len(files) and j < len(subrepos):
653 subpath = subrepos[j] + "/"
655 subpath = subrepos[j] + "/"
654 if files[i] < subpath:
656 if files[i] < subpath:
655 i += 1
657 i += 1
656 continue
658 continue
657 while i < len(files) and files[i].startswith(subpath):
659 while i < len(files) and files[i].startswith(subpath):
658 del files[i]
660 del files[i]
659 j += 1
661 j += 1
660
662
661 if not files or '.' in files:
663 if not files or '.' in files:
662 files = ['.']
664 files = ['.']
663 results = dict.fromkeys(subrepos)
665 results = dict.fromkeys(subrepos)
664 results['.hg'] = None
666 results['.hg'] = None
665
667
666 alldirs = None
668 alldirs = None
667 for ff in files:
669 for ff in files:
668 # constructing the foldmap is expensive, so don't do it for the
670 # constructing the foldmap is expensive, so don't do it for the
669 # common case where files is ['.']
671 # common case where files is ['.']
670 if normalize and ff != '.':
672 if normalize and ff != '.':
671 nf = normalize(ff, False, True)
673 nf = normalize(ff, False, True)
672 else:
674 else:
673 nf = ff
675 nf = ff
674 if nf in results:
676 if nf in results:
675 continue
677 continue
676
678
677 try:
679 try:
678 st = lstat(join(nf))
680 st = lstat(join(nf))
679 kind = getkind(st.st_mode)
681 kind = getkind(st.st_mode)
680 if kind == dirkind:
682 if kind == dirkind:
681 if nf in dmap:
683 if nf in dmap:
682 # file replaced by dir on disk but still in dirstate
684 # file replaced by dir on disk but still in dirstate
683 results[nf] = None
685 results[nf] = None
684 if matchedir:
686 if matchedir:
685 matchedir(nf)
687 matchedir(nf)
686 foundadd((nf, ff))
688 foundadd((nf, ff))
687 elif kind == regkind or kind == lnkkind:
689 elif kind == regkind or kind == lnkkind:
688 results[nf] = st
690 results[nf] = st
689 else:
691 else:
690 badfn(ff, badtype(kind))
692 badfn(ff, badtype(kind))
691 if nf in dmap:
693 if nf in dmap:
692 results[nf] = None
694 results[nf] = None
693 except OSError, inst: # nf not found on disk - it is dirstate only
695 except OSError, inst: # nf not found on disk - it is dirstate only
694 if nf in dmap: # does it exactly match a missing file?
696 if nf in dmap: # does it exactly match a missing file?
695 results[nf] = None
697 results[nf] = None
696 else: # does it match a missing directory?
698 else: # does it match a missing directory?
697 if alldirs is None:
699 if alldirs is None:
698 alldirs = util.dirs(dmap)
700 alldirs = util.dirs(dmap)
699 if nf in alldirs:
701 if nf in alldirs:
700 if matchedir:
702 if matchedir:
701 matchedir(nf)
703 matchedir(nf)
702 notfoundadd(nf)
704 notfoundadd(nf)
703 else:
705 else:
704 badfn(ff, inst.strerror)
706 badfn(ff, inst.strerror)
705
707
706 return results, dirsfound, dirsnotfound
708 return results, dirsfound, dirsnotfound
707
709
708 def walk(self, match, subrepos, unknown, ignored, full=True):
710 def walk(self, match, subrepos, unknown, ignored, full=True):
709 '''
711 '''
710 Walk recursively through the directory tree, finding all files
712 Walk recursively through the directory tree, finding all files
711 matched by match.
713 matched by match.
712
714
713 If full is False, maybe skip some known-clean files.
715 If full is False, maybe skip some known-clean files.
714
716
715 Return a dict mapping filename to stat-like object (either
717 Return a dict mapping filename to stat-like object (either
716 mercurial.osutil.stat instance or return value of os.stat()).
718 mercurial.osutil.stat instance or return value of os.stat()).
717
719
718 '''
720 '''
719 # full is a flag that extensions that hook into walk can use -- this
721 # full is a flag that extensions that hook into walk can use -- this
720 # implementation doesn't use it at all. This satisfies the contract
722 # implementation doesn't use it at all. This satisfies the contract
721 # because we only guarantee a "maybe".
723 # because we only guarantee a "maybe".
722
724
723 if ignored:
725 if ignored:
724 ignore = util.never
726 ignore = util.never
725 dirignore = util.never
727 dirignore = util.never
726 elif unknown:
728 elif unknown:
727 ignore = self._ignore
729 ignore = self._ignore
728 dirignore = self._dirignore
730 dirignore = self._dirignore
729 else:
731 else:
730 # if not unknown and not ignored, drop dir recursion and step 2
732 # if not unknown and not ignored, drop dir recursion and step 2
731 ignore = util.always
733 ignore = util.always
732 dirignore = util.always
734 dirignore = util.always
733
735
734 matchfn = match.matchfn
736 matchfn = match.matchfn
735 matchalways = match.always()
737 matchalways = match.always()
736 matchtdir = match.traversedir
738 matchtdir = match.traversedir
737 dmap = self._map
739 dmap = self._map
738 listdir = osutil.listdir
740 listdir = osutil.listdir
739 lstat = os.lstat
741 lstat = os.lstat
740 dirkind = stat.S_IFDIR
742 dirkind = stat.S_IFDIR
741 regkind = stat.S_IFREG
743 regkind = stat.S_IFREG
742 lnkkind = stat.S_IFLNK
744 lnkkind = stat.S_IFLNK
743 join = self._join
745 join = self._join
744
746
745 exact = skipstep3 = False
747 exact = skipstep3 = False
746 if match.isexact(): # match.exact
748 if match.isexact(): # match.exact
747 exact = True
749 exact = True
748 dirignore = util.always # skip step 2
750 dirignore = util.always # skip step 2
749 elif match.files() and not match.anypats(): # match.match, no patterns
751 elif match.files() and not match.anypats(): # match.match, no patterns
750 skipstep3 = True
752 skipstep3 = True
751
753
752 if not exact and self._checkcase:
754 if not exact and self._checkcase:
753 normalize = self._normalize
755 normalize = self._normalize
754 normalizefile = self._normalizefile
756 normalizefile = self._normalizefile
755 skipstep3 = False
757 skipstep3 = False
756 else:
758 else:
757 normalize = self._normalize
759 normalize = self._normalize
758 normalizefile = None
760 normalizefile = None
759
761
760 # step 1: find all explicit files
762 # step 1: find all explicit files
761 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
763 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
762
764
763 skipstep3 = skipstep3 and not (work or dirsnotfound)
765 skipstep3 = skipstep3 and not (work or dirsnotfound)
764 work = [d for d in work if not dirignore(d[0])]
766 work = [d for d in work if not dirignore(d[0])]
765
767
766 # step 2: visit subdirectories
768 # step 2: visit subdirectories
767 def traverse(work, alreadynormed):
769 def traverse(work, alreadynormed):
768 wadd = work.append
770 wadd = work.append
769 while work:
771 while work:
770 nd = work.pop()
772 nd = work.pop()
771 skip = None
773 skip = None
772 if nd == '.':
774 if nd == '.':
773 nd = ''
775 nd = ''
774 else:
776 else:
775 skip = '.hg'
777 skip = '.hg'
776 try:
778 try:
777 entries = listdir(join(nd), stat=True, skip=skip)
779 entries = listdir(join(nd), stat=True, skip=skip)
778 except OSError, inst:
780 except OSError, inst:
779 if inst.errno in (errno.EACCES, errno.ENOENT):
781 if inst.errno in (errno.EACCES, errno.ENOENT):
780 match.bad(self.pathto(nd), inst.strerror)
782 match.bad(self.pathto(nd), inst.strerror)
781 continue
783 continue
782 raise
784 raise
783 for f, kind, st in entries:
785 for f, kind, st in entries:
784 if normalizefile:
786 if normalizefile:
785 # even though f might be a directory, we're only
787 # even though f might be a directory, we're only
786 # interested in comparing it to files currently in the
788 # interested in comparing it to files currently in the
787 # dmap -- therefore normalizefile is enough
789 # dmap -- therefore normalizefile is enough
788 nf = normalizefile(nd and (nd + "/" + f) or f, True,
790 nf = normalizefile(nd and (nd + "/" + f) or f, True,
789 True)
791 True)
790 else:
792 else:
791 nf = nd and (nd + "/" + f) or f
793 nf = nd and (nd + "/" + f) or f
792 if nf not in results:
794 if nf not in results:
793 if kind == dirkind:
795 if kind == dirkind:
794 if not ignore(nf):
796 if not ignore(nf):
795 if matchtdir:
797 if matchtdir:
796 matchtdir(nf)
798 matchtdir(nf)
797 wadd(nf)
799 wadd(nf)
798 if nf in dmap and (matchalways or matchfn(nf)):
800 if nf in dmap and (matchalways or matchfn(nf)):
799 results[nf] = None
801 results[nf] = None
800 elif kind == regkind or kind == lnkkind:
802 elif kind == regkind or kind == lnkkind:
801 if nf in dmap:
803 if nf in dmap:
802 if matchalways or matchfn(nf):
804 if matchalways or matchfn(nf):
803 results[nf] = st
805 results[nf] = st
804 elif ((matchalways or matchfn(nf))
806 elif ((matchalways or matchfn(nf))
805 and not ignore(nf)):
807 and not ignore(nf)):
806 # unknown file -- normalize if necessary
808 # unknown file -- normalize if necessary
807 if not alreadynormed:
809 if not alreadynormed:
808 nf = normalize(nf, False, True)
810 nf = normalize(nf, False, True)
809 results[nf] = st
811 results[nf] = st
810 elif nf in dmap and (matchalways or matchfn(nf)):
812 elif nf in dmap and (matchalways or matchfn(nf)):
811 results[nf] = None
813 results[nf] = None
812
814
813 for nd, d in work:
815 for nd, d in work:
814 # alreadynormed means that processwork doesn't have to do any
816 # alreadynormed means that processwork doesn't have to do any
815 # expensive directory normalization
817 # expensive directory normalization
816 alreadynormed = not normalize or nd == d
818 alreadynormed = not normalize or nd == d
817 traverse([d], alreadynormed)
819 traverse([d], alreadynormed)
818
820
819 for s in subrepos:
821 for s in subrepos:
820 del results[s]
822 del results[s]
821 del results['.hg']
823 del results['.hg']
822
824
823 # step 3: visit remaining files from dmap
825 # step 3: visit remaining files from dmap
824 if not skipstep3 and not exact:
826 if not skipstep3 and not exact:
825 # If a dmap file is not in results yet, it was either
827 # If a dmap file is not in results yet, it was either
826 # a) not matching matchfn b) ignored, c) missing, or d) under a
828 # a) not matching matchfn b) ignored, c) missing, or d) under a
827 # symlink directory.
829 # symlink directory.
828 if not results and matchalways:
830 if not results and matchalways:
829 visit = dmap.keys()
831 visit = dmap.keys()
830 else:
832 else:
831 visit = [f for f in dmap if f not in results and matchfn(f)]
833 visit = [f for f in dmap if f not in results and matchfn(f)]
832 visit.sort()
834 visit.sort()
833
835
834 if unknown:
836 if unknown:
835 # unknown == True means we walked all dirs under the roots
837 # unknown == True means we walked all dirs under the roots
836 # that weren't ignored, and everything that matched was stat'ed
838 # that weren't ignored, and everything that matched was stat'ed
837 # and is already in results.
839 # and is already in results.
838 # The rest must thus be ignored or under a symlink.
840 # The rest must thus be ignored or under a symlink.
839 audit_path = pathutil.pathauditor(self._root)
841 audit_path = pathutil.pathauditor(self._root)
840
842
841 for nf in iter(visit):
843 for nf in iter(visit):
842 # If a stat for the same file was already added with a
844 # If a stat for the same file was already added with a
843 # different case, don't add one for this, since that would
845 # different case, don't add one for this, since that would
844 # make it appear as if the file exists under both names
846 # make it appear as if the file exists under both names
845 # on disk.
847 # on disk.
846 if (normalizefile and
848 if (normalizefile and
847 normalizefile(nf, True, True) in results):
849 normalizefile(nf, True, True) in results):
848 results[nf] = None
850 results[nf] = None
849 # Report ignored items in the dmap as long as they are not
851 # Report ignored items in the dmap as long as they are not
850 # under a symlink directory.
852 # under a symlink directory.
851 elif audit_path.check(nf):
853 elif audit_path.check(nf):
852 try:
854 try:
853 results[nf] = lstat(join(nf))
855 results[nf] = lstat(join(nf))
854 # file was just ignored, no links, and exists
856 # file was just ignored, no links, and exists
855 except OSError:
857 except OSError:
856 # file doesn't exist
858 # file doesn't exist
857 results[nf] = None
859 results[nf] = None
858 else:
860 else:
859 # It's either missing or under a symlink directory
861 # It's either missing or under a symlink directory
860 # which we in this case report as missing
862 # which we in this case report as missing
861 results[nf] = None
863 results[nf] = None
862 else:
864 else:
863 # We may not have walked the full directory tree above,
865 # We may not have walked the full directory tree above,
864 # so stat and check everything we missed.
866 # so stat and check everything we missed.
865 nf = iter(visit).next
867 nf = iter(visit).next
866 for st in util.statfiles([join(i) for i in visit]):
868 for st in util.statfiles([join(i) for i in visit]):
867 results[nf()] = st
869 results[nf()] = st
868 return results
870 return results
869
871
870 def status(self, match, subrepos, ignored, clean, unknown):
872 def status(self, match, subrepos, ignored, clean, unknown):
871 '''Determine the status of the working copy relative to the
873 '''Determine the status of the working copy relative to the
872 dirstate and return a pair of (unsure, status), where status is of type
874 dirstate and return a pair of (unsure, status), where status is of type
873 scmutil.status and:
875 scmutil.status and:
874
876
875 unsure:
877 unsure:
876 files that might have been modified since the dirstate was
878 files that might have been modified since the dirstate was
877 written, but need to be read to be sure (size is the same
879 written, but need to be read to be sure (size is the same
878 but mtime differs)
880 but mtime differs)
879 status.modified:
881 status.modified:
880 files that have definitely been modified since the dirstate
882 files that have definitely been modified since the dirstate
881 was written (different size or mode)
883 was written (different size or mode)
882 status.clean:
884 status.clean:
883 files that have definitely not been modified since the
885 files that have definitely not been modified since the
884 dirstate was written
886 dirstate was written
885 '''
887 '''
886 listignored, listclean, listunknown = ignored, clean, unknown
888 listignored, listclean, listunknown = ignored, clean, unknown
887 lookup, modified, added, unknown, ignored = [], [], [], [], []
889 lookup, modified, added, unknown, ignored = [], [], [], [], []
888 removed, deleted, clean = [], [], []
890 removed, deleted, clean = [], [], []
889
891
890 dmap = self._map
892 dmap = self._map
891 ladd = lookup.append # aka "unsure"
893 ladd = lookup.append # aka "unsure"
892 madd = modified.append
894 madd = modified.append
893 aadd = added.append
895 aadd = added.append
894 uadd = unknown.append
896 uadd = unknown.append
895 iadd = ignored.append
897 iadd = ignored.append
896 radd = removed.append
898 radd = removed.append
897 dadd = deleted.append
899 dadd = deleted.append
898 cadd = clean.append
900 cadd = clean.append
899 mexact = match.exact
901 mexact = match.exact
900 dirignore = self._dirignore
902 dirignore = self._dirignore
901 checkexec = self._checkexec
903 checkexec = self._checkexec
902 copymap = self._copymap
904 copymap = self._copymap
903 lastnormaltime = self._lastnormaltime
905 lastnormaltime = self._lastnormaltime
904
906
905 # We need to do full walks when either
907 # We need to do full walks when either
906 # - we're listing all clean files, or
908 # - we're listing all clean files, or
907 # - match.traversedir does something, because match.traversedir should
909 # - match.traversedir does something, because match.traversedir should
908 # be called for every dir in the working dir
910 # be called for every dir in the working dir
909 full = listclean or match.traversedir is not None
911 full = listclean or match.traversedir is not None
910 for fn, st in self.walk(match, subrepos, listunknown, listignored,
912 for fn, st in self.walk(match, subrepos, listunknown, listignored,
911 full=full).iteritems():
913 full=full).iteritems():
912 if fn not in dmap:
914 if fn not in dmap:
913 if (listignored or mexact(fn)) and dirignore(fn):
915 if (listignored or mexact(fn)) and dirignore(fn):
914 if listignored:
916 if listignored:
915 iadd(fn)
917 iadd(fn)
916 else:
918 else:
917 uadd(fn)
919 uadd(fn)
918 continue
920 continue
919
921
920 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
922 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
921 # written like that for performance reasons. dmap[fn] is not a
923 # written like that for performance reasons. dmap[fn] is not a
922 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
924 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
923 # opcode has fast paths when the value to be unpacked is a tuple or
925 # opcode has fast paths when the value to be unpacked is a tuple or
924 # a list, but falls back to creating a full-fledged iterator in
926 # a list, but falls back to creating a full-fledged iterator in
925 # general. That is much slower than simply accessing and storing the
927 # general. That is much slower than simply accessing and storing the
926 # tuple members one by one.
928 # tuple members one by one.
927 t = dmap[fn]
929 t = dmap[fn]
928 state = t[0]
930 state = t[0]
929 mode = t[1]
931 mode = t[1]
930 size = t[2]
932 size = t[2]
931 time = t[3]
933 time = t[3]
932
934
933 if not st and state in "nma":
935 if not st and state in "nma":
934 dadd(fn)
936 dadd(fn)
935 elif state == 'n':
937 elif state == 'n':
936 mtime = int(st.st_mtime)
938 mtime = int(st.st_mtime)
937 if (size >= 0 and
939 if (size >= 0 and
938 ((size != st.st_size and size != st.st_size & _rangemask)
940 ((size != st.st_size and size != st.st_size & _rangemask)
939 or ((mode ^ st.st_mode) & 0100 and checkexec))
941 or ((mode ^ st.st_mode) & 0100 and checkexec))
940 or size == -2 # other parent
942 or size == -2 # other parent
941 or fn in copymap):
943 or fn in copymap):
942 madd(fn)
944 madd(fn)
943 elif time != mtime and time != mtime & _rangemask:
945 elif time != mtime and time != mtime & _rangemask:
944 ladd(fn)
946 ladd(fn)
945 elif mtime == lastnormaltime:
947 elif mtime == lastnormaltime:
946 # fn may have just been marked as normal and it may have
948 # fn may have just been marked as normal and it may have
947 # changed in the same second without changing its size.
949 # changed in the same second without changing its size.
948 # This can happen if we quickly do multiple commits.
950 # This can happen if we quickly do multiple commits.
949 # Force lookup, so we don't miss such a racy file change.
951 # Force lookup, so we don't miss such a racy file change.
950 ladd(fn)
952 ladd(fn)
951 elif listclean:
953 elif listclean:
952 cadd(fn)
954 cadd(fn)
953 elif state == 'm':
955 elif state == 'm':
954 madd(fn)
956 madd(fn)
955 elif state == 'a':
957 elif state == 'a':
956 aadd(fn)
958 aadd(fn)
957 elif state == 'r':
959 elif state == 'r':
958 radd(fn)
960 radd(fn)
959
961
960 return (lookup, scmutil.status(modified, added, removed, deleted,
962 return (lookup, scmutil.status(modified, added, removed, deleted,
961 unknown, ignored, clean))
963 unknown, ignored, clean))
962
964
963 def matches(self, match):
965 def matches(self, match):
964 '''
966 '''
965 return files in the dirstate (in whatever state) filtered by match
967 return files in the dirstate (in whatever state) filtered by match
966 '''
968 '''
967 dmap = self._map
969 dmap = self._map
968 if match.always():
970 if match.always():
969 return dmap.keys()
971 return dmap.keys()
970 files = match.files()
972 files = match.files()
971 if match.isexact():
973 if match.isexact():
972 # fast path -- filter the other way around, since typically files is
974 # fast path -- filter the other way around, since typically files is
973 # much smaller than dmap
975 # much smaller than dmap
974 return [f for f in files if f in dmap]
976 return [f for f in files if f in dmap]
975 if not match.anypats() and all(fn in dmap for fn in files):
977 if not match.anypats() and all(fn in dmap for fn in files):
976 # fast path -- all the values are known to be files, so just return
978 # fast path -- all the values are known to be files, so just return
977 # that
979 # that
978 return list(files)
980 return list(files)
979 return [f for f in dmap if match(f)]
981 return [f for f in dmap if match(f)]
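The status loop above decides a tracked file's fate by comparing the size, mode, and mtime recorded in its dirstate entry against a fresh stat of the working copy. The following standalone sketch restates that decision logic for files in the 'n' (normal) state; DirstateEntry and classify are illustrative names, not part of Mercurial's API, and the copy-map check is omitted for brevity.

import os

_RANGEMASK = 0x7fffffff  # dirstate truncates size/mtime to 31 bits

class DirstateEntry(object):
    # hypothetical stand-in for the (state, mode, size, time) tuple in dmap
    def __init__(self, state, mode, size, time):
        self.state, self.mode, self.size, self.time = state, mode, size, time

def classify(entry, st, checkexec=True, lastnormaltime=0):
    """Mirror the 'n'-state branches above: deleted, modified, lookup or clean."""
    if st is None:
        return 'deleted'                 # tracked but missing on disk
    mtime = int(st.st_mtime)
    if entry.size >= 0 and (
            (entry.size != st.st_size and
             entry.size != st.st_size & _RANGEMASK)
            or ((entry.mode ^ st.st_mode) & 0o100 and checkexec)):
        return 'modified'                # size or exec bit differs
    if entry.size == -2:
        return 'modified'                # recorded as coming from the other parent
    if entry.time != mtime and entry.time != mtime & _RANGEMASK:
        return 'lookup'                  # mtime differs; contents must be compared
    if mtime == lastnormaltime:
        return 'lookup'                  # same-second race; force a content check
    return 'clean'

For example, classify(DirstateEntry('n', 0o100644, 12, 0), os.stat('README')) reports 'modified' as soon as the on-disk size stops matching the recorded 12 bytes (the path and values here are made up).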
@@ -1,122 +1,120 @@
1 # ignore.py - ignored file handling for mercurial
1 # ignore.py - ignored file handling for mercurial
2 #
2 #
3 # Copyright 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 import util, match
9 import util, match
10 import re
10 import re
11
11
12 _commentre = None
12 _commentre = None
13
13
14 def ignorepats(lines):
14 def ignorepats(lines):
15 '''parse lines (iterable) of .hgignore text, returning a tuple of
15 '''parse lines (iterable) of .hgignore text, returning a tuple of
16 (patterns, parse errors). These patterns should be given to compile()
16 (patterns, parse errors). These patterns should be given to compile()
17 to be validated and converted into a match function.'''
17 to be validated and converted into a match function.'''
18 syntaxes = {'re': 'relre:', 'regexp': 'relre:', 'glob': 'relglob:'}
18 syntaxes = {'re': 'relre:', 'regexp': 'relre:', 'glob': 'relglob:'}
19 syntax = 'relre:'
19 syntax = 'relre:'
20 patterns = []
20 patterns = []
21 warnings = []
21 warnings = []
22
22
23 for line in lines:
23 for line in lines:
24 if "#" in line:
24 if "#" in line:
25 global _commentre
25 global _commentre
26 if not _commentre:
26 if not _commentre:
27 _commentre = re.compile(r'((^|[^\\])(\\\\)*)#.*')
27 _commentre = re.compile(r'((^|[^\\])(\\\\)*)#.*')
28 # remove comments prefixed by an even number of escapes
28 # remove comments prefixed by an even number of escapes
29 line = _commentre.sub(r'\1', line)
29 line = _commentre.sub(r'\1', line)
30 # fixup properly escaped comments that survived the above
30 # fixup properly escaped comments that survived the above
31 line = line.replace("\\#", "#")
31 line = line.replace("\\#", "#")
32 line = line.rstrip()
32 line = line.rstrip()
33 if not line:
33 if not line:
34 continue
34 continue
35
35
36 if line.startswith('syntax:'):
36 if line.startswith('syntax:'):
37 s = line[7:].strip()
37 s = line[7:].strip()
38 try:
38 try:
39 syntax = syntaxes[s]
39 syntax = syntaxes[s]
40 except KeyError:
40 except KeyError:
41 warnings.append(_("ignoring invalid syntax '%s'") % s)
41 warnings.append(_("ignoring invalid syntax '%s'") % s)
42 continue
42 continue
43
43
44 linesyntax = syntax
44 linesyntax = syntax
45 for s, rels in syntaxes.iteritems():
45 for s, rels in syntaxes.iteritems():
46 if line.startswith(rels):
46 if line.startswith(rels):
47 linesyntax = rels
47 linesyntax = rels
48 line = line[len(rels):]
48 line = line[len(rels):]
49 break
49 break
50 elif line.startswith(s+':'):
50 elif line.startswith(s+':'):
51 linesyntax = rels
51 linesyntax = rels
52 line = line[len(s) + 1:]
52 line = line[len(s) + 1:]
53 break
53 break
54 patterns.append(linesyntax + line)
54 patterns.append(linesyntax + line)
55
55
56 return patterns, warnings
56 return patterns, warnings
57
57
58 def readignorefile(filepath, warn, skipwarning=False):
58 def readignorefile(filepath, warn):
59 try:
59 try:
60 pats = []
60 pats = []
61 fp = open(filepath)
61 fp = open(filepath)
62 pats, warnings = ignorepats(fp)
62 pats, warnings = ignorepats(fp)
63 fp.close()
63 fp.close()
64 for warning in warnings:
64 for warning in warnings:
65 warn("%s: %s\n" % (filepath, warning))
65 warn("%s: %s\n" % (filepath, warning))
66 except IOError, inst:
66 except IOError, inst:
67 if not skipwarning:
67 warn(_("skipping unreadable ignore file '%s': %s\n") %
68 warn(_("skipping unreadable ignore file '%s': %s\n") %
68 (filepath, inst.strerror))
69 (filepath, inst.strerror))
70 return pats
69 return pats
71
70
72 def readpats(root, files, warn):
71 def readpats(root, files, warn):
73 '''return a dict mapping ignore-file-name to list-of-patterns'''
72 '''return a dict mapping ignore-file-name to list-of-patterns'''
74
73
75 pats = {}
74 pats = {}
76 for f in files:
75 for f in files:
77 if f in pats:
76 if f in pats:
78 continue
77 continue
79 skipwarning = f == files[0]
78 pats[f] = readignorefile(f, warn)
80 pats[f] = readignorefile(f, warn, skipwarning=skipwarning)
81
79
82 return [(f, pats[f]) for f in files if f in pats]
80 return [(f, pats[f]) for f in files if f in pats]
83
81
84 def ignore(root, files, warn):
82 def ignore(root, files, warn):
85 '''return matcher covering patterns in 'files'.
83 '''return matcher covering patterns in 'files'.
86
84
87 the files parsed for patterns include:
85 the files parsed for patterns include:
88 .hgignore in the repository root
86 .hgignore in the repository root
89 any additional files specified in the [ui] section of ~/.hgrc
87 any additional files specified in the [ui] section of ~/.hgrc
90
88
91 trailing white space is dropped.
89 trailing white space is dropped.
92 the escape character is backslash.
90 the escape character is backslash.
93 comments start with #.
91 comments start with #.
94 empty lines are skipped.
92 empty lines are skipped.
95
93
96 lines can be of the following formats:
94 lines can be of the following formats:
97
95
98 syntax: regexp # defaults following lines to non-rooted regexps
96 syntax: regexp # defaults following lines to non-rooted regexps
99 syntax: glob # defaults following lines to non-rooted globs
97 syntax: glob # defaults following lines to non-rooted globs
100 re:pattern # non-rooted regular expression
98 re:pattern # non-rooted regular expression
101 glob:pattern # non-rooted glob
99 glob:pattern # non-rooted glob
102 pattern # pattern of the current default type'''
100 pattern # pattern of the current default type'''
103
101
104 pats = readpats(root, files, warn)
102 pats = readpats(root, files, warn)
105
103
106 allpats = []
104 allpats = []
107 for f, patlist in pats:
105 for f, patlist in pats:
108 allpats.extend(patlist)
106 allpats.extend(patlist)
109 if not allpats:
107 if not allpats:
110 return util.never
108 return util.never
111
109
112 try:
110 try:
113 ignorefunc = match.match(root, '', [], allpats)
111 ignorefunc = match.match(root, '', [], allpats)
114 except util.Abort:
112 except util.Abort:
115 # Re-raise an exception where the src is the right file
113 # Re-raise an exception where the src is the right file
116 for f, patlist in pats:
114 for f, patlist in pats:
117 try:
115 try:
118 match.match(root, '', [], patlist)
116 match.match(root, '', [], patlist)
119 except util.Abort, inst:
117 except util.Abort, inst:
120 raise util.Abort('%s: %s' % (f, inst[0]))
118 raise util.Abort('%s: %s' % (f, inst[0]))
121
119
122 return ignorefunc
120 return ignorefunc
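The parsing behaviour of ignorepats() above can be exercised on its own; the sketch below feeds it a handful of .hgignore-style lines and shows the normalized patterns and warnings it returns. It assumes a Mercurial tree of this vintage is importable as the mercurial package (later releases folded this module into match.py), so treat it as illustrative rather than a supported API.

from mercurial import ignore

lines = [
    "# comment lines and blank lines are dropped\n",
    "syntax: glob\n",
    "*.pyc\n",
    "re:^build/\n",
    "syntax: bogus\n",  # unknown syntax produces a warning, not an error
]
patterns, warnings = ignore.ignorepats(lines)
# patterns == ['relglob:*.pyc', 'relre:^build/']
# warnings == ["ignoring invalid syntax 'bogus'"]

Prefixing each parsed pattern with its own 'relglob:' or 'relre:' marker is what lets ignore() hand the combined list straight to match.match() without tracking a per-file default syntax.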