dirstate: split write to write changes into files other than .hg/dirstate...
FUJIWARA Katsunori
r26521:3f41e28a default
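The hunk below factors the body of write() out into a new helper, _writedirstate(st): write() still decides which file to open (.hg/dirstate through the opener), while the helper packs and writes the map into whatever file object it is handed, which is what later lets callers send the same data to files other than .hg/dirstate. A minimal, self-contained sketch of that pattern follows; FakeDirstate, pack() and 'dirstate.pending' are illustrative stand-ins and do not appear in the changeset.

    # Sketch of the write()/_writedirstate() split introduced by this changeset.
    # FakeDirstate, pack() and 'dirstate.pending' are illustrative stand-ins only.
    import os
    import tempfile

    def pack(entries):
        # stand-in for parsers.pack_dirstate: serialize the map to bytes
        return b''.join(b'%s\x00%s\n' % (k.encode(), v.encode())
                        for k, v in sorted(entries.items()))

    class FakeDirstate(object):
        def __init__(self, root):
            self._root = root
            self._map = {'a.txt': 'n', 'b.txt': 'a'}
            self._dirty = True

        def write(self):
            # write() still targets the default file ('dirstate')
            if not self._dirty:
                return
            st = open(os.path.join(self._root, 'dirstate'), 'wb')
            self._writedirstate(st)

        def _writedirstate(self, st):
            # all serialization lives here, independent of which file was opened
            st.write(pack(self._map))
            st.close()
            self._dirty = False

    d = FakeDirstate(tempfile.mkdtemp())
    d.write()  # writes the usual dirstate file
    # the same helper can now be pointed at a different file:
    alt = open(os.path.join(d._root, 'dirstate.pending'), 'wb')  # hypothetical target
    d._writedirstate(alt)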
@@ -1,1044 +1,1047 @@
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import nullid
8 from node import nullid
9 from i18n import _
9 from i18n import _
10 import scmutil, util, osutil, parsers, encoding, pathutil
10 import scmutil, util, osutil, parsers, encoding, pathutil
11 import os, stat, errno
11 import os, stat, errno
12 import match as matchmod
12 import match as matchmod
13
13
14 propertycache = util.propertycache
14 propertycache = util.propertycache
15 filecache = scmutil.filecache
15 filecache = scmutil.filecache
16 _rangemask = 0x7fffffff
16 _rangemask = 0x7fffffff
17
17
18 dirstatetuple = parsers.dirstatetuple
18 dirstatetuple = parsers.dirstatetuple
19
19
20 class repocache(filecache):
20 class repocache(filecache):
21 """filecache for files in .hg/"""
21 """filecache for files in .hg/"""
22 def join(self, obj, fname):
22 def join(self, obj, fname):
23 return obj._opener.join(fname)
23 return obj._opener.join(fname)
24
24
25 class rootcache(filecache):
25 class rootcache(filecache):
26 """filecache for files in the repository root"""
26 """filecache for files in the repository root"""
27 def join(self, obj, fname):
27 def join(self, obj, fname):
28 return obj._join(fname)
28 return obj._join(fname)
29
29
30 class dirstate(object):
30 class dirstate(object):
31
31
32 def __init__(self, opener, ui, root, validate):
32 def __init__(self, opener, ui, root, validate):
33 '''Create a new dirstate object.
33 '''Create a new dirstate object.
34
34
35 opener is an open()-like callable that can be used to open the
35 opener is an open()-like callable that can be used to open the
36 dirstate file; root is the root of the directory tracked by
36 dirstate file; root is the root of the directory tracked by
37 the dirstate.
37 the dirstate.
38 '''
38 '''
39 self._opener = opener
39 self._opener = opener
40 self._validate = validate
40 self._validate = validate
41 self._root = root
41 self._root = root
42 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
42 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
43 # UNC path pointing to root share (issue4557)
43 # UNC path pointing to root share (issue4557)
44 self._rootdir = pathutil.normasprefix(root)
44 self._rootdir = pathutil.normasprefix(root)
45 # internal config: ui.forcecwd
45 # internal config: ui.forcecwd
46 forcecwd = ui.config('ui', 'forcecwd')
46 forcecwd = ui.config('ui', 'forcecwd')
47 if forcecwd:
47 if forcecwd:
48 self._cwd = forcecwd
48 self._cwd = forcecwd
49 self._dirty = False
49 self._dirty = False
50 self._dirtypl = False
50 self._dirtypl = False
51 self._lastnormaltime = 0
51 self._lastnormaltime = 0
52 self._ui = ui
52 self._ui = ui
53 self._filecache = {}
53 self._filecache = {}
54 self._parentwriters = 0
54 self._parentwriters = 0
55 self._filename = 'dirstate'
55 self._filename = 'dirstate'
56
56
57 def beginparentchange(self):
57 def beginparentchange(self):
58 '''Marks the beginning of a set of changes that involve changing
58 '''Marks the beginning of a set of changes that involve changing
59 the dirstate parents. If there is an exception during this time,
59 the dirstate parents. If there is an exception during this time,
60 the dirstate will not be written when the wlock is released. This
60 the dirstate will not be written when the wlock is released. This
61 prevents writing an incoherent dirstate where the parent doesn't
61 prevents writing an incoherent dirstate where the parent doesn't
62 match the contents.
62 match the contents.
63 '''
63 '''
64 self._parentwriters += 1
64 self._parentwriters += 1
65
65
66 def endparentchange(self):
66 def endparentchange(self):
67 '''Marks the end of a set of changes that involve changing the
67 '''Marks the end of a set of changes that involve changing the
68 dirstate parents. Once all parent changes have been marked done,
68 dirstate parents. Once all parent changes have been marked done,
69 the wlock will be free to write the dirstate on release.
69 the wlock will be free to write the dirstate on release.
70 '''
70 '''
71 if self._parentwriters > 0:
71 if self._parentwriters > 0:
72 self._parentwriters -= 1
72 self._parentwriters -= 1
73
73
74 def pendingparentchange(self):
74 def pendingparentchange(self):
75 '''Returns true if the dirstate is in the middle of a set of changes
75 '''Returns true if the dirstate is in the middle of a set of changes
76 that modify the dirstate parent.
76 that modify the dirstate parent.
77 '''
77 '''
78 return self._parentwriters > 0
78 return self._parentwriters > 0
79
79
80 @propertycache
80 @propertycache
81 def _map(self):
81 def _map(self):
82 '''Return the dirstate contents as a map from filename to
82 '''Return the dirstate contents as a map from filename to
83 (state, mode, size, time).'''
83 (state, mode, size, time).'''
84 self._read()
84 self._read()
85 return self._map
85 return self._map
86
86
87 @propertycache
87 @propertycache
88 def _copymap(self):
88 def _copymap(self):
89 self._read()
89 self._read()
90 return self._copymap
90 return self._copymap
91
91
92 @propertycache
92 @propertycache
93 def _filefoldmap(self):
93 def _filefoldmap(self):
94 try:
94 try:
95 makefilefoldmap = parsers.make_file_foldmap
95 makefilefoldmap = parsers.make_file_foldmap
96 except AttributeError:
96 except AttributeError:
97 pass
97 pass
98 else:
98 else:
99 return makefilefoldmap(self._map, util.normcasespec,
99 return makefilefoldmap(self._map, util.normcasespec,
100 util.normcasefallback)
100 util.normcasefallback)
101
101
102 f = {}
102 f = {}
103 normcase = util.normcase
103 normcase = util.normcase
104 for name, s in self._map.iteritems():
104 for name, s in self._map.iteritems():
105 if s[0] != 'r':
105 if s[0] != 'r':
106 f[normcase(name)] = name
106 f[normcase(name)] = name
107 f['.'] = '.' # prevents useless util.fspath() invocation
107 f['.'] = '.' # prevents useless util.fspath() invocation
108 return f
108 return f
109
109
110 @propertycache
110 @propertycache
111 def _dirfoldmap(self):
111 def _dirfoldmap(self):
112 f = {}
112 f = {}
113 normcase = util.normcase
113 normcase = util.normcase
114 for name in self._dirs:
114 for name in self._dirs:
115 f[normcase(name)] = name
115 f[normcase(name)] = name
116 return f
116 return f
117
117
118 @repocache('branch')
118 @repocache('branch')
119 def _branch(self):
119 def _branch(self):
120 try:
120 try:
121 return self._opener.read("branch").strip() or "default"
121 return self._opener.read("branch").strip() or "default"
122 except IOError as inst:
122 except IOError as inst:
123 if inst.errno != errno.ENOENT:
123 if inst.errno != errno.ENOENT:
124 raise
124 raise
125 return "default"
125 return "default"
126
126
127 @propertycache
127 @propertycache
128 def _pl(self):
128 def _pl(self):
129 try:
129 try:
130 fp = self._opener(self._filename)
130 fp = self._opener(self._filename)
131 st = fp.read(40)
131 st = fp.read(40)
132 fp.close()
132 fp.close()
133 l = len(st)
133 l = len(st)
134 if l == 40:
134 if l == 40:
135 return st[:20], st[20:40]
135 return st[:20], st[20:40]
136 elif l > 0 and l < 40:
136 elif l > 0 and l < 40:
137 raise util.Abort(_('working directory state appears damaged!'))
137 raise util.Abort(_('working directory state appears damaged!'))
138 except IOError as err:
138 except IOError as err:
139 if err.errno != errno.ENOENT:
139 if err.errno != errno.ENOENT:
140 raise
140 raise
141 return [nullid, nullid]
141 return [nullid, nullid]
142
142
143 @propertycache
143 @propertycache
144 def _dirs(self):
144 def _dirs(self):
145 return util.dirs(self._map, 'r')
145 return util.dirs(self._map, 'r')
146
146
147 def dirs(self):
147 def dirs(self):
148 return self._dirs
148 return self._dirs
149
149
150 @rootcache('.hgignore')
150 @rootcache('.hgignore')
151 def _ignore(self):
151 def _ignore(self):
152 files = []
152 files = []
153 if os.path.exists(self._join('.hgignore')):
153 if os.path.exists(self._join('.hgignore')):
154 files.append(self._join('.hgignore'))
154 files.append(self._join('.hgignore'))
155 for name, path in self._ui.configitems("ui"):
155 for name, path in self._ui.configitems("ui"):
156 if name == 'ignore' or name.startswith('ignore.'):
156 if name == 'ignore' or name.startswith('ignore.'):
157 # we need to use os.path.join here rather than self._join
157 # we need to use os.path.join here rather than self._join
158 # because path is arbitrary and user-specified
158 # because path is arbitrary and user-specified
159 files.append(os.path.join(self._rootdir, util.expandpath(path)))
159 files.append(os.path.join(self._rootdir, util.expandpath(path)))
160
160
161 if not files:
161 if not files:
162 return util.never
162 return util.never
163
163
164 pats = ['include:%s' % f for f in files]
164 pats = ['include:%s' % f for f in files]
165 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
165 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
166
166
167 @propertycache
167 @propertycache
168 def _slash(self):
168 def _slash(self):
169 return self._ui.configbool('ui', 'slash') and os.sep != '/'
169 return self._ui.configbool('ui', 'slash') and os.sep != '/'
170
170
171 @propertycache
171 @propertycache
172 def _checklink(self):
172 def _checklink(self):
173 return util.checklink(self._root)
173 return util.checklink(self._root)
174
174
175 @propertycache
175 @propertycache
176 def _checkexec(self):
176 def _checkexec(self):
177 return util.checkexec(self._root)
177 return util.checkexec(self._root)
178
178
179 @propertycache
179 @propertycache
180 def _checkcase(self):
180 def _checkcase(self):
181 return not util.checkcase(self._join('.hg'))
181 return not util.checkcase(self._join('.hg'))
182
182
183 def _join(self, f):
183 def _join(self, f):
184 # much faster than os.path.join()
184 # much faster than os.path.join()
185 # it's safe because f is always a relative path
185 # it's safe because f is always a relative path
186 return self._rootdir + f
186 return self._rootdir + f
187
187
188 def flagfunc(self, buildfallback):
188 def flagfunc(self, buildfallback):
189 if self._checklink and self._checkexec:
189 if self._checklink and self._checkexec:
190 def f(x):
190 def f(x):
191 try:
191 try:
192 st = os.lstat(self._join(x))
192 st = os.lstat(self._join(x))
193 if util.statislink(st):
193 if util.statislink(st):
194 return 'l'
194 return 'l'
195 if util.statisexec(st):
195 if util.statisexec(st):
196 return 'x'
196 return 'x'
197 except OSError:
197 except OSError:
198 pass
198 pass
199 return ''
199 return ''
200 return f
200 return f
201
201
202 fallback = buildfallback()
202 fallback = buildfallback()
203 if self._checklink:
203 if self._checklink:
204 def f(x):
204 def f(x):
205 if os.path.islink(self._join(x)):
205 if os.path.islink(self._join(x)):
206 return 'l'
206 return 'l'
207 if 'x' in fallback(x):
207 if 'x' in fallback(x):
208 return 'x'
208 return 'x'
209 return ''
209 return ''
210 return f
210 return f
211 if self._checkexec:
211 if self._checkexec:
212 def f(x):
212 def f(x):
213 if 'l' in fallback(x):
213 if 'l' in fallback(x):
214 return 'l'
214 return 'l'
215 if util.isexec(self._join(x)):
215 if util.isexec(self._join(x)):
216 return 'x'
216 return 'x'
217 return ''
217 return ''
218 return f
218 return f
219 else:
219 else:
220 return fallback
220 return fallback
221
221
222 @propertycache
222 @propertycache
223 def _cwd(self):
223 def _cwd(self):
224 return os.getcwd()
224 return os.getcwd()
225
225
226 def getcwd(self):
226 def getcwd(self):
227 '''Return the path from which a canonical path is calculated.
227 '''Return the path from which a canonical path is calculated.
228
228
229 This path should be used to resolve file patterns or to convert
229 This path should be used to resolve file patterns or to convert
230 canonical paths back to file paths for display. It shouldn't be
230 canonical paths back to file paths for display. It shouldn't be
231 used to get real file paths. Use vfs functions instead.
231 used to get real file paths. Use vfs functions instead.
232 '''
232 '''
233 cwd = self._cwd
233 cwd = self._cwd
234 if cwd == self._root:
234 if cwd == self._root:
235 return ''
235 return ''
236 # self._root ends with a path separator if self._root is '/' or 'C:\'
236 # self._root ends with a path separator if self._root is '/' or 'C:\'
237 rootsep = self._root
237 rootsep = self._root
238 if not util.endswithsep(rootsep):
238 if not util.endswithsep(rootsep):
239 rootsep += os.sep
239 rootsep += os.sep
240 if cwd.startswith(rootsep):
240 if cwd.startswith(rootsep):
241 return cwd[len(rootsep):]
241 return cwd[len(rootsep):]
242 else:
242 else:
243 # we're outside the repo. return an absolute path.
243 # we're outside the repo. return an absolute path.
244 return cwd
244 return cwd
245
245
246 def pathto(self, f, cwd=None):
246 def pathto(self, f, cwd=None):
247 if cwd is None:
247 if cwd is None:
248 cwd = self.getcwd()
248 cwd = self.getcwd()
249 path = util.pathto(self._root, cwd, f)
249 path = util.pathto(self._root, cwd, f)
250 if self._slash:
250 if self._slash:
251 return util.pconvert(path)
251 return util.pconvert(path)
252 return path
252 return path
253
253
254 def __getitem__(self, key):
254 def __getitem__(self, key):
255 '''Return the current state of key (a filename) in the dirstate.
255 '''Return the current state of key (a filename) in the dirstate.
256
256
257 States are:
257 States are:
258 n normal
258 n normal
259 m needs merging
259 m needs merging
260 r marked for removal
260 r marked for removal
261 a marked for addition
261 a marked for addition
262 ? not tracked
262 ? not tracked
263 '''
263 '''
264 return self._map.get(key, ("?",))[0]
264 return self._map.get(key, ("?",))[0]
265
265
266 def __contains__(self, key):
266 def __contains__(self, key):
267 return key in self._map
267 return key in self._map
268
268
269 def __iter__(self):
269 def __iter__(self):
270 for x in sorted(self._map):
270 for x in sorted(self._map):
271 yield x
271 yield x
272
272
273 def iteritems(self):
273 def iteritems(self):
274 return self._map.iteritems()
274 return self._map.iteritems()
275
275
276 def parents(self):
276 def parents(self):
277 return [self._validate(p) for p in self._pl]
277 return [self._validate(p) for p in self._pl]
278
278
279 def p1(self):
279 def p1(self):
280 return self._validate(self._pl[0])
280 return self._validate(self._pl[0])
281
281
282 def p2(self):
282 def p2(self):
283 return self._validate(self._pl[1])
283 return self._validate(self._pl[1])
284
284
285 def branch(self):
285 def branch(self):
286 return encoding.tolocal(self._branch)
286 return encoding.tolocal(self._branch)
287
287
288 def setparents(self, p1, p2=nullid):
288 def setparents(self, p1, p2=nullid):
289 """Set dirstate parents to p1 and p2.
289 """Set dirstate parents to p1 and p2.
290
290
291 When moving from two parents to one, 'm' merged entries a
291 When moving from two parents to one, 'm' merged entries a
292 adjusted to normal and previous copy records discarded and
292 adjusted to normal and previous copy records discarded and
293 returned by the call.
293 returned by the call.
294
294
295 See localrepo.setparents()
295 See localrepo.setparents()
296 """
296 """
297 if self._parentwriters == 0:
297 if self._parentwriters == 0:
298 raise ValueError("cannot set dirstate parent without "
298 raise ValueError("cannot set dirstate parent without "
299 "calling dirstate.beginparentchange")
299 "calling dirstate.beginparentchange")
300
300
301 self._dirty = self._dirtypl = True
301 self._dirty = self._dirtypl = True
302 oldp2 = self._pl[1]
302 oldp2 = self._pl[1]
303 self._pl = p1, p2
303 self._pl = p1, p2
304 copies = {}
304 copies = {}
305 if oldp2 != nullid and p2 == nullid:
305 if oldp2 != nullid and p2 == nullid:
306 for f, s in self._map.iteritems():
306 for f, s in self._map.iteritems():
307 # Discard 'm' markers when moving away from a merge state
307 # Discard 'm' markers when moving away from a merge state
308 if s[0] == 'm':
308 if s[0] == 'm':
309 if f in self._copymap:
309 if f in self._copymap:
310 copies[f] = self._copymap[f]
310 copies[f] = self._copymap[f]
311 self.normallookup(f)
311 self.normallookup(f)
312 # Also fix up otherparent markers
312 # Also fix up otherparent markers
313 elif s[0] == 'n' and s[2] == -2:
313 elif s[0] == 'n' and s[2] == -2:
314 if f in self._copymap:
314 if f in self._copymap:
315 copies[f] = self._copymap[f]
315 copies[f] = self._copymap[f]
316 self.add(f)
316 self.add(f)
317 return copies
317 return copies
318
318
319 def setbranch(self, branch):
319 def setbranch(self, branch):
320 self._branch = encoding.fromlocal(branch)
320 self._branch = encoding.fromlocal(branch)
321 f = self._opener('branch', 'w', atomictemp=True)
321 f = self._opener('branch', 'w', atomictemp=True)
322 try:
322 try:
323 f.write(self._branch + '\n')
323 f.write(self._branch + '\n')
324 f.close()
324 f.close()
325
325
326 # make sure filecache has the correct stat info for _branch after
326 # make sure filecache has the correct stat info for _branch after
327 # replacing the underlying file
327 # replacing the underlying file
328 ce = self._filecache['_branch']
328 ce = self._filecache['_branch']
329 if ce:
329 if ce:
330 ce.refresh()
330 ce.refresh()
331 except: # re-raises
331 except: # re-raises
332 f.discard()
332 f.discard()
333 raise
333 raise
334
334
335 def _read(self):
335 def _read(self):
336 self._map = {}
336 self._map = {}
337 self._copymap = {}
337 self._copymap = {}
338 try:
338 try:
339 fp = self._opener.open(self._filename)
339 fp = self._opener.open(self._filename)
340 try:
340 try:
341 st = fp.read()
341 st = fp.read()
342 finally:
342 finally:
343 fp.close()
343 fp.close()
344 except IOError as err:
344 except IOError as err:
345 if err.errno != errno.ENOENT:
345 if err.errno != errno.ENOENT:
346 raise
346 raise
347 return
347 return
348 if not st:
348 if not st:
349 return
349 return
350
350
351 if util.safehasattr(parsers, 'dict_new_presized'):
351 if util.safehasattr(parsers, 'dict_new_presized'):
352 # Make an estimate of the number of files in the dirstate based on
352 # Make an estimate of the number of files in the dirstate based on
353 # its size. From a linear regression on a set of real-world repos,
353 # its size. From a linear regression on a set of real-world repos,
354 # all over 10,000 files, the size of a dirstate entry is 85
354 # all over 10,000 files, the size of a dirstate entry is 85
355 # bytes. The cost of resizing is significantly higher than the cost
355 # bytes. The cost of resizing is significantly higher than the cost
356 # of filling in a larger presized dict, so subtract 20% from the
356 # of filling in a larger presized dict, so subtract 20% from the
357 # size.
357 # size.
358 #
358 #
359 # This heuristic is imperfect in many ways, so in a future dirstate
359 # This heuristic is imperfect in many ways, so in a future dirstate
360 # format update it makes sense to just record the number of entries
360 # format update it makes sense to just record the number of entries
361 # on write.
361 # on write.
362 self._map = parsers.dict_new_presized(len(st) / 71)
362 self._map = parsers.dict_new_presized(len(st) / 71)
363
363
364 # Python's garbage collector triggers a GC each time a certain number
364 # Python's garbage collector triggers a GC each time a certain number
365 # of container objects (the number being defined by
365 # of container objects (the number being defined by
366 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
366 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
367 # for each file in the dirstate. The C version then immediately marks
367 # for each file in the dirstate. The C version then immediately marks
368 # them as not to be tracked by the collector. However, this has no
368 # them as not to be tracked by the collector. However, this has no
369 # effect on when GCs are triggered, only on what objects the GC looks
369 # effect on when GCs are triggered, only on what objects the GC looks
370 # into. This means that O(number of files) GCs are unavoidable.
370 # into. This means that O(number of files) GCs are unavoidable.
371 # Depending on when in the process's lifetime the dirstate is parsed,
371 # Depending on when in the process's lifetime the dirstate is parsed,
372 # this can get very expensive. As a workaround, disable GC while
372 # this can get very expensive. As a workaround, disable GC while
373 # parsing the dirstate.
373 # parsing the dirstate.
374 #
374 #
375 # (we cannot decorate the function directly since it is in a C module)
375 # (we cannot decorate the function directly since it is in a C module)
376 parse_dirstate = util.nogc(parsers.parse_dirstate)
376 parse_dirstate = util.nogc(parsers.parse_dirstate)
377 p = parse_dirstate(self._map, self._copymap, st)
377 p = parse_dirstate(self._map, self._copymap, st)
378 if not self._dirtypl:
378 if not self._dirtypl:
379 self._pl = p
379 self._pl = p
380
380
381 def invalidate(self):
381 def invalidate(self):
382 for a in ("_map", "_copymap", "_filefoldmap", "_dirfoldmap", "_branch",
382 for a in ("_map", "_copymap", "_filefoldmap", "_dirfoldmap", "_branch",
383 "_pl", "_dirs", "_ignore"):
383 "_pl", "_dirs", "_ignore"):
384 if a in self.__dict__:
384 if a in self.__dict__:
385 delattr(self, a)
385 delattr(self, a)
386 self._lastnormaltime = 0
386 self._lastnormaltime = 0
387 self._dirty = False
387 self._dirty = False
388 self._parentwriters = 0
388 self._parentwriters = 0
389
389
390 def copy(self, source, dest):
390 def copy(self, source, dest):
391 """Mark dest as a copy of source. Unmark dest if source is None."""
391 """Mark dest as a copy of source. Unmark dest if source is None."""
392 if source == dest:
392 if source == dest:
393 return
393 return
394 self._dirty = True
394 self._dirty = True
395 if source is not None:
395 if source is not None:
396 self._copymap[dest] = source
396 self._copymap[dest] = source
397 elif dest in self._copymap:
397 elif dest in self._copymap:
398 del self._copymap[dest]
398 del self._copymap[dest]
399
399
400 def copied(self, file):
400 def copied(self, file):
401 return self._copymap.get(file, None)
401 return self._copymap.get(file, None)
402
402
403 def copies(self):
403 def copies(self):
404 return self._copymap
404 return self._copymap
405
405
406 def _droppath(self, f):
406 def _droppath(self, f):
407 if self[f] not in "?r" and "_dirs" in self.__dict__:
407 if self[f] not in "?r" and "_dirs" in self.__dict__:
408 self._dirs.delpath(f)
408 self._dirs.delpath(f)
409
409
410 def _addpath(self, f, state, mode, size, mtime):
410 def _addpath(self, f, state, mode, size, mtime):
411 oldstate = self[f]
411 oldstate = self[f]
412 if state == 'a' or oldstate == 'r':
412 if state == 'a' or oldstate == 'r':
413 scmutil.checkfilename(f)
413 scmutil.checkfilename(f)
414 if f in self._dirs:
414 if f in self._dirs:
415 raise util.Abort(_('directory %r already in dirstate') % f)
415 raise util.Abort(_('directory %r already in dirstate') % f)
416 # shadows
416 # shadows
417 for d in util.finddirs(f):
417 for d in util.finddirs(f):
418 if d in self._dirs:
418 if d in self._dirs:
419 break
419 break
420 if d in self._map and self[d] != 'r':
420 if d in self._map and self[d] != 'r':
421 raise util.Abort(
421 raise util.Abort(
422 _('file %r in dirstate clashes with %r') % (d, f))
422 _('file %r in dirstate clashes with %r') % (d, f))
423 if oldstate in "?r" and "_dirs" in self.__dict__:
423 if oldstate in "?r" and "_dirs" in self.__dict__:
424 self._dirs.addpath(f)
424 self._dirs.addpath(f)
425 self._dirty = True
425 self._dirty = True
426 self._map[f] = dirstatetuple(state, mode, size, mtime)
426 self._map[f] = dirstatetuple(state, mode, size, mtime)
427
427
428 def normal(self, f):
428 def normal(self, f):
429 '''Mark a file normal and clean.'''
429 '''Mark a file normal and clean.'''
430 s = os.lstat(self._join(f))
430 s = os.lstat(self._join(f))
431 mtime = util.statmtimesec(s)
431 mtime = util.statmtimesec(s)
432 self._addpath(f, 'n', s.st_mode,
432 self._addpath(f, 'n', s.st_mode,
433 s.st_size & _rangemask, mtime & _rangemask)
433 s.st_size & _rangemask, mtime & _rangemask)
434 if f in self._copymap:
434 if f in self._copymap:
435 del self._copymap[f]
435 del self._copymap[f]
436 if mtime > self._lastnormaltime:
436 if mtime > self._lastnormaltime:
437 # Remember the most recent modification timeslot for status(),
437 # Remember the most recent modification timeslot for status(),
438 # to make sure we won't miss future size-preserving file content
438 # to make sure we won't miss future size-preserving file content
439 # modifications that happen within the same timeslot.
439 # modifications that happen within the same timeslot.
440 self._lastnormaltime = mtime
440 self._lastnormaltime = mtime
441
441
442 def normallookup(self, f):
442 def normallookup(self, f):
443 '''Mark a file normal, but possibly dirty.'''
443 '''Mark a file normal, but possibly dirty.'''
444 if self._pl[1] != nullid and f in self._map:
444 if self._pl[1] != nullid and f in self._map:
445 # if there is a merge going on and the file was either
445 # if there is a merge going on and the file was either
446 # in state 'm' (-1) or coming from other parent (-2) before
446 # in state 'm' (-1) or coming from other parent (-2) before
447 # being removed, restore that state.
447 # being removed, restore that state.
448 entry = self._map[f]
448 entry = self._map[f]
449 if entry[0] == 'r' and entry[2] in (-1, -2):
449 if entry[0] == 'r' and entry[2] in (-1, -2):
450 source = self._copymap.get(f)
450 source = self._copymap.get(f)
451 if entry[2] == -1:
451 if entry[2] == -1:
452 self.merge(f)
452 self.merge(f)
453 elif entry[2] == -2:
453 elif entry[2] == -2:
454 self.otherparent(f)
454 self.otherparent(f)
455 if source:
455 if source:
456 self.copy(source, f)
456 self.copy(source, f)
457 return
457 return
458 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
458 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
459 return
459 return
460 self._addpath(f, 'n', 0, -1, -1)
460 self._addpath(f, 'n', 0, -1, -1)
461 if f in self._copymap:
461 if f in self._copymap:
462 del self._copymap[f]
462 del self._copymap[f]
463
463
464 def otherparent(self, f):
464 def otherparent(self, f):
465 '''Mark as coming from the other parent, always dirty.'''
465 '''Mark as coming from the other parent, always dirty.'''
466 if self._pl[1] == nullid:
466 if self._pl[1] == nullid:
467 raise util.Abort(_("setting %r to other parent "
467 raise util.Abort(_("setting %r to other parent "
468 "only allowed in merges") % f)
468 "only allowed in merges") % f)
469 if f in self and self[f] == 'n':
469 if f in self and self[f] == 'n':
470 # merge-like
470 # merge-like
471 self._addpath(f, 'm', 0, -2, -1)
471 self._addpath(f, 'm', 0, -2, -1)
472 else:
472 else:
473 # add-like
473 # add-like
474 self._addpath(f, 'n', 0, -2, -1)
474 self._addpath(f, 'n', 0, -2, -1)
475
475
476 if f in self._copymap:
476 if f in self._copymap:
477 del self._copymap[f]
477 del self._copymap[f]
478
478
479 def add(self, f):
479 def add(self, f):
480 '''Mark a file added.'''
480 '''Mark a file added.'''
481 self._addpath(f, 'a', 0, -1, -1)
481 self._addpath(f, 'a', 0, -1, -1)
482 if f in self._copymap:
482 if f in self._copymap:
483 del self._copymap[f]
483 del self._copymap[f]
484
484
485 def remove(self, f):
485 def remove(self, f):
486 '''Mark a file removed.'''
486 '''Mark a file removed.'''
487 self._dirty = True
487 self._dirty = True
488 self._droppath(f)
488 self._droppath(f)
489 size = 0
489 size = 0
490 if self._pl[1] != nullid and f in self._map:
490 if self._pl[1] != nullid and f in self._map:
491 # backup the previous state
491 # backup the previous state
492 entry = self._map[f]
492 entry = self._map[f]
493 if entry[0] == 'm': # merge
493 if entry[0] == 'm': # merge
494 size = -1
494 size = -1
495 elif entry[0] == 'n' and entry[2] == -2: # other parent
495 elif entry[0] == 'n' and entry[2] == -2: # other parent
496 size = -2
496 size = -2
497 self._map[f] = dirstatetuple('r', 0, size, 0)
497 self._map[f] = dirstatetuple('r', 0, size, 0)
498 if size == 0 and f in self._copymap:
498 if size == 0 and f in self._copymap:
499 del self._copymap[f]
499 del self._copymap[f]
500
500
501 def merge(self, f):
501 def merge(self, f):
502 '''Mark a file merged.'''
502 '''Mark a file merged.'''
503 if self._pl[1] == nullid:
503 if self._pl[1] == nullid:
504 return self.normallookup(f)
504 return self.normallookup(f)
505 return self.otherparent(f)
505 return self.otherparent(f)
506
506
507 def drop(self, f):
507 def drop(self, f):
508 '''Drop a file from the dirstate'''
508 '''Drop a file from the dirstate'''
509 if f in self._map:
509 if f in self._map:
510 self._dirty = True
510 self._dirty = True
511 self._droppath(f)
511 self._droppath(f)
512 del self._map[f]
512 del self._map[f]
513
513
514 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
514 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
515 if exists is None:
515 if exists is None:
516 exists = os.path.lexists(os.path.join(self._root, path))
516 exists = os.path.lexists(os.path.join(self._root, path))
517 if not exists:
517 if not exists:
518 # Maybe a path component exists
518 # Maybe a path component exists
519 if not ignoremissing and '/' in path:
519 if not ignoremissing and '/' in path:
520 d, f = path.rsplit('/', 1)
520 d, f = path.rsplit('/', 1)
521 d = self._normalize(d, False, ignoremissing, None)
521 d = self._normalize(d, False, ignoremissing, None)
522 folded = d + "/" + f
522 folded = d + "/" + f
523 else:
523 else:
524 # No path components, preserve original case
524 # No path components, preserve original case
525 folded = path
525 folded = path
526 else:
526 else:
527 # recursively normalize leading directory components
527 # recursively normalize leading directory components
528 # against dirstate
528 # against dirstate
529 if '/' in normed:
529 if '/' in normed:
530 d, f = normed.rsplit('/', 1)
530 d, f = normed.rsplit('/', 1)
531 d = self._normalize(d, False, ignoremissing, True)
531 d = self._normalize(d, False, ignoremissing, True)
532 r = self._root + "/" + d
532 r = self._root + "/" + d
533 folded = d + "/" + util.fspath(f, r)
533 folded = d + "/" + util.fspath(f, r)
534 else:
534 else:
535 folded = util.fspath(normed, self._root)
535 folded = util.fspath(normed, self._root)
536 storemap[normed] = folded
536 storemap[normed] = folded
537
537
538 return folded
538 return folded
539
539
540 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
540 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
541 normed = util.normcase(path)
541 normed = util.normcase(path)
542 folded = self._filefoldmap.get(normed, None)
542 folded = self._filefoldmap.get(normed, None)
543 if folded is None:
543 if folded is None:
544 if isknown:
544 if isknown:
545 folded = path
545 folded = path
546 else:
546 else:
547 folded = self._discoverpath(path, normed, ignoremissing, exists,
547 folded = self._discoverpath(path, normed, ignoremissing, exists,
548 self._filefoldmap)
548 self._filefoldmap)
549 return folded
549 return folded
550
550
551 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
551 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
552 normed = util.normcase(path)
552 normed = util.normcase(path)
553 folded = self._filefoldmap.get(normed, None)
553 folded = self._filefoldmap.get(normed, None)
554 if folded is None:
554 if folded is None:
555 folded = self._dirfoldmap.get(normed, None)
555 folded = self._dirfoldmap.get(normed, None)
556 if folded is None:
556 if folded is None:
557 if isknown:
557 if isknown:
558 folded = path
558 folded = path
559 else:
559 else:
560 # store discovered result in dirfoldmap so that future
560 # store discovered result in dirfoldmap so that future
561 # normalizefile calls don't start matching directories
561 # normalizefile calls don't start matching directories
562 folded = self._discoverpath(path, normed, ignoremissing, exists,
562 folded = self._discoverpath(path, normed, ignoremissing, exists,
563 self._dirfoldmap)
563 self._dirfoldmap)
564 return folded
564 return folded
565
565
566 def normalize(self, path, isknown=False, ignoremissing=False):
566 def normalize(self, path, isknown=False, ignoremissing=False):
567 '''
567 '''
568 normalize the case of a pathname when on a casefolding filesystem
568 normalize the case of a pathname when on a casefolding filesystem
569
569
570 isknown specifies whether the filename came from walking the
570 isknown specifies whether the filename came from walking the
571 disk, to avoid extra filesystem access.
571 disk, to avoid extra filesystem access.
572
572
573 If ignoremissing is True, missing path are returned
573 If ignoremissing is True, missing path are returned
574 unchanged. Otherwise, we try harder to normalize possibly
574 unchanged. Otherwise, we try harder to normalize possibly
575 existing path components.
575 existing path components.
576
576
577 The normalized case is determined based on the following precedence:
577 The normalized case is determined based on the following precedence:
578
578
579 - version of name already stored in the dirstate
579 - version of name already stored in the dirstate
580 - version of name stored on disk
580 - version of name stored on disk
581 - version provided via command arguments
581 - version provided via command arguments
582 '''
582 '''
583
583
584 if self._checkcase:
584 if self._checkcase:
585 return self._normalize(path, isknown, ignoremissing)
585 return self._normalize(path, isknown, ignoremissing)
586 return path
586 return path
587
587
588 def clear(self):
588 def clear(self):
589 self._map = {}
589 self._map = {}
590 if "_dirs" in self.__dict__:
590 if "_dirs" in self.__dict__:
591 delattr(self, "_dirs")
591 delattr(self, "_dirs")
592 self._copymap = {}
592 self._copymap = {}
593 self._pl = [nullid, nullid]
593 self._pl = [nullid, nullid]
594 self._lastnormaltime = 0
594 self._lastnormaltime = 0
595 self._dirty = True
595 self._dirty = True
596
596
597 def rebuild(self, parent, allfiles, changedfiles=None):
597 def rebuild(self, parent, allfiles, changedfiles=None):
598 if changedfiles is None:
598 if changedfiles is None:
599 changedfiles = allfiles
599 changedfiles = allfiles
600 oldmap = self._map
600 oldmap = self._map
601 self.clear()
601 self.clear()
602 for f in allfiles:
602 for f in allfiles:
603 if f not in changedfiles:
603 if f not in changedfiles:
604 self._map[f] = oldmap[f]
604 self._map[f] = oldmap[f]
605 else:
605 else:
606 if 'x' in allfiles.flags(f):
606 if 'x' in allfiles.flags(f):
607 self._map[f] = dirstatetuple('n', 0o777, -1, 0)
607 self._map[f] = dirstatetuple('n', 0o777, -1, 0)
608 else:
608 else:
609 self._map[f] = dirstatetuple('n', 0o666, -1, 0)
609 self._map[f] = dirstatetuple('n', 0o666, -1, 0)
610 self._pl = (parent, nullid)
610 self._pl = (parent, nullid)
611 self._dirty = True
611 self._dirty = True
612
612
613 def write(self):
613 def write(self):
614 if not self._dirty:
614 if not self._dirty:
615 return
615 return
616
616
617 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
617 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
618 # timestamp of each entries in dirstate, because of 'now > mtime'
618 # timestamp of each entries in dirstate, because of 'now > mtime'
619 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite', 0)
619 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite', 0)
620 if delaywrite > 0:
620 if delaywrite > 0:
621 import time # to avoid useless import
621 import time # to avoid useless import
622 time.sleep(delaywrite)
622 time.sleep(delaywrite)
623
623
624 st = self._opener(self._filename, "w", atomictemp=True)
624 st = self._opener(self._filename, "w", atomictemp=True)
625 self._writedirstate(st)
626
627 def _writedirstate(self, st):
625 # use the modification time of the newly created temporary file as the
628 # use the modification time of the newly created temporary file as the
626 # filesystem's notion of 'now'
629 # filesystem's notion of 'now'
627 now = util.fstat(st).st_mtime
630 now = util.fstat(st).st_mtime
628 st.write(parsers.pack_dirstate(self._map, self._copymap, self._pl, now))
631 st.write(parsers.pack_dirstate(self._map, self._copymap, self._pl, now))
629 st.close()
632 st.close()
630 self._lastnormaltime = 0
633 self._lastnormaltime = 0
631 self._dirty = self._dirtypl = False
634 self._dirty = self._dirtypl = False
632
635
633 def _dirignore(self, f):
636 def _dirignore(self, f):
634 if f == '.':
637 if f == '.':
635 return False
638 return False
636 if self._ignore(f):
639 if self._ignore(f):
637 return True
640 return True
638 for p in util.finddirs(f):
641 for p in util.finddirs(f):
639 if self._ignore(p):
642 if self._ignore(p):
640 return True
643 return True
641 return False
644 return False
642
645
643 def _walkexplicit(self, match, subrepos):
646 def _walkexplicit(self, match, subrepos):
644 '''Get stat data about the files explicitly specified by match.
647 '''Get stat data about the files explicitly specified by match.
645
648
646 Return a triple (results, dirsfound, dirsnotfound).
649 Return a triple (results, dirsfound, dirsnotfound).
647 - results is a mapping from filename to stat result. It also contains
650 - results is a mapping from filename to stat result. It also contains
648 listings mapping subrepos and .hg to None.
651 listings mapping subrepos and .hg to None.
649 - dirsfound is a list of files found to be directories.
652 - dirsfound is a list of files found to be directories.
650 - dirsnotfound is a list of files that the dirstate thinks are
653 - dirsnotfound is a list of files that the dirstate thinks are
651 directories and that were not found.'''
654 directories and that were not found.'''
652
655
653 def badtype(mode):
656 def badtype(mode):
654 kind = _('unknown')
657 kind = _('unknown')
655 if stat.S_ISCHR(mode):
658 if stat.S_ISCHR(mode):
656 kind = _('character device')
659 kind = _('character device')
657 elif stat.S_ISBLK(mode):
660 elif stat.S_ISBLK(mode):
658 kind = _('block device')
661 kind = _('block device')
659 elif stat.S_ISFIFO(mode):
662 elif stat.S_ISFIFO(mode):
660 kind = _('fifo')
663 kind = _('fifo')
661 elif stat.S_ISSOCK(mode):
664 elif stat.S_ISSOCK(mode):
662 kind = _('socket')
665 kind = _('socket')
663 elif stat.S_ISDIR(mode):
666 elif stat.S_ISDIR(mode):
664 kind = _('directory')
667 kind = _('directory')
665 return _('unsupported file type (type is %s)') % kind
668 return _('unsupported file type (type is %s)') % kind
666
669
667 matchedir = match.explicitdir
670 matchedir = match.explicitdir
668 badfn = match.bad
671 badfn = match.bad
669 dmap = self._map
672 dmap = self._map
670 lstat = os.lstat
673 lstat = os.lstat
671 getkind = stat.S_IFMT
674 getkind = stat.S_IFMT
672 dirkind = stat.S_IFDIR
675 dirkind = stat.S_IFDIR
673 regkind = stat.S_IFREG
676 regkind = stat.S_IFREG
674 lnkkind = stat.S_IFLNK
677 lnkkind = stat.S_IFLNK
675 join = self._join
678 join = self._join
676 dirsfound = []
679 dirsfound = []
677 foundadd = dirsfound.append
680 foundadd = dirsfound.append
678 dirsnotfound = []
681 dirsnotfound = []
679 notfoundadd = dirsnotfound.append
682 notfoundadd = dirsnotfound.append
680
683
681 if not match.isexact() and self._checkcase:
684 if not match.isexact() and self._checkcase:
682 normalize = self._normalize
685 normalize = self._normalize
683 else:
686 else:
684 normalize = None
687 normalize = None
685
688
686 files = sorted(match.files())
689 files = sorted(match.files())
687 subrepos.sort()
690 subrepos.sort()
688 i, j = 0, 0
691 i, j = 0, 0
689 while i < len(files) and j < len(subrepos):
692 while i < len(files) and j < len(subrepos):
690 subpath = subrepos[j] + "/"
693 subpath = subrepos[j] + "/"
691 if files[i] < subpath:
694 if files[i] < subpath:
692 i += 1
695 i += 1
693 continue
696 continue
694 while i < len(files) and files[i].startswith(subpath):
697 while i < len(files) and files[i].startswith(subpath):
695 del files[i]
698 del files[i]
696 j += 1
699 j += 1
697
700
698 if not files or '.' in files:
701 if not files or '.' in files:
699 files = ['.']
702 files = ['.']
700 results = dict.fromkeys(subrepos)
703 results = dict.fromkeys(subrepos)
701 results['.hg'] = None
704 results['.hg'] = None
702
705
703 alldirs = None
706 alldirs = None
704 for ff in files:
707 for ff in files:
705 # constructing the foldmap is expensive, so don't do it for the
708 # constructing the foldmap is expensive, so don't do it for the
706 # common case where files is ['.']
709 # common case where files is ['.']
707 if normalize and ff != '.':
710 if normalize and ff != '.':
708 nf = normalize(ff, False, True)
711 nf = normalize(ff, False, True)
709 else:
712 else:
710 nf = ff
713 nf = ff
711 if nf in results:
714 if nf in results:
712 continue
715 continue
713
716
714 try:
717 try:
715 st = lstat(join(nf))
718 st = lstat(join(nf))
716 kind = getkind(st.st_mode)
719 kind = getkind(st.st_mode)
717 if kind == dirkind:
720 if kind == dirkind:
718 if nf in dmap:
721 if nf in dmap:
719 # file replaced by dir on disk but still in dirstate
722 # file replaced by dir on disk but still in dirstate
720 results[nf] = None
723 results[nf] = None
721 if matchedir:
724 if matchedir:
722 matchedir(nf)
725 matchedir(nf)
723 foundadd((nf, ff))
726 foundadd((nf, ff))
724 elif kind == regkind or kind == lnkkind:
727 elif kind == regkind or kind == lnkkind:
725 results[nf] = st
728 results[nf] = st
726 else:
729 else:
727 badfn(ff, badtype(kind))
730 badfn(ff, badtype(kind))
728 if nf in dmap:
731 if nf in dmap:
729 results[nf] = None
732 results[nf] = None
730 except OSError as inst: # nf not found on disk - it is dirstate only
733 except OSError as inst: # nf not found on disk - it is dirstate only
731 if nf in dmap: # does it exactly match a missing file?
734 if nf in dmap: # does it exactly match a missing file?
732 results[nf] = None
735 results[nf] = None
733 else: # does it match a missing directory?
736 else: # does it match a missing directory?
734 if alldirs is None:
737 if alldirs is None:
735 alldirs = util.dirs(dmap)
738 alldirs = util.dirs(dmap)
736 if nf in alldirs:
739 if nf in alldirs:
737 if matchedir:
740 if matchedir:
738 matchedir(nf)
741 matchedir(nf)
739 notfoundadd(nf)
742 notfoundadd(nf)
740 else:
743 else:
741 badfn(ff, inst.strerror)
744 badfn(ff, inst.strerror)
742
745
743 # Case insensitive filesystems cannot rely on lstat() failing to detect
746 # Case insensitive filesystems cannot rely on lstat() failing to detect
744 # a case-only rename. Prune the stat object for any file that does not
747 # a case-only rename. Prune the stat object for any file that does not
745 # match the case in the filesystem, if there are multiple files that
748 # match the case in the filesystem, if there are multiple files that
746 # normalize to the same path.
749 # normalize to the same path.
747 if match.isexact() and self._checkcase:
750 if match.isexact() and self._checkcase:
748 normed = {}
751 normed = {}
749
752
750 for f, st in results.iteritems():
753 for f, st in results.iteritems():
751 if st is None:
754 if st is None:
752 continue
755 continue
753
756
754 nc = util.normcase(f)
757 nc = util.normcase(f)
755 paths = normed.get(nc)
758 paths = normed.get(nc)
756
759
757 if paths is None:
760 if paths is None:
758 paths = set()
761 paths = set()
759 normed[nc] = paths
762 normed[nc] = paths
760
763
761 paths.add(f)
764 paths.add(f)
762
765
763 for norm, paths in normed.iteritems():
766 for norm, paths in normed.iteritems():
764 if len(paths) > 1:
767 if len(paths) > 1:
765 for path in paths:
768 for path in paths:
766 folded = self._discoverpath(path, norm, True, None,
769 folded = self._discoverpath(path, norm, True, None,
767 self._dirfoldmap)
770 self._dirfoldmap)
768 if path != folded:
771 if path != folded:
769 results[path] = None
772 results[path] = None
770
773
771 return results, dirsfound, dirsnotfound
774 return results, dirsfound, dirsnotfound
772
775
773 def walk(self, match, subrepos, unknown, ignored, full=True):
776 def walk(self, match, subrepos, unknown, ignored, full=True):
774 '''
777 '''
775 Walk recursively through the directory tree, finding all files
778 Walk recursively through the directory tree, finding all files
776 matched by match.
779 matched by match.
777
780
778 If full is False, maybe skip some known-clean files.
781 If full is False, maybe skip some known-clean files.
779
782
780 Return a dict mapping filename to stat-like object (either
783 Return a dict mapping filename to stat-like object (either
781 mercurial.osutil.stat instance or return value of os.stat()).
784 mercurial.osutil.stat instance or return value of os.stat()).
782
785
783 '''
786 '''
784 # full is a flag that extensions that hook into walk can use -- this
787 # full is a flag that extensions that hook into walk can use -- this
785 # implementation doesn't use it at all. This satisfies the contract
788 # implementation doesn't use it at all. This satisfies the contract
786 # because we only guarantee a "maybe".
789 # because we only guarantee a "maybe".
787
790
788 if ignored:
791 if ignored:
789 ignore = util.never
792 ignore = util.never
790 dirignore = util.never
793 dirignore = util.never
791 elif unknown:
794 elif unknown:
792 ignore = self._ignore
795 ignore = self._ignore
793 dirignore = self._dirignore
796 dirignore = self._dirignore
794 else:
797 else:
795 # if not unknown and not ignored, drop dir recursion and step 2
798 # if not unknown and not ignored, drop dir recursion and step 2
796 ignore = util.always
799 ignore = util.always
797 dirignore = util.always
800 dirignore = util.always
798
801
799 matchfn = match.matchfn
802 matchfn = match.matchfn
800 matchalways = match.always()
803 matchalways = match.always()
801 matchtdir = match.traversedir
804 matchtdir = match.traversedir
802 dmap = self._map
805 dmap = self._map
803 listdir = osutil.listdir
806 listdir = osutil.listdir
804 lstat = os.lstat
807 lstat = os.lstat
805 dirkind = stat.S_IFDIR
808 dirkind = stat.S_IFDIR
806 regkind = stat.S_IFREG
809 regkind = stat.S_IFREG
807 lnkkind = stat.S_IFLNK
810 lnkkind = stat.S_IFLNK
808 join = self._join
811 join = self._join
809
812
810 exact = skipstep3 = False
813 exact = skipstep3 = False
811 if match.isexact(): # match.exact
814 if match.isexact(): # match.exact
812 exact = True
815 exact = True
813 dirignore = util.always # skip step 2
816 dirignore = util.always # skip step 2
814 elif match.prefix(): # match.match, no patterns
817 elif match.prefix(): # match.match, no patterns
815 skipstep3 = True
818 skipstep3 = True
816
819
817 if not exact and self._checkcase:
820 if not exact and self._checkcase:
818 normalize = self._normalize
821 normalize = self._normalize
819 normalizefile = self._normalizefile
822 normalizefile = self._normalizefile
820 skipstep3 = False
823 skipstep3 = False
821 else:
824 else:
822 normalize = self._normalize
825 normalize = self._normalize
823 normalizefile = None
826 normalizefile = None
824
827
825 # step 1: find all explicit files
828 # step 1: find all explicit files
826 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
829 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
827
830
828 skipstep3 = skipstep3 and not (work or dirsnotfound)
831 skipstep3 = skipstep3 and not (work or dirsnotfound)
829 work = [d for d in work if not dirignore(d[0])]
832 work = [d for d in work if not dirignore(d[0])]
830
833
831 # step 2: visit subdirectories
834 # step 2: visit subdirectories
832 def traverse(work, alreadynormed):
835 def traverse(work, alreadynormed):
833 wadd = work.append
836 wadd = work.append
834 while work:
837 while work:
835 nd = work.pop()
838 nd = work.pop()
836 skip = None
839 skip = None
837 if nd == '.':
840 if nd == '.':
838 nd = ''
841 nd = ''
839 else:
842 else:
840 skip = '.hg'
843 skip = '.hg'
841 try:
844 try:
842 entries = listdir(join(nd), stat=True, skip=skip)
845 entries = listdir(join(nd), stat=True, skip=skip)
843 except OSError as inst:
846 except OSError as inst:
844 if inst.errno in (errno.EACCES, errno.ENOENT):
847 if inst.errno in (errno.EACCES, errno.ENOENT):
845 match.bad(self.pathto(nd), inst.strerror)
848 match.bad(self.pathto(nd), inst.strerror)
846 continue
849 continue
847 raise
850 raise
848 for f, kind, st in entries:
851 for f, kind, st in entries:
849 if normalizefile:
852 if normalizefile:
850 # even though f might be a directory, we're only
853 # even though f might be a directory, we're only
851 # interested in comparing it to files currently in the
854 # interested in comparing it to files currently in the
852 # dmap -- therefore normalizefile is enough
855 # dmap -- therefore normalizefile is enough
853 nf = normalizefile(nd and (nd + "/" + f) or f, True,
856 nf = normalizefile(nd and (nd + "/" + f) or f, True,
854 True)
857 True)
855 else:
858 else:
856 nf = nd and (nd + "/" + f) or f
859 nf = nd and (nd + "/" + f) or f
857 if nf not in results:
860 if nf not in results:
858 if kind == dirkind:
861 if kind == dirkind:
859 if not ignore(nf):
862 if not ignore(nf):
860 if matchtdir:
863 if matchtdir:
861 matchtdir(nf)
864 matchtdir(nf)
862 wadd(nf)
865 wadd(nf)
863 if nf in dmap and (matchalways or matchfn(nf)):
866 if nf in dmap and (matchalways or matchfn(nf)):
864 results[nf] = None
867 results[nf] = None
865 elif kind == regkind or kind == lnkkind:
868 elif kind == regkind or kind == lnkkind:
866 if nf in dmap:
869 if nf in dmap:
867 if matchalways or matchfn(nf):
870 if matchalways or matchfn(nf):
868 results[nf] = st
871 results[nf] = st
869 elif ((matchalways or matchfn(nf))
872 elif ((matchalways or matchfn(nf))
870 and not ignore(nf)):
873 and not ignore(nf)):
871 # unknown file -- normalize if necessary
874 # unknown file -- normalize if necessary
872 if not alreadynormed:
875 if not alreadynormed:
873 nf = normalize(nf, False, True)
876 nf = normalize(nf, False, True)
874 results[nf] = st
877 results[nf] = st
875 elif nf in dmap and (matchalways or matchfn(nf)):
878 elif nf in dmap and (matchalways or matchfn(nf)):
876 results[nf] = None
879 results[nf] = None
877
880
878 for nd, d in work:
881 for nd, d in work:
879 # alreadynormed means that processwork doesn't have to do any
882 # alreadynormed means that processwork doesn't have to do any
880 # expensive directory normalization
883 # expensive directory normalization
881 alreadynormed = not normalize or nd == d
884 alreadynormed = not normalize or nd == d
882 traverse([d], alreadynormed)
885 traverse([d], alreadynormed)
883
886
884 for s in subrepos:
887 for s in subrepos:
885 del results[s]
888 del results[s]
886 del results['.hg']
889 del results['.hg']
887
890
888 # step 3: visit remaining files from dmap
891 # step 3: visit remaining files from dmap
889 if not skipstep3 and not exact:
892 if not skipstep3 and not exact:
890 # If a dmap file is not in results yet, it was either
893 # If a dmap file is not in results yet, it was either
891 # a) not matching matchfn b) ignored, c) missing, or d) under a
894 # a) not matching matchfn b) ignored, c) missing, or d) under a
892 # symlink directory.
895 # symlink directory.
893 if not results and matchalways:
896 if not results and matchalways:
894 visit = dmap.keys()
897 visit = dmap.keys()
895 else:
898 else:
896 visit = [f for f in dmap if f not in results and matchfn(f)]
899 visit = [f for f in dmap if f not in results and matchfn(f)]
897 visit.sort()
900 visit.sort()
898
901
899 if unknown:
902 if unknown:
900 # unknown == True means we walked all dirs under the roots
903 # unknown == True means we walked all dirs under the roots
901 # that wasn't ignored, and everything that matched was stat'ed
904 # that wasn't ignored, and everything that matched was stat'ed
902 # and is already in results.
905 # and is already in results.
903 # The rest must thus be ignored or under a symlink.
906 # The rest must thus be ignored or under a symlink.
904 audit_path = pathutil.pathauditor(self._root)
907 audit_path = pathutil.pathauditor(self._root)
905
908
906 for nf in iter(visit):
909 for nf in iter(visit):
907 # If a stat for the same file was already added with a
910 # If a stat for the same file was already added with a
908 # different case, don't add one for this, since that would
911 # different case, don't add one for this, since that would
909 # make it appear as if the file exists under both names
912 # make it appear as if the file exists under both names
910 # on disk.
913 # on disk.
911 if (normalizefile and
914 if (normalizefile and
912 normalizefile(nf, True, True) in results):
915 normalizefile(nf, True, True) in results):
913 results[nf] = None
916 results[nf] = None
914 # Report ignored items in the dmap as long as they are not
917 # Report ignored items in the dmap as long as they are not
915 # under a symlink directory.
918 # under a symlink directory.
916 elif audit_path.check(nf):
919 elif audit_path.check(nf):
917 try:
920 try:
918 results[nf] = lstat(join(nf))
921 results[nf] = lstat(join(nf))
919 # file was just ignored, no links, and exists
922 # file was just ignored, no links, and exists
920 except OSError:
923 except OSError:
921 # file doesn't exist
924 # file doesn't exist
922 results[nf] = None
925 results[nf] = None
923 else:
926 else:
924 # It's either missing or under a symlink directory
927 # It's either missing or under a symlink directory
925 # which we in this case report as missing
928 # which we in this case report as missing
926 results[nf] = None
929 results[nf] = None
927 else:
930 else:
928 # We may not have walked the full directory tree above,
931 # We may not have walked the full directory tree above,
929 # so stat and check everything we missed.
932 # so stat and check everything we missed.
930 nf = iter(visit).next
933 nf = iter(visit).next
931 for st in util.statfiles([join(i) for i in visit]):
934 for st in util.statfiles([join(i) for i in visit]):
932 results[nf()] = st
935 results[nf()] = st
933 return results
936 return results
934
937
935 def status(self, match, subrepos, ignored, clean, unknown):
938 def status(self, match, subrepos, ignored, clean, unknown):
936 '''Determine the status of the working copy relative to the
939 '''Determine the status of the working copy relative to the
937 dirstate and return a pair of (unsure, status), where status is of type
940 dirstate and return a pair of (unsure, status), where status is of type
938 scmutil.status and:
941 scmutil.status and:
939
942
940 unsure:
943 unsure:
941 files that might have been modified since the dirstate was
944 files that might have been modified since the dirstate was
942 written, but need to be read to be sure (size is the same
945 written, but need to be read to be sure (size is the same
943 but mtime differs)
946 but mtime differs)
944 status.modified:
947 status.modified:
945 files that have definitely been modified since the dirstate
948 files that have definitely been modified since the dirstate
946 was written (different size or mode)
949 was written (different size or mode)
947 status.clean:
950 status.clean:
948 files that have definitely not been modified since the
951 files that have definitely not been modified since the
949 dirstate was written
952 dirstate was written
950 '''
953 '''
951 listignored, listclean, listunknown = ignored, clean, unknown
954 listignored, listclean, listunknown = ignored, clean, unknown
952 lookup, modified, added, unknown, ignored = [], [], [], [], []
955 lookup, modified, added, unknown, ignored = [], [], [], [], []
953 removed, deleted, clean = [], [], []
956 removed, deleted, clean = [], [], []
954
957
955 dmap = self._map
958 dmap = self._map
956 ladd = lookup.append # aka "unsure"
959 ladd = lookup.append # aka "unsure"
957 madd = modified.append
960 madd = modified.append
958 aadd = added.append
961 aadd = added.append
959 uadd = unknown.append
962 uadd = unknown.append
960 iadd = ignored.append
963 iadd = ignored.append
961 radd = removed.append
964 radd = removed.append
962 dadd = deleted.append
965 dadd = deleted.append
963 cadd = clean.append
966 cadd = clean.append
964 mexact = match.exact
967 mexact = match.exact
965 dirignore = self._dirignore
968 dirignore = self._dirignore
966 checkexec = self._checkexec
969 checkexec = self._checkexec
967 copymap = self._copymap
970 copymap = self._copymap
968 lastnormaltime = self._lastnormaltime
971 lastnormaltime = self._lastnormaltime
969
972
970 # We need to do full walks when either
973 # We need to do full walks when either
971 # - we're listing all clean files, or
974 # - we're listing all clean files, or
972 # - match.traversedir does something, because match.traversedir should
975 # - match.traversedir does something, because match.traversedir should
973 # be called for every dir in the working dir
976 # be called for every dir in the working dir
974 full = listclean or match.traversedir is not None
977 full = listclean or match.traversedir is not None
975 for fn, st in self.walk(match, subrepos, listunknown, listignored,
978 for fn, st in self.walk(match, subrepos, listunknown, listignored,
976 full=full).iteritems():
979 full=full).iteritems():
977 if fn not in dmap:
980 if fn not in dmap:
978 if (listignored or mexact(fn)) and dirignore(fn):
981 if (listignored or mexact(fn)) and dirignore(fn):
979 if listignored:
982 if listignored:
980 iadd(fn)
983 iadd(fn)
981 else:
984 else:
982 uadd(fn)
985 uadd(fn)
983 continue
986 continue
984
987
985 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
988 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
986 # written like that for performance reasons. dmap[fn] is not a
989 # written like that for performance reasons. dmap[fn] is not a
987 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
990 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
988 # opcode has fast paths when the value to be unpacked is a tuple or
991 # opcode has fast paths when the value to be unpacked is a tuple or
989 # a list, but falls back to creating a full-fledged iterator in
992 # a list, but falls back to creating a full-fledged iterator in
990 # general. That is much slower than simply accessing and storing the
993 # general. That is much slower than simply accessing and storing the
991 # tuple members one by one.
994 # tuple members one by one.
992 t = dmap[fn]
995 t = dmap[fn]
993 state = t[0]
996 state = t[0]
994 mode = t[1]
997 mode = t[1]
995 size = t[2]
998 size = t[2]
996 time = t[3]
999 time = t[3]
997
1000
998 if not st and state in "nma":
1001 if not st and state in "nma":
999 dadd(fn)
1002 dadd(fn)
1000 elif state == 'n':
1003 elif state == 'n':
1001 mtime = util.statmtimesec(st)
1004 mtime = util.statmtimesec(st)
1002 if (size >= 0 and
1005 if (size >= 0 and
1003 ((size != st.st_size and size != st.st_size & _rangemask)
1006 ((size != st.st_size and size != st.st_size & _rangemask)
1004 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1007 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1005 or size == -2 # other parent
1008 or size == -2 # other parent
1006 or fn in copymap):
1009 or fn in copymap):
1007 madd(fn)
1010 madd(fn)
1008 elif time != mtime and time != mtime & _rangemask:
1011 elif time != mtime and time != mtime & _rangemask:
1009 ladd(fn)
1012 ladd(fn)
1010 elif mtime == lastnormaltime:
1013 elif mtime == lastnormaltime:
1011 # fn may have just been marked as normal and it may have
1014 # fn may have just been marked as normal and it may have
1012 # changed in the same second without changing its size.
1015 # changed in the same second without changing its size.
1013 # This can happen if we quickly do multiple commits.
1016 # This can happen if we quickly do multiple commits.
1014 # Force lookup, so we don't miss such a racy file change.
1017 # Force lookup, so we don't miss such a racy file change.
1015 ladd(fn)
1018 ladd(fn)
1016 elif listclean:
1019 elif listclean:
1017 cadd(fn)
1020 cadd(fn)
1018 elif state == 'm':
1021 elif state == 'm':
1019 madd(fn)
1022 madd(fn)
1020 elif state == 'a':
1023 elif state == 'a':
1021 aadd(fn)
1024 aadd(fn)
1022 elif state == 'r':
1025 elif state == 'r':
1023 radd(fn)
1026 radd(fn)
1024
1027
1025 return (lookup, scmutil.status(modified, added, removed, deleted,
1028 return (lookup, scmutil.status(modified, added, removed, deleted,
1026 unknown, ignored, clean))
1029 unknown, ignored, clean))
1027
1030
1028 def matches(self, match):
1031 def matches(self, match):
1029 '''
1032 '''
1030 return files in the dirstate (in whatever state) filtered by match
1033 return files in the dirstate (in whatever state) filtered by match
1031 '''
1034 '''
1032 dmap = self._map
1035 dmap = self._map
1033 if match.always():
1036 if match.always():
1034 return dmap.keys()
1037 return dmap.keys()
1035 files = match.files()
1038 files = match.files()
1036 if match.isexact():
1039 if match.isexact():
1037 # fast path -- filter the other way around, since typically files is
1040 # fast path -- filter the other way around, since typically files is
1038 # much smaller than dmap
1041 # much smaller than dmap
1039 return [f for f in files if f in dmap]
1042 return [f for f in files if f in dmap]
1040 if match.prefix() and all(fn in dmap for fn in files):
1043 if match.prefix() and all(fn in dmap for fn in files):
1041 # fast path -- all the values are known to be files, so just return
1044 # fast path -- all the values are known to be files, so just return
1042 # that
1045 # that
1043 return list(files)
1046 return list(files)
1044 return [f for f in dmap if match(f)]
1047 return [f for f in dmap if match(f)]