dirstate: implement __len__ on dirstatemap (issue5695)...
Simon Whitaker
r34409:7d2f71b7 default
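
The hunk below contains only unchanged context lines from mercurial/dirstate.py; the change the commit message describes, adding __len__ to the dirstatemap class so len() works on it (issue5695), falls outside this excerpt. As a hedged sketch only, not the diff itself: the surrounding code (e.g. the parsers.parse_dirstate and pack_dirstate calls that pass self._map._map) suggests dirstatemap keeps its entries in an internal dict named _map, so the new method would plausibly just delegate to it:

    # Hypothetical sketch, not shown in the hunk below. Assumes dirstatemap
    # stores its entries in self._map (a plain dict), as the calls to
    # parsers.parse_dirstate(self._map._map, ...) in dirstate._read suggest.
    def __len__(self):
        return len(self._map)

With that in place, len(repo.dirstate._map) would report the number of tracked entries again, which is what issue5695 asked for.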
@@ -1,1398 +1,1401 b''
# dirstate.py - working directory tracking for mercurial
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import collections
import contextlib
import errno
import os
import stat

from .i18n import _
from .node import nullid
from . import (
    encoding,
    error,
    match as matchmod,
    pathutil,
    policy,
    pycompat,
    scmutil,
    txnutil,
    util,
)

parsers = policy.importmod(r'parsers')

propertycache = util.propertycache
filecache = scmutil.filecache
_rangemask = 0x7fffffff

dirstatetuple = parsers.dirstatetuple

class repocache(filecache):
    """filecache for files in .hg/"""
    def join(self, obj, fname):
        return obj._opener.join(fname)

class rootcache(filecache):
    """filecache for files in the repository root"""
    def join(self, obj, fname):
        return obj._join(fname)

def _getfsnow(vfs):
    '''Get "now" timestamp on filesystem'''
    tmpfd, tmpname = vfs.mkstemp()
    try:
        return os.fstat(tmpfd).st_mtime
    finally:
        os.close(tmpfd)
        vfs.unlink(tmpname)

class dirstate(object):

    def __init__(self, opener, ui, root, validate, sparsematchfn):
        '''Create a new dirstate object.

        opener is an open()-like callable that can be used to open the
        dirstate file; root is the root of the directory tracked by
        the dirstate.
        '''
        self._opener = opener
        self._validate = validate
        self._root = root
        self._sparsematchfn = sparsematchfn
        # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
        # UNC path pointing to root share (issue4557)
        self._rootdir = pathutil.normasprefix(root)
        self._dirty = False
        self._lastnormaltime = 0
        self._ui = ui
        self._filecache = {}
        self._parentwriters = 0
        self._filename = 'dirstate'
        self._pendingfilename = '%s.pending' % self._filename
        self._plchangecallbacks = {}
        self._origpl = None
        self._updatedfiles = set()

    @contextlib.contextmanager
    def parentchange(self):
        '''Context manager for handling dirstate parents.

        If an exception occurs in the scope of the context manager,
        the incoherent dirstate won't be written when wlock is
        released.
        '''
        self._parentwriters += 1
        yield
        # Typically we want the "undo" step of a context manager in a
        # finally block so it happens even when an exception
        # occurs. In this case, however, we only want to decrement
        # parentwriters if the code in the with statement exits
        # normally, so we don't have a try/finally here on purpose.
        self._parentwriters -= 1

    def beginparentchange(self):
        '''Marks the beginning of a set of changes that involve changing
        the dirstate parents. If there is an exception during this time,
        the dirstate will not be written when the wlock is released. This
        prevents writing an incoherent dirstate where the parent doesn't
        match the contents.
        '''
        self._ui.deprecwarn('beginparentchange is obsoleted by the '
                            'parentchange context manager.', '4.3')
        self._parentwriters += 1

    def endparentchange(self):
        '''Marks the end of a set of changes that involve changing the
        dirstate parents. Once all parent changes have been marked done,
        the wlock will be free to write the dirstate on release.
        '''
        self._ui.deprecwarn('endparentchange is obsoleted by the '
                            'parentchange context manager.', '4.3')
        if self._parentwriters > 0:
            self._parentwriters -= 1

    def pendingparentchange(self):
        '''Returns true if the dirstate is in the middle of a set of changes
        that modify the dirstate parent.
        '''
        return self._parentwriters > 0

    @propertycache
    def _map(self):
        '''Return the dirstate contents as a map from filename to
        (state, mode, size, time).'''
        self._read()
        return self._map

    @propertycache
    def _identity(self):
        self._read()
        return self._identity

    @propertycache
    def _nonnormalset(self):
        nonnorm, otherparents = self._map.nonnormalentries()
        self._otherparentset = otherparents
        return nonnorm

    @propertycache
    def _otherparentset(self):
        nonnorm, otherparents = self._map.nonnormalentries()
        self._nonnormalset = nonnorm
        return otherparents

    @propertycache
    def _filefoldmap(self):
        return self._map.filefoldmap()

    @propertycache
    def _dirfoldmap(self):
        f = {}
        normcase = util.normcase
        for name in self._dirs:
            f[normcase(name)] = name
        return f

    @property
    def _sparsematcher(self):
        """The matcher for the sparse checkout.

        The working directory may not include every file from a manifest. The
        matcher obtained by this property will match a path if it is to be
        included in the working directory.
        """
        # TODO there is potential to cache this property. For now, the matcher
        # is resolved on every access. (But the called function does use a
        # cache to keep the lookup fast.)
        return self._sparsematchfn()

    @repocache('branch')
    def _branch(self):
        try:
            return self._opener.read("branch").strip() or "default"
        except IOError as inst:
            if inst.errno != errno.ENOENT:
                raise
            return "default"

    @property
    def _pl(self):
        return self._map.parents()

    @propertycache
    def _dirs(self):
        return self._map.dirs()

    def dirs(self):
        return self._dirs

    @rootcache('.hgignore')
    def _ignore(self):
        files = self._ignorefiles()
        if not files:
            return matchmod.never(self._root, '')

        pats = ['include:%s' % f for f in files]
        return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)

    @propertycache
    def _slash(self):
        return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'

    @propertycache
    def _checklink(self):
        return util.checklink(self._root)

    @propertycache
    def _checkexec(self):
        return util.checkexec(self._root)

    @propertycache
    def _checkcase(self):
        return not util.fscasesensitive(self._join('.hg'))

    def _join(self, f):
        # much faster than os.path.join()
        # it's safe because f is always a relative path
        return self._rootdir + f

    def flagfunc(self, buildfallback):
        if self._checklink and self._checkexec:
            def f(x):
                try:
                    st = os.lstat(self._join(x))
                    if util.statislink(st):
                        return 'l'
                    if util.statisexec(st):
                        return 'x'
                except OSError:
                    pass
                return ''
            return f

        fallback = buildfallback()
        if self._checklink:
            def f(x):
                if os.path.islink(self._join(x)):
                    return 'l'
                if 'x' in fallback(x):
                    return 'x'
                return ''
            return f
        if self._checkexec:
            def f(x):
                if 'l' in fallback(x):
                    return 'l'
                if util.isexec(self._join(x)):
                    return 'x'
                return ''
            return f
        else:
            return fallback

    @propertycache
    def _cwd(self):
        # internal config: ui.forcecwd
        forcecwd = self._ui.config('ui', 'forcecwd')
        if forcecwd:
            return forcecwd
        return pycompat.getcwd()

    def getcwd(self):
        '''Return the path from which a canonical path is calculated.

        This path should be used to resolve file patterns or to convert
        canonical paths back to file paths for display. It shouldn't be
        used to get real file paths. Use vfs functions instead.
        '''
        cwd = self._cwd
        if cwd == self._root:
            return ''
        # self._root ends with a path separator if self._root is '/' or 'C:\'
        rootsep = self._root
        if not util.endswithsep(rootsep):
            rootsep += pycompat.ossep
        if cwd.startswith(rootsep):
            return cwd[len(rootsep):]
        else:
            # we're outside the repo. return an absolute path.
            return cwd

    def pathto(self, f, cwd=None):
        if cwd is None:
            cwd = self.getcwd()
        path = util.pathto(self._root, cwd, f)
        if self._slash:
            return util.pconvert(path)
        return path

    def __getitem__(self, key):
        '''Return the current state of key (a filename) in the dirstate.

        States are:
          n  normal
          m  needs merging
          r  marked for removal
          a  marked for addition
          ?  not tracked
        '''
        return self._map.get(key, ("?",))[0]

    def __contains__(self, key):
        return key in self._map

    def __iter__(self):
        return iter(sorted(self._map))

    def items(self):
        return self._map.iteritems()

    iteritems = items

    def parents(self):
        return [self._validate(p) for p in self._pl]

    def p1(self):
        return self._validate(self._pl[0])

    def p2(self):
        return self._validate(self._pl[1])

    def branch(self):
        return encoding.tolocal(self._branch)

    def setparents(self, p1, p2=nullid):
        """Set dirstate parents to p1 and p2.

        When moving from two parents to one, 'm' merged entries a
        adjusted to normal and previous copy records discarded and
        returned by the call.

        See localrepo.setparents()
        """
        if self._parentwriters == 0:
            raise ValueError("cannot set dirstate parent without "
                             "calling dirstate.beginparentchange")

        self._dirty = True
        oldp2 = self._pl[1]
        if self._origpl is None:
            self._origpl = self._pl
        self._map.setparents(p1, p2)
        copies = {}
        if oldp2 != nullid and p2 == nullid:
            candidatefiles = self._nonnormalset.union(self._otherparentset)
            for f in candidatefiles:
                s = self._map.get(f)
                if s is None:
                    continue

                # Discard 'm' markers when moving away from a merge state
                if s[0] == 'm':
                    source = self._map.copymap.get(f)
                    if source:
                        copies[f] = source
                    self.normallookup(f)
                # Also fix up otherparent markers
                elif s[0] == 'n' and s[2] == -2:
                    source = self._map.copymap.get(f)
                    if source:
                        copies[f] = source
                    self.add(f)
        return copies

    def setbranch(self, branch):
        self._branch = encoding.fromlocal(branch)
        f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
        try:
            f.write(self._branch + '\n')
            f.close()

            # make sure filecache has the correct stat info for _branch after
            # replacing the underlying file
            ce = self._filecache['_branch']
            if ce:
                ce.refresh()
        except: # re-raises
            f.discard()
            raise

    def _read(self):
        self._map = dirstatemap(self._ui, self._opener, self._root)

        # ignore HG_PENDING because identity is used only for writing
        self._identity = util.filestat.frompath(
            self._opener.join(self._filename))
        try:
            fp = self._map._opendirstatefile()
            try:
                st = fp.read()
            finally:
                fp.close()
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            return
        if not st:
            return

        if util.safehasattr(parsers, 'dict_new_presized'):
            # Make an estimate of the number of files in the dirstate based on
            # its size. From a linear regression on a set of real-world repos,
            # all over 10,000 files, the size of a dirstate entry is 85
            # bytes. The cost of resizing is significantly higher than the cost
            # of filling in a larger presized dict, so subtract 20% from the
            # size.
            #
            # This heuristic is imperfect in many ways, so in a future dirstate
            # format update it makes sense to just record the number of entries
            # on write.
            self._map._map = parsers.dict_new_presized(len(st) / 71)

        # Python's garbage collector triggers a GC each time a certain number
        # of container objects (the number being defined by
        # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
        # for each file in the dirstate. The C version then immediately marks
        # them as not to be tracked by the collector. However, this has no
        # effect on when GCs are triggered, only on what objects the GC looks
        # into. This means that O(number of files) GCs are unavoidable.
        # Depending on when in the process's lifetime the dirstate is parsed,
        # this can get very expensive. As a workaround, disable GC while
        # parsing the dirstate.
        #
        # (we cannot decorate the function directly since it is in a C module)
        parse_dirstate = util.nogc(parsers.parse_dirstate)
        p = parse_dirstate(self._map._map, self._map.copymap, st)
        if not self._map._dirtyparents:
            self._map.setparents(*p)

    def invalidate(self):
        '''Causes the next access to reread the dirstate.

        This is different from localrepo.invalidatedirstate() because it always
        rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
        check whether the dirstate has changed before rereading it.'''

        for a in ("_map", "_identity",
                  "_filefoldmap", "_dirfoldmap", "_branch",
                  "_dirs", "_ignore", "_nonnormalset",
                  "_otherparentset"):
            if a in self.__dict__:
                delattr(self, a)
        self._lastnormaltime = 0
        self._dirty = False
        self._updatedfiles.clear()
        self._parentwriters = 0
        self._origpl = None

    def copy(self, source, dest):
        """Mark dest as a copy of source. Unmark dest if source is None."""
        if source == dest:
            return
        self._dirty = True
        if source is not None:
            self._map.copymap[dest] = source
            self._updatedfiles.add(source)
            self._updatedfiles.add(dest)
        elif self._map.copymap.pop(dest, None):
            self._updatedfiles.add(dest)

    def copied(self, file):
        return self._map.copymap.get(file, None)

    def copies(self):
        return self._map.copymap

    def _droppath(self, f):
        if self[f] not in "?r" and "_dirs" in self.__dict__:
            self._dirs.delpath(f)

        if "_filefoldmap" in self.__dict__:
            normed = util.normcase(f)
            if normed in self._filefoldmap:
                del self._filefoldmap[normed]

        self._updatedfiles.add(f)

    def _addpath(self, f, state, mode, size, mtime):
        oldstate = self[f]
        if state == 'a' or oldstate == 'r':
            scmutil.checkfilename(f)
            if f in self._dirs:
                raise error.Abort(_('directory %r already in dirstate') % f)
            # shadows
            for d in util.finddirs(f):
                if d in self._dirs:
                    break
                entry = self._map.get(d)
                if entry is not None and entry[0] != 'r':
                    raise error.Abort(
                        _('file %r in dirstate clashes with %r') % (d, f))
        if oldstate in "?r" and "_dirs" in self.__dict__:
            self._dirs.addpath(f)
        self._dirty = True
        self._updatedfiles.add(f)
        self._map[f] = dirstatetuple(state, mode, size, mtime)
        if state != 'n' or mtime == -1:
            self._nonnormalset.add(f)
        if size == -2:
            self._otherparentset.add(f)

    def normal(self, f):
        '''Mark a file normal and clean.'''
        s = os.lstat(self._join(f))
        mtime = s.st_mtime
        self._addpath(f, 'n', s.st_mode,
                      s.st_size & _rangemask, mtime & _rangemask)
        self._map.copymap.pop(f, None)
        if f in self._nonnormalset:
            self._nonnormalset.remove(f)
        if mtime > self._lastnormaltime:
            # Remember the most recent modification timeslot for status(),
            # to make sure we won't miss future size-preserving file content
            # modifications that happen within the same timeslot.
            self._lastnormaltime = mtime

    def normallookup(self, f):
        '''Mark a file normal, but possibly dirty.'''
        if self._pl[1] != nullid:
            # if there is a merge going on and the file was either
            # in state 'm' (-1) or coming from other parent (-2) before
            # being removed, restore that state.
            entry = self._map.get(f)
            if entry is not None:
                if entry[0] == 'r' and entry[2] in (-1, -2):
                    source = self._map.copymap.get(f)
                    if entry[2] == -1:
                        self.merge(f)
                    elif entry[2] == -2:
                        self.otherparent(f)
                    if source:
                        self.copy(source, f)
                    return
                if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
                    return
        self._addpath(f, 'n', 0, -1, -1)
        self._map.copymap.pop(f, None)
        if f in self._nonnormalset:
            self._nonnormalset.remove(f)

    def otherparent(self, f):
        '''Mark as coming from the other parent, always dirty.'''
        if self._pl[1] == nullid:
            raise error.Abort(_("setting %r to other parent "
                                "only allowed in merges") % f)
        if f in self and self[f] == 'n':
            # merge-like
            self._addpath(f, 'm', 0, -2, -1)
        else:
            # add-like
            self._addpath(f, 'n', 0, -2, -1)
        self._map.copymap.pop(f, None)

    def add(self, f):
        '''Mark a file added.'''
        self._addpath(f, 'a', 0, -1, -1)
        self._map.copymap.pop(f, None)

    def remove(self, f):
        '''Mark a file removed.'''
        self._dirty = True
        self._droppath(f)
        size = 0
        if self._pl[1] != nullid:
            entry = self._map.get(f)
            if entry is not None:
                # backup the previous state
                if entry[0] == 'm': # merge
                    size = -1
                elif entry[0] == 'n' and entry[2] == -2: # other parent
                    size = -2
                    self._otherparentset.add(f)
        self._map[f] = dirstatetuple('r', 0, size, 0)
        self._nonnormalset.add(f)
        if size == 0:
            self._map.copymap.pop(f, None)

    def merge(self, f):
        '''Mark a file merged.'''
        if self._pl[1] == nullid:
            return self.normallookup(f)
        return self.otherparent(f)

    def drop(self, f):
        '''Drop a file from the dirstate'''
        if f in self._map:
            self._dirty = True
            self._droppath(f)
            del self._map[f]
            if f in self._nonnormalset:
                self._nonnormalset.remove(f)
            self._map.copymap.pop(f, None)

    def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
        if exists is None:
            exists = os.path.lexists(os.path.join(self._root, path))
        if not exists:
            # Maybe a path component exists
            if not ignoremissing and '/' in path:
                d, f = path.rsplit('/', 1)
                d = self._normalize(d, False, ignoremissing, None)
                folded = d + "/" + f
            else:
                # No path components, preserve original case
                folded = path
        else:
            # recursively normalize leading directory components
            # against dirstate
            if '/' in normed:
                d, f = normed.rsplit('/', 1)
                d = self._normalize(d, False, ignoremissing, True)
                r = self._root + "/" + d
                folded = d + "/" + util.fspath(f, r)
            else:
                folded = util.fspath(normed, self._root)
            storemap[normed] = folded

        return folded

    def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
        normed = util.normcase(path)
        folded = self._filefoldmap.get(normed, None)
        if folded is None:
            if isknown:
                folded = path
            else:
                folded = self._discoverpath(path, normed, ignoremissing, exists,
                                            self._filefoldmap)
        return folded

    def _normalize(self, path, isknown, ignoremissing=False, exists=None):
        normed = util.normcase(path)
        folded = self._filefoldmap.get(normed, None)
        if folded is None:
            folded = self._dirfoldmap.get(normed, None)
        if folded is None:
            if isknown:
                folded = path
            else:
                # store discovered result in dirfoldmap so that future
                # normalizefile calls don't start matching directories
                folded = self._discoverpath(path, normed, ignoremissing, exists,
                                            self._dirfoldmap)
        return folded

    def normalize(self, path, isknown=False, ignoremissing=False):
        '''
        normalize the case of a pathname when on a casefolding filesystem

        isknown specifies whether the filename came from walking the
        disk, to avoid extra filesystem access.

        If ignoremissing is True, missing path are returned
        unchanged. Otherwise, we try harder to normalize possibly
        existing path components.

        The normalized case is determined based on the following precedence:

        - version of name already stored in the dirstate
        - version of name stored on disk
        - version provided via command arguments
        '''

        if self._checkcase:
            return self._normalize(path, isknown, ignoremissing)
        return path

    def clear(self):
        self._map = dirstatemap(self._ui, self._opener, self._root)
        self._nonnormalset = set()
        self._otherparentset = set()
        if "_dirs" in self.__dict__:
            delattr(self, "_dirs")
        self._map.setparents(nullid, nullid)
        self._lastnormaltime = 0
        self._updatedfiles.clear()
        self._dirty = True

    def rebuild(self, parent, allfiles, changedfiles=None):
        if changedfiles is None:
            # Rebuild entire dirstate
            changedfiles = allfiles
        lastnormaltime = self._lastnormaltime
        self.clear()
        self._lastnormaltime = lastnormaltime

        if self._origpl is None:
            self._origpl = self._pl
        self._map.setparents(parent, nullid)
        for f in changedfiles:
            if f in allfiles:
                self.normallookup(f)
            else:
                self.drop(f)

        self._dirty = True

    def identity(self):
        '''Return identity of dirstate itself to detect changing in storage

        If identity of previous dirstate is equal to this, writing
        changes based on the former dirstate out can keep consistency.
        '''
        return self._identity

    def write(self, tr):
        if not self._dirty:
            return

        filename = self._filename
        if tr:
            # 'dirstate.write()' is not only for writing in-memory
            # changes out, but also for dropping ambiguous timestamp.
            # delayed writing re-raise "ambiguous timestamp issue".
            # See also the wiki page below for detail:
            # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan

            # emulate dropping timestamp in 'parsers.pack_dirstate'
            now = _getfsnow(self._opener)
            dmap = self._map
            for f in self._updatedfiles:
                e = dmap.get(f)
                if e is not None and e[0] == 'n' and e[3] == now:
                    dmap[f] = dirstatetuple(e[0], e[1], e[2], -1)
                    self._nonnormalset.add(f)

            # emulate that all 'dirstate.normal' results are written out
            self._lastnormaltime = 0
            self._updatedfiles.clear()

            # delay writing in-memory changes out
            tr.addfilegenerator('dirstate', (self._filename,),
                                self._writedirstate, location='plain')
            return

        st = self._opener(filename, "w", atomictemp=True, checkambig=True)
        self._writedirstate(st)

    def addparentchangecallback(self, category, callback):
        """add a callback to be called when the wd parents are changed

        Callback will be called with the following arguments:
            dirstate, (oldp1, oldp2), (newp1, newp2)

        Category is a unique identifier to allow overwriting an old callback
        with a newer callback.
        """
        self._plchangecallbacks[category] = callback

    def _writedirstate(self, st):
        # notify callbacks about parents change
        if self._origpl is not None and self._origpl != self._pl:
            for c, callback in sorted(self._plchangecallbacks.iteritems()):
                callback(self, self._origpl, self._pl)
            self._origpl = None
        # use the modification time of the newly created temporary file as the
        # filesystem's notion of 'now'
        now = util.fstat(st).st_mtime & _rangemask

        # enough 'delaywrite' prevents 'pack_dirstate' from dropping
        # timestamp of each entries in dirstate, because of 'now > mtime'
        delaywrite = self._ui.configint('debug', 'dirstate.delaywrite', 0)
        if delaywrite > 0:
            # do we have any files to delay for?
            for f, e in self._map.iteritems():
                if e[0] == 'n' and e[3] == now:
                    import time # to avoid useless import
                    # rather than sleep n seconds, sleep until the next
                    # multiple of n seconds
                    clock = time.time()
                    start = int(clock) - (int(clock) % delaywrite)
                    end = start + delaywrite
                    time.sleep(end - clock)
                    now = end # trust our estimate that the end is near now
                    break

        st.write(parsers.pack_dirstate(self._map._map, self._map.copymap,
                                       self._pl, now))
        self._nonnormalset, self._otherparentset = self._map.nonnormalentries()
        st.close()
        self._lastnormaltime = 0
        self._dirty = self._map._dirtyparents = False

    def _dirignore(self, f):
        if f == '.':
            return False
        if self._ignore(f):
            return True
        for p in util.finddirs(f):
            if self._ignore(p):
                return True
        return False

    def _ignorefiles(self):
        files = []
        if os.path.exists(self._join('.hgignore')):
            files.append(self._join('.hgignore'))
        for name, path in self._ui.configitems("ui"):
            if name == 'ignore' or name.startswith('ignore.'):
                # we need to use os.path.join here rather than self._join
                # because path is arbitrary and user-specified
                files.append(os.path.join(self._rootdir, util.expandpath(path)))
        return files

    def _ignorefileandline(self, f):
        files = collections.deque(self._ignorefiles())
        visited = set()
        while files:
            i = files.popleft()
            patterns = matchmod.readpatternfile(i, self._ui.warn,
                                                sourceinfo=True)
            for pattern, lineno, line in patterns:
                kind, p = matchmod._patsplit(pattern, 'glob')
                if kind == "subinclude":
                    if p not in visited:
                        files.append(p)
                    continue
                m = matchmod.match(self._root, '', [], [pattern],
                                   warn=self._ui.warn)
                if m(f):
                    return (i, lineno, line)
            visited.add(i)
        return (None, -1, "")

    def _walkexplicit(self, match, subrepos):
        '''Get stat data about the files explicitly specified by match.

        Return a triple (results, dirsfound, dirsnotfound).
        - results is a mapping from filename to stat result. It also contains
          listings mapping subrepos and .hg to None.
        - dirsfound is a list of files found to be directories.
        - dirsnotfound is a list of files that the dirstate thinks are
          directories and that were not found.'''

        def badtype(mode):
            kind = _('unknown')
            if stat.S_ISCHR(mode):
                kind = _('character device')
            elif stat.S_ISBLK(mode):
                kind = _('block device')
            elif stat.S_ISFIFO(mode):
                kind = _('fifo')
            elif stat.S_ISSOCK(mode):
                kind = _('socket')
            elif stat.S_ISDIR(mode):
                kind = _('directory')
            return _('unsupported file type (type is %s)') % kind

        matchedir = match.explicitdir
        badfn = match.bad
        dmap = self._map
        lstat = os.lstat
        getkind = stat.S_IFMT
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join
        dirsfound = []
        foundadd = dirsfound.append
        dirsnotfound = []
        notfoundadd = dirsnotfound.append

        if not match.isexact() and self._checkcase:
            normalize = self._normalize
        else:
            normalize = None

        files = sorted(match.files())
        subrepos.sort()
        i, j = 0, 0
        while i < len(files) and j < len(subrepos):
            subpath = subrepos[j] + "/"
            if files[i] < subpath:
                i += 1
                continue
            while i < len(files) and files[i].startswith(subpath):
                del files[i]
            j += 1

        if not files or '.' in files:
            files = ['.']
        results = dict.fromkeys(subrepos)
        results['.hg'] = None

        alldirs = None
        for ff in files:
            # constructing the foldmap is expensive, so don't do it for the
            # common case where files is ['.']
            if normalize and ff != '.':
                nf = normalize(ff, False, True)
            else:
                nf = ff
            if nf in results:
                continue

            try:
                st = lstat(join(nf))
                kind = getkind(st.st_mode)
                if kind == dirkind:
                    if nf in dmap:
                        # file replaced by dir on disk but still in dirstate
                        results[nf] = None
                    if matchedir:
                        matchedir(nf)
                    foundadd((nf, ff))
                elif kind == regkind or kind == lnkkind:
                    results[nf] = st
                else:
                    badfn(ff, badtype(kind))
                    if nf in dmap:
                        results[nf] = None
            except OSError as inst: # nf not found on disk - it is dirstate only
                if nf in dmap: # does it exactly match a missing file?
                    results[nf] = None
                else: # does it match a missing directory?
                    if alldirs is None:
                        alldirs = util.dirs(dmap._map)
                    if nf in alldirs:
                        if matchedir:
                            matchedir(nf)
                        notfoundadd(nf)
                    else:
                        badfn(ff, encoding.strtolocal(inst.strerror))

        # Case insensitive filesystems cannot rely on lstat() failing to detect
        # a case-only rename. Prune the stat object for any file that does not
        # match the case in the filesystem, if there are multiple files that
        # normalize to the same path.
        if match.isexact() and self._checkcase:
            normed = {}

            for f, st in results.iteritems():
                if st is None:
                    continue

                nc = util.normcase(f)
                paths = normed.get(nc)

                if paths is None:
                    paths = set()
                    normed[nc] = paths

                paths.add(f)

            for norm, paths in normed.iteritems():
                if len(paths) > 1:
                    for path in paths:
                        folded = self._discoverpath(path, norm, True, None,
                                                    self._dirfoldmap)
                        if path != folded:
                            results[path] = None

        return results, dirsfound, dirsnotfound

    def walk(self, match, subrepos, unknown, ignored, full=True):
        '''
        Walk recursively through the directory tree, finding all files
        matched by match.

        If full is False, maybe skip some known-clean files.

        Return a dict mapping filename to stat-like object (either
        mercurial.osutil.stat instance or return value of os.stat()).

        '''
        # full is a flag that extensions that hook into walk can use -- this
        # implementation doesn't use it at all. This satisfies the contract
        # because we only guarantee a "maybe".

        if ignored:
            ignore = util.never
            dirignore = util.never
        elif unknown:
            ignore = self._ignore
            dirignore = self._dirignore
        else:
            # if not unknown and not ignored, drop dir recursion and step 2
            ignore = util.always
            dirignore = util.always

        matchfn = match.matchfn
        matchalways = match.always()
        matchtdir = match.traversedir
        dmap = self._map
        listdir = util.listdir
        lstat = os.lstat
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join

        exact = skipstep3 = False
        if match.isexact(): # match.exact
            exact = True
            dirignore = util.always # skip step 2
        elif match.prefix(): # match.match, no patterns
            skipstep3 = True

        if not exact and self._checkcase:
            normalize = self._normalize
            normalizefile = self._normalizefile
1009 skipstep3 = False
1009 skipstep3 = False
1010 else:
1010 else:
1011 normalize = self._normalize
1011 normalize = self._normalize
1012 normalizefile = None
1012 normalizefile = None
1013
1013
1014 # step 1: find all explicit files
1014 # step 1: find all explicit files
1015 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1015 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1016
1016
1017 skipstep3 = skipstep3 and not (work or dirsnotfound)
1017 skipstep3 = skipstep3 and not (work or dirsnotfound)
1018 work = [d for d in work if not dirignore(d[0])]
1018 work = [d for d in work if not dirignore(d[0])]
1019
1019
1020 # step 2: visit subdirectories
1020 # step 2: visit subdirectories
1021 def traverse(work, alreadynormed):
1021 def traverse(work, alreadynormed):
1022 wadd = work.append
1022 wadd = work.append
1023 while work:
1023 while work:
1024 nd = work.pop()
1024 nd = work.pop()
1025 if not match.visitdir(nd):
1025 if not match.visitdir(nd):
1026 continue
1026 continue
1027 skip = None
1027 skip = None
1028 if nd == '.':
1028 if nd == '.':
1029 nd = ''
1029 nd = ''
1030 else:
1030 else:
1031 skip = '.hg'
1031 skip = '.hg'
1032 try:
1032 try:
1033 entries = listdir(join(nd), stat=True, skip=skip)
1033 entries = listdir(join(nd), stat=True, skip=skip)
1034 except OSError as inst:
1034 except OSError as inst:
1035 if inst.errno in (errno.EACCES, errno.ENOENT):
1035 if inst.errno in (errno.EACCES, errno.ENOENT):
1036 match.bad(self.pathto(nd),
1036 match.bad(self.pathto(nd),
1037 encoding.strtolocal(inst.strerror))
1037 encoding.strtolocal(inst.strerror))
1038 continue
1038 continue
1039 raise
1039 raise
1040 for f, kind, st in entries:
1040 for f, kind, st in entries:
1041 if normalizefile:
1041 if normalizefile:
1042 # even though f might be a directory, we're only
1042 # even though f might be a directory, we're only
1043 # interested in comparing it to files currently in the
1043 # interested in comparing it to files currently in the
1044 # dmap -- therefore normalizefile is enough
1044 # dmap -- therefore normalizefile is enough
1045 nf = normalizefile(nd and (nd + "/" + f) or f, True,
1045 nf = normalizefile(nd and (nd + "/" + f) or f, True,
1046 True)
1046 True)
1047 else:
1047 else:
1048 nf = nd and (nd + "/" + f) or f
1048 nf = nd and (nd + "/" + f) or f
1049 if nf not in results:
1049 if nf not in results:
1050 if kind == dirkind:
1050 if kind == dirkind:
1051 if not ignore(nf):
1051 if not ignore(nf):
1052 if matchtdir:
1052 if matchtdir:
1053 matchtdir(nf)
1053 matchtdir(nf)
1054 wadd(nf)
1054 wadd(nf)
1055 if nf in dmap and (matchalways or matchfn(nf)):
1055 if nf in dmap and (matchalways or matchfn(nf)):
1056 results[nf] = None
1056 results[nf] = None
1057 elif kind == regkind or kind == lnkkind:
1057 elif kind == regkind or kind == lnkkind:
1058 if nf in dmap:
1058 if nf in dmap:
1059 if matchalways or matchfn(nf):
1059 if matchalways or matchfn(nf):
1060 results[nf] = st
1060 results[nf] = st
1061 elif ((matchalways or matchfn(nf))
1061 elif ((matchalways or matchfn(nf))
1062 and not ignore(nf)):
1062 and not ignore(nf)):
1063 # unknown file -- normalize if necessary
1063 # unknown file -- normalize if necessary
1064 if not alreadynormed:
1064 if not alreadynormed:
1065 nf = normalize(nf, False, True)
1065 nf = normalize(nf, False, True)
1066 results[nf] = st
1066 results[nf] = st
1067 elif nf in dmap and (matchalways or matchfn(nf)):
1067 elif nf in dmap and (matchalways or matchfn(nf)):
1068 results[nf] = None
1068 results[nf] = None
1069
1069
1070 for nd, d in work:
1070 for nd, d in work:
1071 # alreadynormed means that processwork doesn't have to do any
1071 # alreadynormed means that processwork doesn't have to do any
1072 # expensive directory normalization
1072 # expensive directory normalization
1073 alreadynormed = not normalize or nd == d
1073 alreadynormed = not normalize or nd == d
1074 traverse([d], alreadynormed)
1074 traverse([d], alreadynormed)
1075
1075
1076 for s in subrepos:
1076 for s in subrepos:
1077 del results[s]
1077 del results[s]
1078 del results['.hg']
1078 del results['.hg']
1079
1079
1080 # step 3: visit remaining files from dmap
1080 # step 3: visit remaining files from dmap
1081 if not skipstep3 and not exact:
1081 if not skipstep3 and not exact:
1082 # If a dmap file is not in results yet, it was either
1082 # If a dmap file is not in results yet, it was either
1083 # a) not matching matchfn b) ignored, c) missing, or d) under a
1083 # a) not matching matchfn b) ignored, c) missing, or d) under a
1084 # symlink directory.
1084 # symlink directory.
1085 if not results and matchalways:
1085 if not results and matchalways:
1086 visit = [f for f in dmap]
1086 visit = [f for f in dmap]
1087 else:
1087 else:
1088 visit = [f for f in dmap if f not in results and matchfn(f)]
1088 visit = [f for f in dmap if f not in results and matchfn(f)]
1089 visit.sort()
1089 visit.sort()
1090
1090
1091 if unknown:
1091 if unknown:
1092 # unknown == True means we walked all dirs under the roots
1092 # unknown == True means we walked all dirs under the roots
1093 # that wasn't ignored, and everything that matched was stat'ed
1093 # that wasn't ignored, and everything that matched was stat'ed
1094 # and is already in results.
1094 # and is already in results.
1095 # The rest must thus be ignored or under a symlink.
1095 # The rest must thus be ignored or under a symlink.
1096 audit_path = pathutil.pathauditor(self._root, cached=True)
1096 audit_path = pathutil.pathauditor(self._root, cached=True)
1097
1097
1098 for nf in iter(visit):
1098 for nf in iter(visit):
1099 # If a stat for the same file was already added with a
1099 # If a stat for the same file was already added with a
1100 # different case, don't add one for this, since that would
1100 # different case, don't add one for this, since that would
1101 # make it appear as if the file exists under both names
1101 # make it appear as if the file exists under both names
1102 # on disk.
1102 # on disk.
1103 if (normalizefile and
1103 if (normalizefile and
1104 normalizefile(nf, True, True) in results):
1104 normalizefile(nf, True, True) in results):
1105 results[nf] = None
1105 results[nf] = None
1106 # Report ignored items in the dmap as long as they are not
1106 # Report ignored items in the dmap as long as they are not
1107 # under a symlink directory.
1107 # under a symlink directory.
1108 elif audit_path.check(nf):
1108 elif audit_path.check(nf):
1109 try:
1109 try:
1110 results[nf] = lstat(join(nf))
1110 results[nf] = lstat(join(nf))
1111 # file was just ignored, no links, and exists
1111 # file was just ignored, no links, and exists
1112 except OSError:
1112 except OSError:
1113 # file doesn't exist
1113 # file doesn't exist
1114 results[nf] = None
1114 results[nf] = None
1115 else:
1115 else:
1116 # It's either missing or under a symlink directory
1116 # It's either missing or under a symlink directory
1117 # which we in this case report as missing
1117 # which we in this case report as missing
1118 results[nf] = None
1118 results[nf] = None
1119 else:
1119 else:
1120 # We may not have walked the full directory tree above,
1120 # We may not have walked the full directory tree above,
1121 # so stat and check everything we missed.
1121 # so stat and check everything we missed.
1122 iv = iter(visit)
1122 iv = iter(visit)
1123 for st in util.statfiles([join(i) for i in visit]):
1123 for st in util.statfiles([join(i) for i in visit]):
1124 results[next(iv)] = st
1124 results[next(iv)] = st
1125 return results
1125 return results
1126
1126
1127 def status(self, match, subrepos, ignored, clean, unknown):
1127 def status(self, match, subrepos, ignored, clean, unknown):
1128 '''Determine the status of the working copy relative to the
1128 '''Determine the status of the working copy relative to the
1129 dirstate and return a pair of (unsure, status), where status is of type
1129 dirstate and return a pair of (unsure, status), where status is of type
1130 scmutil.status and:
1130 scmutil.status and:
1131
1131
1132 unsure:
1132 unsure:
1133 files that might have been modified since the dirstate was
1133 files that might have been modified since the dirstate was
1134 written, but need to be read to be sure (size is the same
1134 written, but need to be read to be sure (size is the same
1135 but mtime differs)
1135 but mtime differs)
1136 status.modified:
1136 status.modified:
1137 files that have definitely been modified since the dirstate
1137 files that have definitely been modified since the dirstate
1138 was written (different size or mode)
1138 was written (different size or mode)
1139 status.clean:
1139 status.clean:
1140 files that have definitely not been modified since the
1140 files that have definitely not been modified since the
1141 dirstate was written
1141 dirstate was written
1142 '''
1142 '''
1143 listignored, listclean, listunknown = ignored, clean, unknown
1143 listignored, listclean, listunknown = ignored, clean, unknown
1144 lookup, modified, added, unknown, ignored = [], [], [], [], []
1144 lookup, modified, added, unknown, ignored = [], [], [], [], []
1145 removed, deleted, clean = [], [], []
1145 removed, deleted, clean = [], [], []
1146
1146
1147 dmap = self._map
1147 dmap = self._map
1148 ladd = lookup.append # aka "unsure"
1148 ladd = lookup.append # aka "unsure"
1149 madd = modified.append
1149 madd = modified.append
1150 aadd = added.append
1150 aadd = added.append
1151 uadd = unknown.append
1151 uadd = unknown.append
1152 iadd = ignored.append
1152 iadd = ignored.append
1153 radd = removed.append
1153 radd = removed.append
1154 dadd = deleted.append
1154 dadd = deleted.append
1155 cadd = clean.append
1155 cadd = clean.append
1156 mexact = match.exact
1156 mexact = match.exact
1157 dirignore = self._dirignore
1157 dirignore = self._dirignore
1158 checkexec = self._checkexec
1158 checkexec = self._checkexec
1159 copymap = self._map.copymap
1159 copymap = self._map.copymap
1160 lastnormaltime = self._lastnormaltime
1160 lastnormaltime = self._lastnormaltime
1161
1161
1162 # We need to do full walks when either
1162 # We need to do full walks when either
1163 # - we're listing all clean files, or
1163 # - we're listing all clean files, or
1164 # - match.traversedir does something, because match.traversedir should
1164 # - match.traversedir does something, because match.traversedir should
1165 # be called for every dir in the working dir
1165 # be called for every dir in the working dir
1166 full = listclean or match.traversedir is not None
1166 full = listclean or match.traversedir is not None
1167 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1167 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1168 full=full).iteritems():
1168 full=full).iteritems():
1169 if fn not in dmap:
1169 if fn not in dmap:
1170 if (listignored or mexact(fn)) and dirignore(fn):
1170 if (listignored or mexact(fn)) and dirignore(fn):
1171 if listignored:
1171 if listignored:
1172 iadd(fn)
1172 iadd(fn)
1173 else:
1173 else:
1174 uadd(fn)
1174 uadd(fn)
1175 continue
1175 continue
1176
1176
1177 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1177 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1178 # written like that for performance reasons. dmap[fn] is not a
1178 # written like that for performance reasons. dmap[fn] is not a
1179 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1179 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1180 # opcode has fast paths when the value to be unpacked is a tuple or
1180 # opcode has fast paths when the value to be unpacked is a tuple or
1181 # a list, but falls back to creating a full-fledged iterator in
1181 # a list, but falls back to creating a full-fledged iterator in
1182 # general. That is much slower than simply accessing and storing the
1182 # general. That is much slower than simply accessing and storing the
1183 # tuple members one by one.
1183 # tuple members one by one.
1184 t = dmap[fn]
1184 t = dmap[fn]
1185 state = t[0]
1185 state = t[0]
1186 mode = t[1]
1186 mode = t[1]
1187 size = t[2]
1187 size = t[2]
1188 time = t[3]
1188 time = t[3]
1189
1189
1190 if not st and state in "nma":
1190 if not st and state in "nma":
1191 dadd(fn)
1191 dadd(fn)
1192 elif state == 'n':
1192 elif state == 'n':
1193 if (size >= 0 and
1193 if (size >= 0 and
1194 ((size != st.st_size and size != st.st_size & _rangemask)
1194 ((size != st.st_size and size != st.st_size & _rangemask)
1195 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1195 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1196 or size == -2 # other parent
1196 or size == -2 # other parent
1197 or fn in copymap):
1197 or fn in copymap):
1198 madd(fn)
1198 madd(fn)
1199 elif time != st.st_mtime and time != st.st_mtime & _rangemask:
1199 elif time != st.st_mtime and time != st.st_mtime & _rangemask:
1200 ladd(fn)
1200 ladd(fn)
1201 elif st.st_mtime == lastnormaltime:
1201 elif st.st_mtime == lastnormaltime:
1202 # fn may have just been marked as normal and it may have
1202 # fn may have just been marked as normal and it may have
1203 # changed in the same second without changing its size.
1203 # changed in the same second without changing its size.
1204 # This can happen if we quickly do multiple commits.
1204 # This can happen if we quickly do multiple commits.
1205 # Force lookup, so we don't miss such a racy file change.
1205 # Force lookup, so we don't miss such a racy file change.
1206 ladd(fn)
1206 ladd(fn)
1207 elif listclean:
1207 elif listclean:
1208 cadd(fn)
1208 cadd(fn)
1209 elif state == 'm':
1209 elif state == 'm':
1210 madd(fn)
1210 madd(fn)
1211 elif state == 'a':
1211 elif state == 'a':
1212 aadd(fn)
1212 aadd(fn)
1213 elif state == 'r':
1213 elif state == 'r':
1214 radd(fn)
1214 radd(fn)
1215
1215
1216 return (lookup, scmutil.status(modified, added, removed, deleted,
1216 return (lookup, scmutil.status(modified, added, removed, deleted,
1217 unknown, ignored, clean))
1217 unknown, ignored, clean))
1218
1218
1219 def matches(self, match):
1219 def matches(self, match):
1220 '''
1220 '''
1221 return files in the dirstate (in whatever state) filtered by match
1221 return files in the dirstate (in whatever state) filtered by match
1222 '''
1222 '''
1223 dmap = self._map
1223 dmap = self._map
1224 if match.always():
1224 if match.always():
1225 return dmap.keys()
1225 return dmap.keys()
1226 files = match.files()
1226 files = match.files()
1227 if match.isexact():
1227 if match.isexact():
1228 # fast path -- filter the other way around, since typically files is
1228 # fast path -- filter the other way around, since typically files is
1229 # much smaller than dmap
1229 # much smaller than dmap
1230 return [f for f in files if f in dmap]
1230 return [f for f in files if f in dmap]
1231 if match.prefix() and all(fn in dmap for fn in files):
1231 if match.prefix() and all(fn in dmap for fn in files):
1232 # fast path -- all the values are known to be files, so just return
1232 # fast path -- all the values are known to be files, so just return
1233 # that
1233 # that
1234 return list(files)
1234 return list(files)
1235 return [f for f in dmap if match(f)]
1235 return [f for f in dmap if match(f)]
1236
1236
1237 def _actualfilename(self, tr):
1237 def _actualfilename(self, tr):
1238 if tr:
1238 if tr:
1239 return self._pendingfilename
1239 return self._pendingfilename
1240 else:
1240 else:
1241 return self._filename
1241 return self._filename
1242
1242
1243 def savebackup(self, tr, backupname):
1243 def savebackup(self, tr, backupname):
1244 '''Save current dirstate into backup file'''
1244 '''Save current dirstate into backup file'''
1245 filename = self._actualfilename(tr)
1245 filename = self._actualfilename(tr)
1246 assert backupname != filename
1246 assert backupname != filename
1247
1247
1248 # use '_writedirstate' instead of 'write' to write changes certainly,
1248 # use '_writedirstate' instead of 'write' to write changes certainly,
1249 # because the latter omits writing out if transaction is running.
1249 # because the latter omits writing out if transaction is running.
1250 # output file will be used to create backup of dirstate at this point.
1250 # output file will be used to create backup of dirstate at this point.
1251 if self._dirty or not self._opener.exists(filename):
1251 if self._dirty or not self._opener.exists(filename):
1252 self._writedirstate(self._opener(filename, "w", atomictemp=True,
1252 self._writedirstate(self._opener(filename, "w", atomictemp=True,
1253 checkambig=True))
1253 checkambig=True))
1254
1254
1255 if tr:
1255 if tr:
1256 # ensure that subsequent tr.writepending returns True for
1256 # ensure that subsequent tr.writepending returns True for
1257 # changes written out above, even if dirstate is never
1257 # changes written out above, even if dirstate is never
1258 # changed after this
1258 # changed after this
1259 tr.addfilegenerator('dirstate', (self._filename,),
1259 tr.addfilegenerator('dirstate', (self._filename,),
1260 self._writedirstate, location='plain')
1260 self._writedirstate, location='plain')
1261
1261
1262 # ensure that pending file written above is unlinked at
1262 # ensure that pending file written above is unlinked at
1263 # failure, even if tr.writepending isn't invoked until the
1263 # failure, even if tr.writepending isn't invoked until the
1264 # end of this transaction
1264 # end of this transaction
1265 tr.registertmp(filename, location='plain')
1265 tr.registertmp(filename, location='plain')
1266
1266
1267 self._opener.tryunlink(backupname)
1267 self._opener.tryunlink(backupname)
1268 # hardlink backup is okay because _writedirstate is always called
1268 # hardlink backup is okay because _writedirstate is always called
1269 # with an "atomictemp=True" file.
1269 # with an "atomictemp=True" file.
1270 util.copyfile(self._opener.join(filename),
1270 util.copyfile(self._opener.join(filename),
1271 self._opener.join(backupname), hardlink=True)
1271 self._opener.join(backupname), hardlink=True)
1272
1272
1273 def restorebackup(self, tr, backupname):
1273 def restorebackup(self, tr, backupname):
1274 '''Restore dirstate by backup file'''
1274 '''Restore dirstate by backup file'''
1275 # this "invalidate()" prevents "wlock.release()" from writing
1275 # this "invalidate()" prevents "wlock.release()" from writing
1276 # changes of dirstate out after restoring from backup file
1276 # changes of dirstate out after restoring from backup file
1277 self.invalidate()
1277 self.invalidate()
1278 filename = self._actualfilename(tr)
1278 filename = self._actualfilename(tr)
1279 self._opener.rename(backupname, filename, checkambig=True)
1279 self._opener.rename(backupname, filename, checkambig=True)
1280
1280
1281 def clearbackup(self, tr, backupname):
1281 def clearbackup(self, tr, backupname):
1282 '''Clear backup file'''
1282 '''Clear backup file'''
1283 self._opener.unlink(backupname)
1283 self._opener.unlink(backupname)
1284
1284
1285 class dirstatemap(object):
1285 class dirstatemap(object):
1286 def __init__(self, ui, opener, root):
1286 def __init__(self, ui, opener, root):
1287 self._ui = ui
1287 self._ui = ui
1288 self._opener = opener
1288 self._opener = opener
1289 self._root = root
1289 self._root = root
1290 self._filename = 'dirstate'
1290 self._filename = 'dirstate'
1291
1291
1292 self._map = {}
1292 self._map = {}
1293 self.copymap = {}
1293 self.copymap = {}
1294 self._parents = None
1294 self._parents = None
1295 self._dirtyparents = False
1295 self._dirtyparents = False
1296
1296
1297 # for consistent view between _pl() and _read() invocations
1297 # for consistent view between _pl() and _read() invocations
1298 self._pendingmode = None
1298 self._pendingmode = None
1299
1299
1300 def iteritems(self):
1300 def iteritems(self):
1301 return self._map.iteritems()
1301 return self._map.iteritems()
1302
1302
1303 def __len__(self):
1304 return len(self._map)
1305
1303 def __iter__(self):
1306 def __iter__(self):
1304 return iter(self._map)
1307 return iter(self._map)
1305
1308
1306 def get(self, key, default=None):
1309 def get(self, key, default=None):
1307 return self._map.get(key, default)
1310 return self._map.get(key, default)
1308
1311
1309 def __contains__(self, key):
1312 def __contains__(self, key):
1310 return key in self._map
1313 return key in self._map
1311
1314
1312 def __setitem__(self, key, value):
1315 def __setitem__(self, key, value):
1313 self._map[key] = value
1316 self._map[key] = value
1314
1317
1315 def __getitem__(self, key):
1318 def __getitem__(self, key):
1316 return self._map[key]
1319 return self._map[key]
1317
1320
1318 def __delitem__(self, key):
1321 def __delitem__(self, key):
1319 del self._map[key]
1322 del self._map[key]
1320
1323
1321 def keys(self):
1324 def keys(self):
1322 return self._map.keys()
1325 return self._map.keys()
1323
1326
1324 def nonnormalentries(self):
1327 def nonnormalentries(self):
1325 '''Compute the nonnormal dirstate entries from the dmap'''
1328 '''Compute the nonnormal dirstate entries from the dmap'''
1326 try:
1329 try:
1327 return parsers.nonnormalotherparententries(self._map)
1330 return parsers.nonnormalotherparententries(self._map)
1328 except AttributeError:
1331 except AttributeError:
1329 nonnorm = set()
1332 nonnorm = set()
1330 otherparent = set()
1333 otherparent = set()
1331 for fname, e in self._map.iteritems():
1334 for fname, e in self._map.iteritems():
1332 if e[0] != 'n' or e[3] == -1:
1335 if e[0] != 'n' or e[3] == -1:
1333 nonnorm.add(fname)
1336 nonnorm.add(fname)
1334 if e[0] == 'n' and e[2] == -2:
1337 if e[0] == 'n' and e[2] == -2:
1335 otherparent.add(fname)
1338 otherparent.add(fname)
1336 return nonnorm, otherparent
1339 return nonnorm, otherparent
1337
1340
1338 def filefoldmap(self):
1341 def filefoldmap(self):
1339 """Returns a dictionary mapping normalized case paths to their
1342 """Returns a dictionary mapping normalized case paths to their
1340 non-normalized versions.
1343 non-normalized versions.
1341 """
1344 """
1342 try:
1345 try:
1343 makefilefoldmap = parsers.make_file_foldmap
1346 makefilefoldmap = parsers.make_file_foldmap
1344 except AttributeError:
1347 except AttributeError:
1345 pass
1348 pass
1346 else:
1349 else:
1347 return makefilefoldmap(self._map, util.normcasespec,
1350 return makefilefoldmap(self._map, util.normcasespec,
1348 util.normcasefallback)
1351 util.normcasefallback)
1349
1352
1350 f = {}
1353 f = {}
1351 normcase = util.normcase
1354 normcase = util.normcase
1352 for name, s in self._map.iteritems():
1355 for name, s in self._map.iteritems():
1353 if s[0] != 'r':
1356 if s[0] != 'r':
1354 f[normcase(name)] = name
1357 f[normcase(name)] = name
1355 f['.'] = '.' # prevents useless util.fspath() invocation
1358 f['.'] = '.' # prevents useless util.fspath() invocation
1356 return f
1359 return f
1357
1360
1358 def dirs(self):
1361 def dirs(self):
1359 """Returns a set-like object containing all the directories in the
1362 """Returns a set-like object containing all the directories in the
1360 current dirstate.
1363 current dirstate.
1361 """
1364 """
1362 return util.dirs(self._map, 'r')
1365 return util.dirs(self._map, 'r')
1363
1366
1364 def _opendirstatefile(self):
1367 def _opendirstatefile(self):
1365 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1368 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1366 if self._pendingmode is not None and self._pendingmode != mode:
1369 if self._pendingmode is not None and self._pendingmode != mode:
1367 fp.close()
1370 fp.close()
1368 raise error.Abort(_('working directory state may be '
1371 raise error.Abort(_('working directory state may be '
1369 'changed parallelly'))
1372 'changed parallelly'))
1370 self._pendingmode = mode
1373 self._pendingmode = mode
1371 return fp
1374 return fp
1372
1375
1373 def parents(self):
1376 def parents(self):
1374 if not self._parents:
1377 if not self._parents:
1375 try:
1378 try:
1376 fp = self._opendirstatefile()
1379 fp = self._opendirstatefile()
1377 st = fp.read(40)
1380 st = fp.read(40)
1378 fp.close()
1381 fp.close()
1379 except IOError as err:
1382 except IOError as err:
1380 if err.errno != errno.ENOENT:
1383 if err.errno != errno.ENOENT:
1381 raise
1384 raise
1382 # File doesn't exist, so the current state is empty
1385 # File doesn't exist, so the current state is empty
1383 st = ''
1386 st = ''
1384
1387
1385 l = len(st)
1388 l = len(st)
1386 if l == 40:
1389 if l == 40:
1387 self._parents = st[:20], st[20:40]
1390 self._parents = st[:20], st[20:40]
1388 elif l == 0:
1391 elif l == 0:
1389 self._parents = [nullid, nullid]
1392 self._parents = [nullid, nullid]
1390 else:
1393 else:
1391 raise error.Abort(_('working directory state appears '
1394 raise error.Abort(_('working directory state appears '
1392 'damaged!'))
1395 'damaged!'))
1393
1396
1394 return self._parents
1397 return self._parents
1395
1398
1396 def setparents(self, p1, p2):
1399 def setparents(self, p1, p2):
1397 self._parents = (p1, p2)
1400 self._parents = (p1, p2)
1398 self._dirtyparents = True
1401 self._dirtyparents = True
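
Editor's note: the __len__ added to dirstatemap in this changeset simply delegates to the wrapped dict, matching the __iter__, __contains__, __getitem__ and get methods already defined on the class. Python's len() never falls back to iteration, so a wrapper class that replaces a plain dict has to forward __len__ explicitly or callers that do len(dmap) fail with a TypeError. The snippet below is a minimal, self-contained sketch of that delegation pattern; the fakedirstatemap name and its sample entries are invented for illustration and are not part of Mercurial's API.

# Minimal sketch (not Mercurial code): a mapping-style wrapper that
# delegates the container protocol to an internal dict, the same shape
# as the dunder methods on dirstatemap above.
class fakedirstatemap(object):
    def __init__(self):
        # filename -> (state, mode, size, mtime), like a dirstate entry;
        # the sample values are made up.
        self._map = {
            b'a.txt': (b'n', 0o644, 12, 1500000000),
            b'b.txt': (b'a', 0o644, -1, -1),
        }

    def __len__(self):
        # the delegation this changeset adds: len(dmap) reports the entry count
        return len(self._map)

    def __iter__(self):
        return iter(self._map)

    def __contains__(self, key):
        return key in self._map

    def __getitem__(self, key):
        return self._map[key]

    def get(self, key, default=None):
        return self._map.get(key, default)

dmap = fakedirstatemap()
assert len(dmap) == 2                        # works only because __len__ is forwarded
assert b'a.txt' in dmap                      # __contains__
assert sorted(dmap) == [b'a.txt', b'b.txt']  # __iter__

Without the __len__ forwarding, the membership and iteration checks would still pass, but len(dmap) would raise TypeError, which is the kind of breakage the delegation above prevents.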
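
A second note, on the status() checks above: comparisons of the form size != st.st_size and size != st.st_size & _rangemask appear because the on-disk dirstate stores size and mtime in 32-bit fields, so large values are masked with _rangemask (the 31-bit mask used throughout this file) when they are recorded. The tiny worked example below uses made-up numbers to show why the masked comparison is tried as well; it is an illustration under that assumption, not code from this file.

# _rangemask mirrors the module-level 31-bit mask used in the checks above.
_rangemask = 0x7fffffff

st_size = 0x200000010              # hypothetical stat size wider than 31 bits
recorded = st_size & _rangemask    # what a masked dirstate entry would hold (16)

# status() flags the file as modified only if the recorded size matches
# neither the raw stat value nor its masked form, so this file is not
# spuriously reported as modified just because its size overflowed the field.
assert recorded != st_size
assert recorded == st_size & _rangemask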