##// END OF EJS Templates
dirstate: move write into dirstatemap...
Durham Goode -
r34674:e2214632 default
parent child Browse files
Show More
@@ -1,1403 +1,1407 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import nullid
17 from .node import nullid
18 from . import (
18 from . import (
19 encoding,
19 encoding,
20 error,
20 error,
21 match as matchmod,
21 match as matchmod,
22 pathutil,
22 pathutil,
23 policy,
23 policy,
24 pycompat,
24 pycompat,
25 scmutil,
25 scmutil,
26 txnutil,
26 txnutil,
27 util,
27 util,
28 )
28 )
29
29
30 parsers = policy.importmod(r'parsers')
30 parsers = policy.importmod(r'parsers')
31
31
32 propertycache = util.propertycache
32 propertycache = util.propertycache
33 filecache = scmutil.filecache
33 filecache = scmutil.filecache
34 _rangemask = 0x7fffffff
34 _rangemask = 0x7fffffff
35
35
36 dirstatetuple = parsers.dirstatetuple
36 dirstatetuple = parsers.dirstatetuple
37
37
38 class repocache(filecache):
38 class repocache(filecache):
39 """filecache for files in .hg/"""
39 """filecache for files in .hg/"""
40 def join(self, obj, fname):
40 def join(self, obj, fname):
41 return obj._opener.join(fname)
41 return obj._opener.join(fname)
42
42
43 class rootcache(filecache):
43 class rootcache(filecache):
44 """filecache for files in the repository root"""
44 """filecache for files in the repository root"""
45 def join(self, obj, fname):
45 def join(self, obj, fname):
46 return obj._join(fname)
46 return obj._join(fname)
47
47
48 def _getfsnow(vfs):
48 def _getfsnow(vfs):
49 '''Get "now" timestamp on filesystem'''
49 '''Get "now" timestamp on filesystem'''
50 tmpfd, tmpname = vfs.mkstemp()
50 tmpfd, tmpname = vfs.mkstemp()
51 try:
51 try:
52 return os.fstat(tmpfd).st_mtime
52 return os.fstat(tmpfd).st_mtime
53 finally:
53 finally:
54 os.close(tmpfd)
54 os.close(tmpfd)
55 vfs.unlink(tmpname)
55 vfs.unlink(tmpname)
56
56
57 class dirstate(object):
57 class dirstate(object):
58
58
59 def __init__(self, opener, ui, root, validate, sparsematchfn):
59 def __init__(self, opener, ui, root, validate, sparsematchfn):
60 '''Create a new dirstate object.
60 '''Create a new dirstate object.
61
61
62 opener is an open()-like callable that can be used to open the
62 opener is an open()-like callable that can be used to open the
63 dirstate file; root is the root of the directory tracked by
63 dirstate file; root is the root of the directory tracked by
64 the dirstate.
64 the dirstate.
65 '''
65 '''
66 self._opener = opener
66 self._opener = opener
67 self._validate = validate
67 self._validate = validate
68 self._root = root
68 self._root = root
69 self._sparsematchfn = sparsematchfn
69 self._sparsematchfn = sparsematchfn
70 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
70 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
71 # UNC path pointing to root share (issue4557)
71 # UNC path pointing to root share (issue4557)
72 self._rootdir = pathutil.normasprefix(root)
72 self._rootdir = pathutil.normasprefix(root)
73 self._dirty = False
73 self._dirty = False
74 self._lastnormaltime = 0
74 self._lastnormaltime = 0
75 self._ui = ui
75 self._ui = ui
76 self._filecache = {}
76 self._filecache = {}
77 self._parentwriters = 0
77 self._parentwriters = 0
78 self._filename = 'dirstate'
78 self._filename = 'dirstate'
79 self._pendingfilename = '%s.pending' % self._filename
79 self._pendingfilename = '%s.pending' % self._filename
80 self._plchangecallbacks = {}
80 self._plchangecallbacks = {}
81 self._origpl = None
81 self._origpl = None
82 self._updatedfiles = set()
82 self._updatedfiles = set()
83
83
84 @contextlib.contextmanager
84 @contextlib.contextmanager
85 def parentchange(self):
85 def parentchange(self):
86 '''Context manager for handling dirstate parents.
86 '''Context manager for handling dirstate parents.
87
87
88 If an exception occurs in the scope of the context manager,
88 If an exception occurs in the scope of the context manager,
89 the incoherent dirstate won't be written when wlock is
89 the incoherent dirstate won't be written when wlock is
90 released.
90 released.
91 '''
91 '''
92 self._parentwriters += 1
92 self._parentwriters += 1
93 yield
93 yield
94 # Typically we want the "undo" step of a context manager in a
94 # Typically we want the "undo" step of a context manager in a
95 # finally block so it happens even when an exception
95 # finally block so it happens even when an exception
96 # occurs. In this case, however, we only want to decrement
96 # occurs. In this case, however, we only want to decrement
97 # parentwriters if the code in the with statement exits
97 # parentwriters if the code in the with statement exits
98 # normally, so we don't have a try/finally here on purpose.
98 # normally, so we don't have a try/finally here on purpose.
99 self._parentwriters -= 1
99 self._parentwriters -= 1
100
100
101 def beginparentchange(self):
101 def beginparentchange(self):
102 '''Marks the beginning of a set of changes that involve changing
102 '''Marks the beginning of a set of changes that involve changing
103 the dirstate parents. If there is an exception during this time,
103 the dirstate parents. If there is an exception during this time,
104 the dirstate will not be written when the wlock is released. This
104 the dirstate will not be written when the wlock is released. This
105 prevents writing an incoherent dirstate where the parent doesn't
105 prevents writing an incoherent dirstate where the parent doesn't
106 match the contents.
106 match the contents.
107 '''
107 '''
108 self._ui.deprecwarn('beginparentchange is obsoleted by the '
108 self._ui.deprecwarn('beginparentchange is obsoleted by the '
109 'parentchange context manager.', '4.3')
109 'parentchange context manager.', '4.3')
110 self._parentwriters += 1
110 self._parentwriters += 1
111
111
112 def endparentchange(self):
112 def endparentchange(self):
113 '''Marks the end of a set of changes that involve changing the
113 '''Marks the end of a set of changes that involve changing the
114 dirstate parents. Once all parent changes have been marked done,
114 dirstate parents. Once all parent changes have been marked done,
115 the wlock will be free to write the dirstate on release.
115 the wlock will be free to write the dirstate on release.
116 '''
116 '''
117 self._ui.deprecwarn('endparentchange is obsoleted by the '
117 self._ui.deprecwarn('endparentchange is obsoleted by the '
118 'parentchange context manager.', '4.3')
118 'parentchange context manager.', '4.3')
119 if self._parentwriters > 0:
119 if self._parentwriters > 0:
120 self._parentwriters -= 1
120 self._parentwriters -= 1
121
121
122 def pendingparentchange(self):
122 def pendingparentchange(self):
123 '''Returns true if the dirstate is in the middle of a set of changes
123 '''Returns true if the dirstate is in the middle of a set of changes
124 that modify the dirstate parent.
124 that modify the dirstate parent.
125 '''
125 '''
126 return self._parentwriters > 0
126 return self._parentwriters > 0
127
127
128 @propertycache
128 @propertycache
129 def _map(self):
129 def _map(self):
130 '''Return the dirstate contents as a map from filename to
130 '''Return the dirstate contents as a map from filename to
131 (state, mode, size, time).'''
131 (state, mode, size, time).'''
132 self._read()
132 self._read()
133 return self._map
133 return self._map
134
134
135 @propertycache
135 @propertycache
136 def _identity(self):
136 def _identity(self):
137 self._read()
137 self._read()
138 return self._identity
138 return self._identity
139
139
140 @propertycache
140 @propertycache
141 def _nonnormalset(self):
141 def _nonnormalset(self):
142 nonnorm, otherparents = self._map.nonnormalentries()
142 nonnorm, otherparents = self._map.nonnormalentries()
143 self._otherparentset = otherparents
143 self._otherparentset = otherparents
144 return nonnorm
144 return nonnorm
145
145
146 @propertycache
146 @propertycache
147 def _otherparentset(self):
147 def _otherparentset(self):
148 nonnorm, otherparents = self._map.nonnormalentries()
148 nonnorm, otherparents = self._map.nonnormalentries()
149 self._nonnormalset = nonnorm
149 self._nonnormalset = nonnorm
150 return otherparents
150 return otherparents
151
151
152 @propertycache
152 @propertycache
153 def _filefoldmap(self):
153 def _filefoldmap(self):
154 return self._map.filefoldmap()
154 return self._map.filefoldmap()
155
155
156 @propertycache
156 @propertycache
157 def _dirfoldmap(self):
157 def _dirfoldmap(self):
158 f = {}
158 f = {}
159 normcase = util.normcase
159 normcase = util.normcase
160 for name in self._dirs:
160 for name in self._dirs:
161 f[normcase(name)] = name
161 f[normcase(name)] = name
162 return f
162 return f
163
163
164 @property
164 @property
165 def _sparsematcher(self):
165 def _sparsematcher(self):
166 """The matcher for the sparse checkout.
166 """The matcher for the sparse checkout.
167
167
168 The working directory may not include every file from a manifest. The
168 The working directory may not include every file from a manifest. The
169 matcher obtained by this property will match a path if it is to be
169 matcher obtained by this property will match a path if it is to be
170 included in the working directory.
170 included in the working directory.
171 """
171 """
172 # TODO there is potential to cache this property. For now, the matcher
172 # TODO there is potential to cache this property. For now, the matcher
173 # is resolved on every access. (But the called function does use a
173 # is resolved on every access. (But the called function does use a
174 # cache to keep the lookup fast.)
174 # cache to keep the lookup fast.)
175 return self._sparsematchfn()
175 return self._sparsematchfn()
176
176
177 @repocache('branch')
177 @repocache('branch')
178 def _branch(self):
178 def _branch(self):
179 try:
179 try:
180 return self._opener.read("branch").strip() or "default"
180 return self._opener.read("branch").strip() or "default"
181 except IOError as inst:
181 except IOError as inst:
182 if inst.errno != errno.ENOENT:
182 if inst.errno != errno.ENOENT:
183 raise
183 raise
184 return "default"
184 return "default"
185
185
186 @property
186 @property
187 def _pl(self):
187 def _pl(self):
188 return self._map.parents()
188 return self._map.parents()
189
189
190 @propertycache
190 @propertycache
191 def _dirs(self):
191 def _dirs(self):
192 return self._map.dirs()
192 return self._map.dirs()
193
193
194 def dirs(self):
194 def dirs(self):
195 return self._dirs
195 return self._dirs
196
196
197 @rootcache('.hgignore')
197 @rootcache('.hgignore')
198 def _ignore(self):
198 def _ignore(self):
199 files = self._ignorefiles()
199 files = self._ignorefiles()
200 if not files:
200 if not files:
201 return matchmod.never(self._root, '')
201 return matchmod.never(self._root, '')
202
202
203 pats = ['include:%s' % f for f in files]
203 pats = ['include:%s' % f for f in files]
204 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
204 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
205
205
206 @propertycache
206 @propertycache
207 def _slash(self):
207 def _slash(self):
208 return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'
208 return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'
209
209
210 @propertycache
210 @propertycache
211 def _checklink(self):
211 def _checklink(self):
212 return util.checklink(self._root)
212 return util.checklink(self._root)
213
213
214 @propertycache
214 @propertycache
215 def _checkexec(self):
215 def _checkexec(self):
216 return util.checkexec(self._root)
216 return util.checkexec(self._root)
217
217
218 @propertycache
218 @propertycache
219 def _checkcase(self):
219 def _checkcase(self):
220 return not util.fscasesensitive(self._join('.hg'))
220 return not util.fscasesensitive(self._join('.hg'))
221
221
222 def _join(self, f):
222 def _join(self, f):
223 # much faster than os.path.join()
223 # much faster than os.path.join()
224 # it's safe because f is always a relative path
224 # it's safe because f is always a relative path
225 return self._rootdir + f
225 return self._rootdir + f
226
226
227 def flagfunc(self, buildfallback):
227 def flagfunc(self, buildfallback):
228 if self._checklink and self._checkexec:
228 if self._checklink and self._checkexec:
229 def f(x):
229 def f(x):
230 try:
230 try:
231 st = os.lstat(self._join(x))
231 st = os.lstat(self._join(x))
232 if util.statislink(st):
232 if util.statislink(st):
233 return 'l'
233 return 'l'
234 if util.statisexec(st):
234 if util.statisexec(st):
235 return 'x'
235 return 'x'
236 except OSError:
236 except OSError:
237 pass
237 pass
238 return ''
238 return ''
239 return f
239 return f
240
240
241 fallback = buildfallback()
241 fallback = buildfallback()
242 if self._checklink:
242 if self._checklink:
243 def f(x):
243 def f(x):
244 if os.path.islink(self._join(x)):
244 if os.path.islink(self._join(x)):
245 return 'l'
245 return 'l'
246 if 'x' in fallback(x):
246 if 'x' in fallback(x):
247 return 'x'
247 return 'x'
248 return ''
248 return ''
249 return f
249 return f
250 if self._checkexec:
250 if self._checkexec:
251 def f(x):
251 def f(x):
252 if 'l' in fallback(x):
252 if 'l' in fallback(x):
253 return 'l'
253 return 'l'
254 if util.isexec(self._join(x)):
254 if util.isexec(self._join(x)):
255 return 'x'
255 return 'x'
256 return ''
256 return ''
257 return f
257 return f
258 else:
258 else:
259 return fallback
259 return fallback
260
260
261 @propertycache
261 @propertycache
262 def _cwd(self):
262 def _cwd(self):
263 # internal config: ui.forcecwd
263 # internal config: ui.forcecwd
264 forcecwd = self._ui.config('ui', 'forcecwd')
264 forcecwd = self._ui.config('ui', 'forcecwd')
265 if forcecwd:
265 if forcecwd:
266 return forcecwd
266 return forcecwd
267 return pycompat.getcwd()
267 return pycompat.getcwd()
268
268
269 def getcwd(self):
269 def getcwd(self):
270 '''Return the path from which a canonical path is calculated.
270 '''Return the path from which a canonical path is calculated.
271
271
272 This path should be used to resolve file patterns or to convert
272 This path should be used to resolve file patterns or to convert
273 canonical paths back to file paths for display. It shouldn't be
273 canonical paths back to file paths for display. It shouldn't be
274 used to get real file paths. Use vfs functions instead.
274 used to get real file paths. Use vfs functions instead.
275 '''
275 '''
276 cwd = self._cwd
276 cwd = self._cwd
277 if cwd == self._root:
277 if cwd == self._root:
278 return ''
278 return ''
279 # self._root ends with a path separator if self._root is '/' or 'C:\'
279 # self._root ends with a path separator if self._root is '/' or 'C:\'
280 rootsep = self._root
280 rootsep = self._root
281 if not util.endswithsep(rootsep):
281 if not util.endswithsep(rootsep):
282 rootsep += pycompat.ossep
282 rootsep += pycompat.ossep
283 if cwd.startswith(rootsep):
283 if cwd.startswith(rootsep):
284 return cwd[len(rootsep):]
284 return cwd[len(rootsep):]
285 else:
285 else:
286 # we're outside the repo. return an absolute path.
286 # we're outside the repo. return an absolute path.
287 return cwd
287 return cwd
288
288
289 def pathto(self, f, cwd=None):
289 def pathto(self, f, cwd=None):
290 if cwd is None:
290 if cwd is None:
291 cwd = self.getcwd()
291 cwd = self.getcwd()
292 path = util.pathto(self._root, cwd, f)
292 path = util.pathto(self._root, cwd, f)
293 if self._slash:
293 if self._slash:
294 return util.pconvert(path)
294 return util.pconvert(path)
295 return path
295 return path
296
296
297 def __getitem__(self, key):
297 def __getitem__(self, key):
298 '''Return the current state of key (a filename) in the dirstate.
298 '''Return the current state of key (a filename) in the dirstate.
299
299
300 States are:
300 States are:
301 n normal
301 n normal
302 m needs merging
302 m needs merging
303 r marked for removal
303 r marked for removal
304 a marked for addition
304 a marked for addition
305 ? not tracked
305 ? not tracked
306 '''
306 '''
307 return self._map.get(key, ("?",))[0]
307 return self._map.get(key, ("?",))[0]
308
308
309 def __contains__(self, key):
309 def __contains__(self, key):
310 return key in self._map
310 return key in self._map
311
311
312 def __iter__(self):
312 def __iter__(self):
313 return iter(sorted(self._map))
313 return iter(sorted(self._map))
314
314
315 def items(self):
315 def items(self):
316 return self._map.iteritems()
316 return self._map.iteritems()
317
317
318 iteritems = items
318 iteritems = items
319
319
320 def parents(self):
320 def parents(self):
321 return [self._validate(p) for p in self._pl]
321 return [self._validate(p) for p in self._pl]
322
322
323 def p1(self):
323 def p1(self):
324 return self._validate(self._pl[0])
324 return self._validate(self._pl[0])
325
325
326 def p2(self):
326 def p2(self):
327 return self._validate(self._pl[1])
327 return self._validate(self._pl[1])
328
328
329 def branch(self):
329 def branch(self):
330 return encoding.tolocal(self._branch)
330 return encoding.tolocal(self._branch)
331
331
332 def setparents(self, p1, p2=nullid):
332 def setparents(self, p1, p2=nullid):
333 """Set dirstate parents to p1 and p2.
333 """Set dirstate parents to p1 and p2.
334
334
335 When moving from two parents to one, 'm' merged entries a
335 When moving from two parents to one, 'm' merged entries a
336 adjusted to normal and previous copy records discarded and
336 adjusted to normal and previous copy records discarded and
337 returned by the call.
337 returned by the call.
338
338
339 See localrepo.setparents()
339 See localrepo.setparents()
340 """
340 """
341 if self._parentwriters == 0:
341 if self._parentwriters == 0:
342 raise ValueError("cannot set dirstate parent without "
342 raise ValueError("cannot set dirstate parent without "
343 "calling dirstate.beginparentchange")
343 "calling dirstate.beginparentchange")
344
344
345 self._dirty = True
345 self._dirty = True
346 oldp2 = self._pl[1]
346 oldp2 = self._pl[1]
347 if self._origpl is None:
347 if self._origpl is None:
348 self._origpl = self._pl
348 self._origpl = self._pl
349 self._map.setparents(p1, p2)
349 self._map.setparents(p1, p2)
350 copies = {}
350 copies = {}
351 if oldp2 != nullid and p2 == nullid:
351 if oldp2 != nullid and p2 == nullid:
352 candidatefiles = self._nonnormalset.union(self._otherparentset)
352 candidatefiles = self._nonnormalset.union(self._otherparentset)
353 for f in candidatefiles:
353 for f in candidatefiles:
354 s = self._map.get(f)
354 s = self._map.get(f)
355 if s is None:
355 if s is None:
356 continue
356 continue
357
357
358 # Discard 'm' markers when moving away from a merge state
358 # Discard 'm' markers when moving away from a merge state
359 if s[0] == 'm':
359 if s[0] == 'm':
360 source = self._map.copymap.get(f)
360 source = self._map.copymap.get(f)
361 if source:
361 if source:
362 copies[f] = source
362 copies[f] = source
363 self.normallookup(f)
363 self.normallookup(f)
364 # Also fix up otherparent markers
364 # Also fix up otherparent markers
365 elif s[0] == 'n' and s[2] == -2:
365 elif s[0] == 'n' and s[2] == -2:
366 source = self._map.copymap.get(f)
366 source = self._map.copymap.get(f)
367 if source:
367 if source:
368 copies[f] = source
368 copies[f] = source
369 self.add(f)
369 self.add(f)
370 return copies
370 return copies
371
371
372 def setbranch(self, branch):
372 def setbranch(self, branch):
373 self._branch = encoding.fromlocal(branch)
373 self._branch = encoding.fromlocal(branch)
374 f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
374 f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
375 try:
375 try:
376 f.write(self._branch + '\n')
376 f.write(self._branch + '\n')
377 f.close()
377 f.close()
378
378
379 # make sure filecache has the correct stat info for _branch after
379 # make sure filecache has the correct stat info for _branch after
380 # replacing the underlying file
380 # replacing the underlying file
381 ce = self._filecache['_branch']
381 ce = self._filecache['_branch']
382 if ce:
382 if ce:
383 ce.refresh()
383 ce.refresh()
384 except: # re-raises
384 except: # re-raises
385 f.discard()
385 f.discard()
386 raise
386 raise
387
387
388 def _read(self):
388 def _read(self):
389 # ignore HG_PENDING because identity is used only for writing
389 # ignore HG_PENDING because identity is used only for writing
390 self._identity = util.filestat.frompath(
390 self._identity = util.filestat.frompath(
391 self._opener.join(self._filename))
391 self._opener.join(self._filename))
392 self._map = dirstatemap(self._ui, self._opener, self._root)
392 self._map = dirstatemap(self._ui, self._opener, self._root)
393 self._map.read()
393 self._map.read()
394
394
395 def invalidate(self):
395 def invalidate(self):
396 '''Causes the next access to reread the dirstate.
396 '''Causes the next access to reread the dirstate.
397
397
398 This is different from localrepo.invalidatedirstate() because it always
398 This is different from localrepo.invalidatedirstate() because it always
399 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
399 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
400 check whether the dirstate has changed before rereading it.'''
400 check whether the dirstate has changed before rereading it.'''
401
401
402 for a in ("_map", "_identity",
402 for a in ("_map", "_identity",
403 "_filefoldmap", "_dirfoldmap", "_branch",
403 "_filefoldmap", "_dirfoldmap", "_branch",
404 "_dirs", "_ignore", "_nonnormalset",
404 "_dirs", "_ignore", "_nonnormalset",
405 "_otherparentset"):
405 "_otherparentset"):
406 if a in self.__dict__:
406 if a in self.__dict__:
407 delattr(self, a)
407 delattr(self, a)
408 self._lastnormaltime = 0
408 self._lastnormaltime = 0
409 self._dirty = False
409 self._dirty = False
410 self._updatedfiles.clear()
410 self._updatedfiles.clear()
411 self._parentwriters = 0
411 self._parentwriters = 0
412 self._origpl = None
412 self._origpl = None
413
413
414 def copy(self, source, dest):
414 def copy(self, source, dest):
415 """Mark dest as a copy of source. Unmark dest if source is None."""
415 """Mark dest as a copy of source. Unmark dest if source is None."""
416 if source == dest:
416 if source == dest:
417 return
417 return
418 self._dirty = True
418 self._dirty = True
419 if source is not None:
419 if source is not None:
420 self._map.copymap[dest] = source
420 self._map.copymap[dest] = source
421 self._updatedfiles.add(source)
421 self._updatedfiles.add(source)
422 self._updatedfiles.add(dest)
422 self._updatedfiles.add(dest)
423 elif self._map.copymap.pop(dest, None):
423 elif self._map.copymap.pop(dest, None):
424 self._updatedfiles.add(dest)
424 self._updatedfiles.add(dest)
425
425
426 def copied(self, file):
426 def copied(self, file):
427 return self._map.copymap.get(file, None)
427 return self._map.copymap.get(file, None)
428
428
429 def copies(self):
429 def copies(self):
430 return self._map.copymap
430 return self._map.copymap
431
431
432 def _droppath(self, f):
432 def _droppath(self, f):
433 if self[f] not in "?r" and "_dirs" in self.__dict__:
433 if self[f] not in "?r" and "_dirs" in self.__dict__:
434 self._dirs.delpath(f)
434 self._dirs.delpath(f)
435
435
436 if "_filefoldmap" in self.__dict__:
436 if "_filefoldmap" in self.__dict__:
437 normed = util.normcase(f)
437 normed = util.normcase(f)
438 if normed in self._filefoldmap:
438 if normed in self._filefoldmap:
439 del self._filefoldmap[normed]
439 del self._filefoldmap[normed]
440
440
441 self._updatedfiles.add(f)
441 self._updatedfiles.add(f)
442
442
443 def _addpath(self, f, state, mode, size, mtime):
443 def _addpath(self, f, state, mode, size, mtime):
444 oldstate = self[f]
444 oldstate = self[f]
445 if state == 'a' or oldstate == 'r':
445 if state == 'a' or oldstate == 'r':
446 scmutil.checkfilename(f)
446 scmutil.checkfilename(f)
447 if f in self._dirs:
447 if f in self._dirs:
448 raise error.Abort(_('directory %r already in dirstate') % f)
448 raise error.Abort(_('directory %r already in dirstate') % f)
449 # shadows
449 # shadows
450 for d in util.finddirs(f):
450 for d in util.finddirs(f):
451 if d in self._dirs:
451 if d in self._dirs:
452 break
452 break
453 entry = self._map.get(d)
453 entry = self._map.get(d)
454 if entry is not None and entry[0] != 'r':
454 if entry is not None and entry[0] != 'r':
455 raise error.Abort(
455 raise error.Abort(
456 _('file %r in dirstate clashes with %r') % (d, f))
456 _('file %r in dirstate clashes with %r') % (d, f))
457 if oldstate in "?r" and "_dirs" in self.__dict__:
457 if oldstate in "?r" and "_dirs" in self.__dict__:
458 self._dirs.addpath(f)
458 self._dirs.addpath(f)
459 self._dirty = True
459 self._dirty = True
460 self._updatedfiles.add(f)
460 self._updatedfiles.add(f)
461 self._map[f] = dirstatetuple(state, mode, size, mtime)
461 self._map[f] = dirstatetuple(state, mode, size, mtime)
462 if state != 'n' or mtime == -1:
462 if state != 'n' or mtime == -1:
463 self._nonnormalset.add(f)
463 self._nonnormalset.add(f)
464 if size == -2:
464 if size == -2:
465 self._otherparentset.add(f)
465 self._otherparentset.add(f)
466
466
467 def normal(self, f):
467 def normal(self, f):
468 '''Mark a file normal and clean.'''
468 '''Mark a file normal and clean.'''
469 s = os.lstat(self._join(f))
469 s = os.lstat(self._join(f))
470 mtime = s.st_mtime
470 mtime = s.st_mtime
471 self._addpath(f, 'n', s.st_mode,
471 self._addpath(f, 'n', s.st_mode,
472 s.st_size & _rangemask, mtime & _rangemask)
472 s.st_size & _rangemask, mtime & _rangemask)
473 self._map.copymap.pop(f, None)
473 self._map.copymap.pop(f, None)
474 if f in self._nonnormalset:
474 if f in self._nonnormalset:
475 self._nonnormalset.remove(f)
475 self._nonnormalset.remove(f)
476 if mtime > self._lastnormaltime:
476 if mtime > self._lastnormaltime:
477 # Remember the most recent modification timeslot for status(),
477 # Remember the most recent modification timeslot for status(),
478 # to make sure we won't miss future size-preserving file content
478 # to make sure we won't miss future size-preserving file content
479 # modifications that happen within the same timeslot.
479 # modifications that happen within the same timeslot.
480 self._lastnormaltime = mtime
480 self._lastnormaltime = mtime
481
481
482 def normallookup(self, f):
482 def normallookup(self, f):
483 '''Mark a file normal, but possibly dirty.'''
483 '''Mark a file normal, but possibly dirty.'''
484 if self._pl[1] != nullid:
484 if self._pl[1] != nullid:
485 # if there is a merge going on and the file was either
485 # if there is a merge going on and the file was either
486 # in state 'm' (-1) or coming from other parent (-2) before
486 # in state 'm' (-1) or coming from other parent (-2) before
487 # being removed, restore that state.
487 # being removed, restore that state.
488 entry = self._map.get(f)
488 entry = self._map.get(f)
489 if entry is not None:
489 if entry is not None:
490 if entry[0] == 'r' and entry[2] in (-1, -2):
490 if entry[0] == 'r' and entry[2] in (-1, -2):
491 source = self._map.copymap.get(f)
491 source = self._map.copymap.get(f)
492 if entry[2] == -1:
492 if entry[2] == -1:
493 self.merge(f)
493 self.merge(f)
494 elif entry[2] == -2:
494 elif entry[2] == -2:
495 self.otherparent(f)
495 self.otherparent(f)
496 if source:
496 if source:
497 self.copy(source, f)
497 self.copy(source, f)
498 return
498 return
499 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
499 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
500 return
500 return
501 self._addpath(f, 'n', 0, -1, -1)
501 self._addpath(f, 'n', 0, -1, -1)
502 self._map.copymap.pop(f, None)
502 self._map.copymap.pop(f, None)
503 if f in self._nonnormalset:
503 if f in self._nonnormalset:
504 self._nonnormalset.remove(f)
504 self._nonnormalset.remove(f)
505
505
506 def otherparent(self, f):
506 def otherparent(self, f):
507 '''Mark as coming from the other parent, always dirty.'''
507 '''Mark as coming from the other parent, always dirty.'''
508 if self._pl[1] == nullid:
508 if self._pl[1] == nullid:
509 raise error.Abort(_("setting %r to other parent "
509 raise error.Abort(_("setting %r to other parent "
510 "only allowed in merges") % f)
510 "only allowed in merges") % f)
511 if f in self and self[f] == 'n':
511 if f in self and self[f] == 'n':
512 # merge-like
512 # merge-like
513 self._addpath(f, 'm', 0, -2, -1)
513 self._addpath(f, 'm', 0, -2, -1)
514 else:
514 else:
515 # add-like
515 # add-like
516 self._addpath(f, 'n', 0, -2, -1)
516 self._addpath(f, 'n', 0, -2, -1)
517 self._map.copymap.pop(f, None)
517 self._map.copymap.pop(f, None)
518
518
519 def add(self, f):
519 def add(self, f):
520 '''Mark a file added.'''
520 '''Mark a file added.'''
521 self._addpath(f, 'a', 0, -1, -1)
521 self._addpath(f, 'a', 0, -1, -1)
522 self._map.copymap.pop(f, None)
522 self._map.copymap.pop(f, None)
523
523
524 def remove(self, f):
524 def remove(self, f):
525 '''Mark a file removed.'''
525 '''Mark a file removed.'''
526 self._dirty = True
526 self._dirty = True
527 self._droppath(f)
527 self._droppath(f)
528 size = 0
528 size = 0
529 if self._pl[1] != nullid:
529 if self._pl[1] != nullid:
530 entry = self._map.get(f)
530 entry = self._map.get(f)
531 if entry is not None:
531 if entry is not None:
532 # backup the previous state
532 # backup the previous state
533 if entry[0] == 'm': # merge
533 if entry[0] == 'm': # merge
534 size = -1
534 size = -1
535 elif entry[0] == 'n' and entry[2] == -2: # other parent
535 elif entry[0] == 'n' and entry[2] == -2: # other parent
536 size = -2
536 size = -2
537 self._otherparentset.add(f)
537 self._otherparentset.add(f)
538 self._map[f] = dirstatetuple('r', 0, size, 0)
538 self._map[f] = dirstatetuple('r', 0, size, 0)
539 self._nonnormalset.add(f)
539 self._nonnormalset.add(f)
540 if size == 0:
540 if size == 0:
541 self._map.copymap.pop(f, None)
541 self._map.copymap.pop(f, None)
542
542
543 def merge(self, f):
543 def merge(self, f):
544 '''Mark a file merged.'''
544 '''Mark a file merged.'''
545 if self._pl[1] == nullid:
545 if self._pl[1] == nullid:
546 return self.normallookup(f)
546 return self.normallookup(f)
547 return self.otherparent(f)
547 return self.otherparent(f)
548
548
549 def drop(self, f):
549 def drop(self, f):
550 '''Drop a file from the dirstate'''
550 '''Drop a file from the dirstate'''
551 if f in self._map:
551 if f in self._map:
552 self._dirty = True
552 self._dirty = True
553 self._droppath(f)
553 self._droppath(f)
554 del self._map[f]
554 del self._map[f]
555 if f in self._nonnormalset:
555 if f in self._nonnormalset:
556 self._nonnormalset.remove(f)
556 self._nonnormalset.remove(f)
557 self._map.copymap.pop(f, None)
557 self._map.copymap.pop(f, None)
558
558
559 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
559 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
560 if exists is None:
560 if exists is None:
561 exists = os.path.lexists(os.path.join(self._root, path))
561 exists = os.path.lexists(os.path.join(self._root, path))
562 if not exists:
562 if not exists:
563 # Maybe a path component exists
563 # Maybe a path component exists
564 if not ignoremissing and '/' in path:
564 if not ignoremissing and '/' in path:
565 d, f = path.rsplit('/', 1)
565 d, f = path.rsplit('/', 1)
566 d = self._normalize(d, False, ignoremissing, None)
566 d = self._normalize(d, False, ignoremissing, None)
567 folded = d + "/" + f
567 folded = d + "/" + f
568 else:
568 else:
569 # No path components, preserve original case
569 # No path components, preserve original case
570 folded = path
570 folded = path
571 else:
571 else:
572 # recursively normalize leading directory components
572 # recursively normalize leading directory components
573 # against dirstate
573 # against dirstate
574 if '/' in normed:
574 if '/' in normed:
575 d, f = normed.rsplit('/', 1)
575 d, f = normed.rsplit('/', 1)
576 d = self._normalize(d, False, ignoremissing, True)
576 d = self._normalize(d, False, ignoremissing, True)
577 r = self._root + "/" + d
577 r = self._root + "/" + d
578 folded = d + "/" + util.fspath(f, r)
578 folded = d + "/" + util.fspath(f, r)
579 else:
579 else:
580 folded = util.fspath(normed, self._root)
580 folded = util.fspath(normed, self._root)
581 storemap[normed] = folded
581 storemap[normed] = folded
582
582
583 return folded
583 return folded
584
584
585 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
585 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
586 normed = util.normcase(path)
586 normed = util.normcase(path)
587 folded = self._filefoldmap.get(normed, None)
587 folded = self._filefoldmap.get(normed, None)
588 if folded is None:
588 if folded is None:
589 if isknown:
589 if isknown:
590 folded = path
590 folded = path
591 else:
591 else:
592 folded = self._discoverpath(path, normed, ignoremissing, exists,
592 folded = self._discoverpath(path, normed, ignoremissing, exists,
593 self._filefoldmap)
593 self._filefoldmap)
594 return folded
594 return folded
595
595
596 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
596 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
597 normed = util.normcase(path)
597 normed = util.normcase(path)
598 folded = self._filefoldmap.get(normed, None)
598 folded = self._filefoldmap.get(normed, None)
599 if folded is None:
599 if folded is None:
600 folded = self._dirfoldmap.get(normed, None)
600 folded = self._dirfoldmap.get(normed, None)
601 if folded is None:
601 if folded is None:
602 if isknown:
602 if isknown:
603 folded = path
603 folded = path
604 else:
604 else:
605 # store discovered result in dirfoldmap so that future
605 # store discovered result in dirfoldmap so that future
606 # normalizefile calls don't start matching directories
606 # normalizefile calls don't start matching directories
607 folded = self._discoverpath(path, normed, ignoremissing, exists,
607 folded = self._discoverpath(path, normed, ignoremissing, exists,
608 self._dirfoldmap)
608 self._dirfoldmap)
609 return folded
609 return folded
610
610
611 def normalize(self, path, isknown=False, ignoremissing=False):
611 def normalize(self, path, isknown=False, ignoremissing=False):
612 '''
612 '''
613 normalize the case of a pathname when on a casefolding filesystem
613 normalize the case of a pathname when on a casefolding filesystem
614
614
615 isknown specifies whether the filename came from walking the
615 isknown specifies whether the filename came from walking the
616 disk, to avoid extra filesystem access.
616 disk, to avoid extra filesystem access.
617
617
618 If ignoremissing is True, missing path are returned
618 If ignoremissing is True, missing path are returned
619 unchanged. Otherwise, we try harder to normalize possibly
619 unchanged. Otherwise, we try harder to normalize possibly
620 existing path components.
620 existing path components.
621
621
622 The normalized case is determined based on the following precedence:
622 The normalized case is determined based on the following precedence:
623
623
624 - version of name already stored in the dirstate
624 - version of name already stored in the dirstate
625 - version of name stored on disk
625 - version of name stored on disk
626 - version provided via command arguments
626 - version provided via command arguments
627 '''
627 '''
628
628
629 if self._checkcase:
629 if self._checkcase:
630 return self._normalize(path, isknown, ignoremissing)
630 return self._normalize(path, isknown, ignoremissing)
631 return path
631 return path
632
632
633 def clear(self):
633 def clear(self):
634 self._map = dirstatemap(self._ui, self._opener, self._root)
634 self._map = dirstatemap(self._ui, self._opener, self._root)
635 self._nonnormalset = set()
635 self._nonnormalset = set()
636 self._otherparentset = set()
636 self._otherparentset = set()
637 if "_dirs" in self.__dict__:
637 if "_dirs" in self.__dict__:
638 delattr(self, "_dirs")
638 delattr(self, "_dirs")
639 self._map.setparents(nullid, nullid)
639 self._map.setparents(nullid, nullid)
640 self._lastnormaltime = 0
640 self._lastnormaltime = 0
641 self._updatedfiles.clear()
641 self._updatedfiles.clear()
642 self._dirty = True
642 self._dirty = True
643
643
644 def rebuild(self, parent, allfiles, changedfiles=None):
644 def rebuild(self, parent, allfiles, changedfiles=None):
645 if changedfiles is None:
645 if changedfiles is None:
646 # Rebuild entire dirstate
646 # Rebuild entire dirstate
647 changedfiles = allfiles
647 changedfiles = allfiles
648 lastnormaltime = self._lastnormaltime
648 lastnormaltime = self._lastnormaltime
649 self.clear()
649 self.clear()
650 self._lastnormaltime = lastnormaltime
650 self._lastnormaltime = lastnormaltime
651
651
652 if self._origpl is None:
652 if self._origpl is None:
653 self._origpl = self._pl
653 self._origpl = self._pl
654 self._map.setparents(parent, nullid)
654 self._map.setparents(parent, nullid)
655 for f in changedfiles:
655 for f in changedfiles:
656 if f in allfiles:
656 if f in allfiles:
657 self.normallookup(f)
657 self.normallookup(f)
658 else:
658 else:
659 self.drop(f)
659 self.drop(f)
660
660
661 self._dirty = True
661 self._dirty = True
662
662
663 def identity(self):
663 def identity(self):
664 '''Return identity of dirstate itself to detect changing in storage
664 '''Return identity of dirstate itself to detect changing in storage
665
665
666 If identity of previous dirstate is equal to this, writing
666 If identity of previous dirstate is equal to this, writing
667 changes based on the former dirstate out can keep consistency.
667 changes based on the former dirstate out can keep consistency.
668 '''
668 '''
669 return self._identity
669 return self._identity
670
670
671 def write(self, tr):
671 def write(self, tr):
672 if not self._dirty:
672 if not self._dirty:
673 return
673 return
674
674
675 filename = self._filename
675 filename = self._filename
676 if tr:
676 if tr:
677 # 'dirstate.write()' is not only for writing in-memory
677 # 'dirstate.write()' is not only for writing in-memory
678 # changes out, but also for dropping ambiguous timestamp.
678 # changes out, but also for dropping ambiguous timestamp.
679 # delayed writing re-raise "ambiguous timestamp issue".
679 # delayed writing re-raise "ambiguous timestamp issue".
680 # See also the wiki page below for detail:
680 # See also the wiki page below for detail:
681 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
681 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
682
682
683 # emulate dropping timestamp in 'parsers.pack_dirstate'
683 # emulate dropping timestamp in 'parsers.pack_dirstate'
684 now = _getfsnow(self._opener)
684 now = _getfsnow(self._opener)
685 dmap = self._map
685 dmap = self._map
686 for f in self._updatedfiles:
686 for f in self._updatedfiles:
687 e = dmap.get(f)
687 e = dmap.get(f)
688 if e is not None and e[0] == 'n' and e[3] == now:
688 if e is not None and e[0] == 'n' and e[3] == now:
689 dmap[f] = dirstatetuple(e[0], e[1], e[2], -1)
689 dmap[f] = dirstatetuple(e[0], e[1], e[2], -1)
690 self._nonnormalset.add(f)
690 self._nonnormalset.add(f)
691
691
692 # emulate that all 'dirstate.normal' results are written out
692 # emulate that all 'dirstate.normal' results are written out
693 self._lastnormaltime = 0
693 self._lastnormaltime = 0
694 self._updatedfiles.clear()
694 self._updatedfiles.clear()
695
695
696 # delay writing in-memory changes out
696 # delay writing in-memory changes out
697 tr.addfilegenerator('dirstate', (self._filename,),
697 tr.addfilegenerator('dirstate', (self._filename,),
698 self._writedirstate, location='plain')
698 self._writedirstate, location='plain')
699 return
699 return
700
700
701 st = self._opener(filename, "w", atomictemp=True, checkambig=True)
701 st = self._opener(filename, "w", atomictemp=True, checkambig=True)
702 self._writedirstate(st)
702 self._writedirstate(st)
703
703
704 def addparentchangecallback(self, category, callback):
704 def addparentchangecallback(self, category, callback):
705 """add a callback to be called when the wd parents are changed
705 """add a callback to be called when the wd parents are changed
706
706
707 Callback will be called with the following arguments:
707 Callback will be called with the following arguments:
708 dirstate, (oldp1, oldp2), (newp1, newp2)
708 dirstate, (oldp1, oldp2), (newp1, newp2)
709
709
710 Category is a unique identifier to allow overwriting an old callback
710 Category is a unique identifier to allow overwriting an old callback
711 with a newer callback.
711 with a newer callback.
712 """
712 """
713 self._plchangecallbacks[category] = callback
713 self._plchangecallbacks[category] = callback
714
714
715 def _writedirstate(self, st):
715 def _writedirstate(self, st):
716 # notify callbacks about parents change
716 # notify callbacks about parents change
717 if self._origpl is not None and self._origpl != self._pl:
717 if self._origpl is not None and self._origpl != self._pl:
718 for c, callback in sorted(self._plchangecallbacks.iteritems()):
718 for c, callback in sorted(self._plchangecallbacks.iteritems()):
719 callback(self, self._origpl, self._pl)
719 callback(self, self._origpl, self._pl)
720 self._origpl = None
720 self._origpl = None
721 # use the modification time of the newly created temporary file as the
721 # use the modification time of the newly created temporary file as the
722 # filesystem's notion of 'now'
722 # filesystem's notion of 'now'
723 now = util.fstat(st).st_mtime & _rangemask
723 now = util.fstat(st).st_mtime & _rangemask
724
724
725 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
725 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
726 # timestamp of each entries in dirstate, because of 'now > mtime'
726 # timestamp of each entries in dirstate, because of 'now > mtime'
727 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite')
727 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite')
728 if delaywrite > 0:
728 if delaywrite > 0:
729 # do we have any files to delay for?
729 # do we have any files to delay for?
730 for f, e in self._map.iteritems():
730 for f, e in self._map.iteritems():
731 if e[0] == 'n' and e[3] == now:
731 if e[0] == 'n' and e[3] == now:
732 import time # to avoid useless import
732 import time # to avoid useless import
733 # rather than sleep n seconds, sleep until the next
733 # rather than sleep n seconds, sleep until the next
734 # multiple of n seconds
734 # multiple of n seconds
735 clock = time.time()
735 clock = time.time()
736 start = int(clock) - (int(clock) % delaywrite)
736 start = int(clock) - (int(clock) % delaywrite)
737 end = start + delaywrite
737 end = start + delaywrite
738 time.sleep(end - clock)
738 time.sleep(end - clock)
739 now = end # trust our estimate that the end is near now
739 now = end # trust our estimate that the end is near now
740 break
740 break
741
741
742 st.write(parsers.pack_dirstate(self._map._map, self._map.copymap,
742 self._map.write(st, now)
743 self._pl, now))
744 self._nonnormalset, self._otherparentset = self._map.nonnormalentries()
743 self._nonnormalset, self._otherparentset = self._map.nonnormalentries()
745 st.close()
746 self._lastnormaltime = 0
744 self._lastnormaltime = 0
747 self._dirty = self._map._dirtyparents = False
745 self._dirty = False
748
746
749 def _dirignore(self, f):
747 def _dirignore(self, f):
750 if f == '.':
748 if f == '.':
751 return False
749 return False
752 if self._ignore(f):
750 if self._ignore(f):
753 return True
751 return True
754 for p in util.finddirs(f):
752 for p in util.finddirs(f):
755 if self._ignore(p):
753 if self._ignore(p):
756 return True
754 return True
757 return False
755 return False
758
756
759 def _ignorefiles(self):
757 def _ignorefiles(self):
760 files = []
758 files = []
761 if os.path.exists(self._join('.hgignore')):
759 if os.path.exists(self._join('.hgignore')):
762 files.append(self._join('.hgignore'))
760 files.append(self._join('.hgignore'))
763 for name, path in self._ui.configitems("ui"):
761 for name, path in self._ui.configitems("ui"):
764 if name == 'ignore' or name.startswith('ignore.'):
762 if name == 'ignore' or name.startswith('ignore.'):
765 # we need to use os.path.join here rather than self._join
763 # we need to use os.path.join here rather than self._join
766 # because path is arbitrary and user-specified
764 # because path is arbitrary and user-specified
767 files.append(os.path.join(self._rootdir, util.expandpath(path)))
765 files.append(os.path.join(self._rootdir, util.expandpath(path)))
768 return files
766 return files
769
767
770 def _ignorefileandline(self, f):
768 def _ignorefileandline(self, f):
771 files = collections.deque(self._ignorefiles())
769 files = collections.deque(self._ignorefiles())
772 visited = set()
770 visited = set()
773 while files:
771 while files:
774 i = files.popleft()
772 i = files.popleft()
775 patterns = matchmod.readpatternfile(i, self._ui.warn,
773 patterns = matchmod.readpatternfile(i, self._ui.warn,
776 sourceinfo=True)
774 sourceinfo=True)
777 for pattern, lineno, line in patterns:
775 for pattern, lineno, line in patterns:
778 kind, p = matchmod._patsplit(pattern, 'glob')
776 kind, p = matchmod._patsplit(pattern, 'glob')
779 if kind == "subinclude":
777 if kind == "subinclude":
780 if p not in visited:
778 if p not in visited:
781 files.append(p)
779 files.append(p)
782 continue
780 continue
783 m = matchmod.match(self._root, '', [], [pattern],
781 m = matchmod.match(self._root, '', [], [pattern],
784 warn=self._ui.warn)
782 warn=self._ui.warn)
785 if m(f):
783 if m(f):
786 return (i, lineno, line)
784 return (i, lineno, line)
787 visited.add(i)
785 visited.add(i)
788 return (None, -1, "")
786 return (None, -1, "")
789
787
790 def _walkexplicit(self, match, subrepos):
788 def _walkexplicit(self, match, subrepos):
791 '''Get stat data about the files explicitly specified by match.
789 '''Get stat data about the files explicitly specified by match.
792
790
793 Return a triple (results, dirsfound, dirsnotfound).
791 Return a triple (results, dirsfound, dirsnotfound).
794 - results is a mapping from filename to stat result. It also contains
792 - results is a mapping from filename to stat result. It also contains
795 listings mapping subrepos and .hg to None.
793 listings mapping subrepos and .hg to None.
796 - dirsfound is a list of files found to be directories.
794 - dirsfound is a list of files found to be directories.
797 - dirsnotfound is a list of files that the dirstate thinks are
795 - dirsnotfound is a list of files that the dirstate thinks are
798 directories and that were not found.'''
796 directories and that were not found.'''
799
797
800 def badtype(mode):
798 def badtype(mode):
801 kind = _('unknown')
799 kind = _('unknown')
802 if stat.S_ISCHR(mode):
800 if stat.S_ISCHR(mode):
803 kind = _('character device')
801 kind = _('character device')
804 elif stat.S_ISBLK(mode):
802 elif stat.S_ISBLK(mode):
805 kind = _('block device')
803 kind = _('block device')
806 elif stat.S_ISFIFO(mode):
804 elif stat.S_ISFIFO(mode):
807 kind = _('fifo')
805 kind = _('fifo')
808 elif stat.S_ISSOCK(mode):
806 elif stat.S_ISSOCK(mode):
809 kind = _('socket')
807 kind = _('socket')
810 elif stat.S_ISDIR(mode):
808 elif stat.S_ISDIR(mode):
811 kind = _('directory')
809 kind = _('directory')
812 return _('unsupported file type (type is %s)') % kind
810 return _('unsupported file type (type is %s)') % kind
813
811
814 matchedir = match.explicitdir
812 matchedir = match.explicitdir
815 badfn = match.bad
813 badfn = match.bad
816 dmap = self._map
814 dmap = self._map
817 lstat = os.lstat
815 lstat = os.lstat
818 getkind = stat.S_IFMT
816 getkind = stat.S_IFMT
819 dirkind = stat.S_IFDIR
817 dirkind = stat.S_IFDIR
820 regkind = stat.S_IFREG
818 regkind = stat.S_IFREG
821 lnkkind = stat.S_IFLNK
819 lnkkind = stat.S_IFLNK
822 join = self._join
820 join = self._join
823 dirsfound = []
821 dirsfound = []
824 foundadd = dirsfound.append
822 foundadd = dirsfound.append
825 dirsnotfound = []
823 dirsnotfound = []
826 notfoundadd = dirsnotfound.append
824 notfoundadd = dirsnotfound.append
827
825
828 if not match.isexact() and self._checkcase:
826 if not match.isexact() and self._checkcase:
829 normalize = self._normalize
827 normalize = self._normalize
830 else:
828 else:
831 normalize = None
829 normalize = None
832
830
833 files = sorted(match.files())
831 files = sorted(match.files())
834 subrepos.sort()
832 subrepos.sort()
835 i, j = 0, 0
833 i, j = 0, 0
836 while i < len(files) and j < len(subrepos):
834 while i < len(files) and j < len(subrepos):
837 subpath = subrepos[j] + "/"
835 subpath = subrepos[j] + "/"
838 if files[i] < subpath:
836 if files[i] < subpath:
839 i += 1
837 i += 1
840 continue
838 continue
841 while i < len(files) and files[i].startswith(subpath):
839 while i < len(files) and files[i].startswith(subpath):
842 del files[i]
840 del files[i]
843 j += 1
841 j += 1
844
842
845 if not files or '.' in files:
843 if not files or '.' in files:
846 files = ['.']
844 files = ['.']
847 results = dict.fromkeys(subrepos)
845 results = dict.fromkeys(subrepos)
848 results['.hg'] = None
846 results['.hg'] = None
849
847
850 alldirs = None
848 alldirs = None
851 for ff in files:
849 for ff in files:
852 # constructing the foldmap is expensive, so don't do it for the
850 # constructing the foldmap is expensive, so don't do it for the
853 # common case where files is ['.']
851 # common case where files is ['.']
854 if normalize and ff != '.':
852 if normalize and ff != '.':
855 nf = normalize(ff, False, True)
853 nf = normalize(ff, False, True)
856 else:
854 else:
857 nf = ff
855 nf = ff
858 if nf in results:
856 if nf in results:
859 continue
857 continue
860
858
861 try:
859 try:
862 st = lstat(join(nf))
860 st = lstat(join(nf))
863 kind = getkind(st.st_mode)
861 kind = getkind(st.st_mode)
864 if kind == dirkind:
862 if kind == dirkind:
865 if nf in dmap:
863 if nf in dmap:
866 # file replaced by dir on disk but still in dirstate
864 # file replaced by dir on disk but still in dirstate
867 results[nf] = None
865 results[nf] = None
868 if matchedir:
866 if matchedir:
869 matchedir(nf)
867 matchedir(nf)
870 foundadd((nf, ff))
868 foundadd((nf, ff))
871 elif kind == regkind or kind == lnkkind:
869 elif kind == regkind or kind == lnkkind:
872 results[nf] = st
870 results[nf] = st
873 else:
871 else:
874 badfn(ff, badtype(kind))
872 badfn(ff, badtype(kind))
875 if nf in dmap:
873 if nf in dmap:
876 results[nf] = None
874 results[nf] = None
877 except OSError as inst: # nf not found on disk - it is dirstate only
875 except OSError as inst: # nf not found on disk - it is dirstate only
878 if nf in dmap: # does it exactly match a missing file?
876 if nf in dmap: # does it exactly match a missing file?
879 results[nf] = None
877 results[nf] = None
880 else: # does it match a missing directory?
878 else: # does it match a missing directory?
881 if alldirs is None:
879 if alldirs is None:
882 alldirs = util.dirs(dmap._map)
880 alldirs = util.dirs(dmap._map)
883 if nf in alldirs:
881 if nf in alldirs:
884 if matchedir:
882 if matchedir:
885 matchedir(nf)
883 matchedir(nf)
886 notfoundadd(nf)
884 notfoundadd(nf)
887 else:
885 else:
888 badfn(ff, encoding.strtolocal(inst.strerror))
886 badfn(ff, encoding.strtolocal(inst.strerror))
889
887
890 # Case insensitive filesystems cannot rely on lstat() failing to detect
888 # Case insensitive filesystems cannot rely on lstat() failing to detect
891 # a case-only rename. Prune the stat object for any file that does not
889 # a case-only rename. Prune the stat object for any file that does not
892 # match the case in the filesystem, if there are multiple files that
890 # match the case in the filesystem, if there are multiple files that
893 # normalize to the same path.
891 # normalize to the same path.
894 if match.isexact() and self._checkcase:
892 if match.isexact() and self._checkcase:
895 normed = {}
893 normed = {}
896
894
897 for f, st in results.iteritems():
895 for f, st in results.iteritems():
898 if st is None:
896 if st is None:
899 continue
897 continue
900
898
901 nc = util.normcase(f)
899 nc = util.normcase(f)
902 paths = normed.get(nc)
900 paths = normed.get(nc)
903
901
904 if paths is None:
902 if paths is None:
905 paths = set()
903 paths = set()
906 normed[nc] = paths
904 normed[nc] = paths
907
905
908 paths.add(f)
906 paths.add(f)
909
907
910 for norm, paths in normed.iteritems():
908 for norm, paths in normed.iteritems():
911 if len(paths) > 1:
909 if len(paths) > 1:
912 for path in paths:
910 for path in paths:
913 folded = self._discoverpath(path, norm, True, None,
911 folded = self._discoverpath(path, norm, True, None,
914 self._dirfoldmap)
912 self._dirfoldmap)
915 if path != folded:
913 if path != folded:
916 results[path] = None
914 results[path] = None
917
915
918 return results, dirsfound, dirsnotfound
916 return results, dirsfound, dirsnotfound
919
917
    def walk(self, match, subrepos, unknown, ignored, full=True):
        '''
        Walk recursively through the directory tree, finding all files
        matched by match.

        If full is False, maybe skip some known-clean files.

        Return a dict mapping filename to stat-like object (either
        mercurial.osutil.stat instance or return value of os.stat()).

        '''
        # full is a flag that extensions that hook into walk can use -- this
        # implementation doesn't use it at all. This satisfies the contract
        # because we only guarantee a "maybe".

        if ignored:
            ignore = util.never
            dirignore = util.never
        elif unknown:
            ignore = self._ignore
            dirignore = self._dirignore
        else:
            # if not unknown and not ignored, drop dir recursion and step 2
            ignore = util.always
            dirignore = util.always

        matchfn = match.matchfn
        matchalways = match.always()
        matchtdir = match.traversedir
        dmap = self._map
        listdir = util.listdir
        lstat = os.lstat
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join

        exact = skipstep3 = False
        if match.isexact(): # match.exact
            exact = True
            dirignore = util.always # skip step 2
        elif match.prefix(): # match.match, no patterns
            skipstep3 = True

        if not exact and self._checkcase:
            normalize = self._normalize
            normalizefile = self._normalizefile
            skipstep3 = False
        else:
            normalize = self._normalize
            normalizefile = None

        # step 1: find all explicit files
        results, work, dirsnotfound = self._walkexplicit(match, subrepos)

        skipstep3 = skipstep3 and not (work or dirsnotfound)
        work = [d for d in work if not dirignore(d[0])]

        # step 2: visit subdirectories
        def traverse(work, alreadynormed):
            wadd = work.append
            while work:
                nd = work.pop()
                if not match.visitdir(nd):
                    continue
                skip = None
                if nd == '.':
                    nd = ''
                else:
                    skip = '.hg'
                try:
                    entries = listdir(join(nd), stat=True, skip=skip)
                except OSError as inst:
                    if inst.errno in (errno.EACCES, errno.ENOENT):
                        match.bad(self.pathto(nd),
                                  encoding.strtolocal(inst.strerror))
                        continue
                    raise
                for f, kind, st in entries:
                    if normalizefile:
                        # even though f might be a directory, we're only
                        # interested in comparing it to files currently in the
                        # dmap -- therefore normalizefile is enough
                        nf = normalizefile(nd and (nd + "/" + f) or f, True,
                                           True)
                    else:
                        nf = nd and (nd + "/" + f) or f
                    if nf not in results:
                        if kind == dirkind:
                            if not ignore(nf):
                                if matchtdir:
                                    matchtdir(nf)
                                wadd(nf)
                            if nf in dmap and (matchalways or matchfn(nf)):
                                results[nf] = None
                        elif kind == regkind or kind == lnkkind:
                            if nf in dmap:
                                if matchalways or matchfn(nf):
                                    results[nf] = st
                            elif ((matchalways or matchfn(nf))
                                  and not ignore(nf)):
                                # unknown file -- normalize if necessary
                                if not alreadynormed:
                                    nf = normalize(nf, False, True)
                                results[nf] = st
                        elif nf in dmap and (matchalways or matchfn(nf)):
                            results[nf] = None

        for nd, d in work:
            # alreadynormed means that processwork doesn't have to do any
            # expensive directory normalization
            alreadynormed = not normalize or nd == d
            traverse([d], alreadynormed)

        for s in subrepos:
            del results[s]
        del results['.hg']

        # step 3: visit remaining files from dmap
        if not skipstep3 and not exact:
            # If a dmap file is not in results yet, it was either
            # a) not matching matchfn b) ignored, c) missing, or d) under a
            # symlink directory.
            if not results and matchalways:
                visit = [f for f in dmap]
            else:
                visit = [f for f in dmap if f not in results and matchfn(f)]
            visit.sort()

            if unknown:
                # unknown == True means we walked all dirs under the roots
                # that weren't ignored, and everything that matched was
                # stat'ed and is already in results.
                # The rest must thus be ignored or under a symlink.
                audit_path = pathutil.pathauditor(self._root, cached=True)

                for nf in iter(visit):
                    # If a stat for the same file was already added with a
                    # different case, don't add one for this, since that would
                    # make it appear as if the file exists under both names
                    # on disk.
                    if (normalizefile and
                        normalizefile(nf, True, True) in results):
                        results[nf] = None
                    # Report ignored items in the dmap as long as they are not
                    # under a symlink directory.
                    elif audit_path.check(nf):
                        try:
                            results[nf] = lstat(join(nf))
                            # file was just ignored, no links, and exists
                        except OSError:
                            # file doesn't exist
                            results[nf] = None
                    else:
                        # It's either missing or under a symlink directory,
                        # which we report as missing in this case.
                        results[nf] = None
            else:
                # We may not have walked the full directory tree above,
                # so stat and check everything we missed.
                iv = iter(visit)
                for st in util.statfiles([join(i) for i in visit]):
                    results[next(iv)] = st
        return results

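    # Illustrative sketch, not part of this module: how a caller might consume
    # walk(). The 'repo' object and the matcher 'm' are assumed to come from
    # elsewhere; only the walk() signature above is taken as given.
    #
    #   entries = repo.dirstate.walk(m, subrepos=[], unknown=True,
    #                                ignored=False)
    #   for fn, st in entries.iteritems():
    #       # st is a stat-like object for files found on disk, or None for
    #       # entries reported without a usable stat (missing, case
    #       # collisions, dmap-only directories, ...)
    #       print fn, st is not None
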
    def status(self, match, subrepos, ignored, clean, unknown):
        '''Determine the status of the working copy relative to the
        dirstate and return a pair of (unsure, status), where status is of type
        scmutil.status and:

          unsure:
            files that might have been modified since the dirstate was
            written, but need to be read to be sure (size is the same
            but mtime differs)
          status.modified:
            files that have definitely been modified since the dirstate
            was written (different size or mode)
          status.clean:
            files that have definitely not been modified since the
            dirstate was written
        '''
        listignored, listclean, listunknown = ignored, clean, unknown
        lookup, modified, added, unknown, ignored = [], [], [], [], []
        removed, deleted, clean = [], [], []

        dmap = self._map
        ladd = lookup.append # aka "unsure"
        madd = modified.append
        aadd = added.append
        uadd = unknown.append
        iadd = ignored.append
        radd = removed.append
        dadd = deleted.append
        cadd = clean.append
        mexact = match.exact
        dirignore = self._dirignore
        checkexec = self._checkexec
        copymap = self._map.copymap
        lastnormaltime = self._lastnormaltime

        # We need to do full walks when either
        # - we're listing all clean files, or
        # - match.traversedir does something, because match.traversedir should
        #   be called for every dir in the working dir
        full = listclean or match.traversedir is not None
        for fn, st in self.walk(match, subrepos, listunknown, listignored,
                                full=full).iteritems():
            if fn not in dmap:
                if (listignored or mexact(fn)) and dirignore(fn):
                    if listignored:
                        iadd(fn)
                else:
                    uadd(fn)
                continue

            # This is equivalent to 'state, mode, size, time = dmap[fn]' but
            # not written like that for performance reasons. dmap[fn] is not a
            # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
            # opcode has fast paths when the value to be unpacked is a tuple
            # or a list, but falls back to creating a full-fledged iterator in
            # general. That is much slower than simply accessing and storing
            # the tuple members one by one.
            t = dmap[fn]
            state = t[0]
            mode = t[1]
            size = t[2]
            time = t[3]

            if not st and state in "nma":
                dadd(fn)
            elif state == 'n':
                if (size >= 0 and
                    ((size != st.st_size and size != st.st_size & _rangemask)
                     or ((mode ^ st.st_mode) & 0o100 and checkexec))
                    or size == -2 # other parent
                    or fn in copymap):
                    madd(fn)
                elif time != st.st_mtime and time != st.st_mtime & _rangemask:
                    ladd(fn)
                elif st.st_mtime == lastnormaltime:
                    # fn may have just been marked as normal and it may have
                    # changed in the same second without changing its size.
                    # This can happen if we quickly do multiple commits.
                    # Force lookup, so we don't miss such a racy file change.
                    ladd(fn)
                elif listclean:
                    cadd(fn)
            elif state == 'm':
                madd(fn)
            elif state == 'a':
                aadd(fn)
            elif state == 'r':
                radd(fn)

        return (lookup, scmutil.status(modified, added, removed, deleted,
                                       unknown, ignored, clean))

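    # Illustrative sketch, not part of this module: unpacking the return value
    # of status(). 'repo' and the matcher 'm' are assumptions; the field order
    # follows the scmutil.status(...) call above.
    #
    #   unsure, s = repo.dirstate.status(m, [], ignored=False, clean=False,
    #                                    unknown=True)
    #   # 'unsure' files still need a content comparison to classify; the
    #   # other buckets are already final:
    #   changed = s.modified + s.added + s.removed
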
    def matches(self, match):
        '''
        return files in the dirstate (in whatever state) filtered by match
        '''
        dmap = self._map
        if match.always():
            return dmap.keys()
        files = match.files()
        if match.isexact():
            # fast path -- filter the other way around, since typically files
            # is much smaller than dmap
            return [f for f in files if f in dmap]
        if match.prefix() and all(fn in dmap for fn in files):
            # fast path -- all the values are known to be files, so just
            # return that
            return list(files)
        return [f for f in dmap if match(f)]

    def _actualfilename(self, tr):
        if tr:
            return self._pendingfilename
        else:
            return self._filename

    def savebackup(self, tr, backupname):
        '''Save current dirstate into backup file'''
        filename = self._actualfilename(tr)
        assert backupname != filename

        # use '_writedirstate' instead of 'write' to make sure changes are
        # really written out, because the latter skips writing while a
        # transaction is running. The file written here is then used below to
        # create the backup of the dirstate.
        if self._dirty or not self._opener.exists(filename):
            self._writedirstate(self._opener(filename, "w", atomictemp=True,
                                             checkambig=True))

        if tr:
            # ensure that subsequent tr.writepending returns True for
            # changes written out above, even if the dirstate is never
            # changed after this
            tr.addfilegenerator('dirstate', (self._filename,),
                                self._writedirstate, location='plain')

            # ensure that the pending file written above is unlinked on
            # failure, even if tr.writepending isn't invoked until the
            # end of this transaction
            tr.registertmp(filename, location='plain')

        self._opener.tryunlink(backupname)
        # hardlink backup is okay because _writedirstate is always called
        # with an "atomictemp=True" file.
        util.copyfile(self._opener.join(filename),
                      self._opener.join(backupname), hardlink=True)

    def restorebackup(self, tr, backupname):
        '''Restore the dirstate from the backup file'''
        # this "invalidate()" prevents "wlock.release()" from writing out
        # dirstate changes after restoring from the backup file
        self.invalidate()
        filename = self._actualfilename(tr)
        self._opener.rename(backupname, filename, checkambig=True)

    def clearbackup(self, tr, backupname):
        '''Clear the backup file'''
        self._opener.unlink(backupname)

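    # Illustrative sketch, not part of this module: the intended pairing of
    # the three backup helpers around a risky working-directory operation.
    # The backup file name is a placeholder chosen for the example.
    #
    #   repo.dirstate.savebackup(tr, 'dirstate.example-backup')
    #   try:
    #       pass                      # ... mutate the working directory ...
    #   except Exception:
    #       repo.dirstate.restorebackup(tr, 'dirstate.example-backup')
    #       raise
    #   else:
    #       repo.dirstate.clearbackup(tr, 'dirstate.example-backup')
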
class dirstatemap(object):
    def __init__(self, ui, opener, root):
        self._ui = ui
        self._opener = opener
        self._root = root
        self._filename = 'dirstate'

        self._map = {}
        self.copymap = {}
        self._parents = None
        self._dirtyparents = False

        # for consistent view between _pl() and _read() invocations
        self._pendingmode = None

    def iteritems(self):
        return self._map.iteritems()

    def __len__(self):
        return len(self._map)

    def __iter__(self):
        return iter(self._map)

    def get(self, key, default=None):
        return self._map.get(key, default)

    def __contains__(self, key):
        return key in self._map

    def __setitem__(self, key, value):
        self._map[key] = value

    def __getitem__(self, key):
        return self._map[key]

    def __delitem__(self, key):
        del self._map[key]

    def keys(self):
        return self._map.keys()

    def nonnormalentries(self):
        '''Compute the nonnormal dirstate entries from the dmap'''
        try:
            return parsers.nonnormalotherparententries(self._map)
        except AttributeError:
            nonnorm = set()
            otherparent = set()
            for fname, e in self._map.iteritems():
                if e[0] != 'n' or e[3] == -1:
                    nonnorm.add(fname)
                if e[0] == 'n' and e[2] == -2:
                    otherparent.add(fname)
            return nonnorm, otherparent

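    # Illustrative examples, not exhaustive, of how the pure-Python fallback
    # above classifies (state, mode, size, mtime) entries:
    #
    #   ('n', 0o644, 12, 1500000000)  -> in neither set (clean normal file)
    #   ('n', 0o644, 12, -1)          -> nonnormal only (mtime unknown)
    #   ('n', 0o644, -2, -1)          -> nonnormal and otherparent (size -2)
    #   ('a', 0, -1, -1)              -> nonnormal only (state is not 'n')
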
    def filefoldmap(self):
        """Returns a dictionary mapping normalized case paths to their
        non-normalized versions.
        """
        try:
            makefilefoldmap = parsers.make_file_foldmap
        except AttributeError:
            pass
        else:
            return makefilefoldmap(self._map, util.normcasespec,
                                   util.normcasefallback)

        f = {}
        normcase = util.normcase
        for name, s in self._map.iteritems():
            if s[0] != 'r':
                f[normcase(name)] = name
        f['.'] = '.' # prevents useless util.fspath() invocation
        return f

    def dirs(self):
        """Returns a set-like object containing all the directories in the
        current dirstate.
        """
        return util.dirs(self._map, 'r')

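    # Hypothetical illustration: on a case-insensitive filesystem where
    # 'README.txt' is tracked, filefoldmap() would contain something like
    #
    #   {'readme.txt': 'README.txt', '.': '.'}
    #
    # letting callers map a user-typed spelling back to the tracked one.
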
    def _opendirstatefile(self):
        fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
        if self._pendingmode is not None and self._pendingmode != mode:
            fp.close()
            raise error.Abort(_('working directory state may be '
                                'changed parallelly'))
        self._pendingmode = mode
        return fp

    def parents(self):
        if not self._parents:
            try:
                fp = self._opendirstatefile()
                st = fp.read(40)
                fp.close()
            except IOError as err:
                if err.errno != errno.ENOENT:
                    raise
                # File doesn't exist, so the current state is empty
                st = ''

            l = len(st)
            if l == 40:
                self._parents = st[:20], st[20:40]
            elif l == 0:
                self._parents = [nullid, nullid]
            else:
                raise error.Abort(_('working directory state appears '
                                    'damaged!'))

        return self._parents

    def setparents(self, p1, p2):
        self._parents = (p1, p2)
        self._dirtyparents = True

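    # Illustrative sketch, not part of this module: the on-disk layout that
    # parents() relies on. The first 40 bytes of the dirstate file are the two
    # parent nodeids, 20 binary bytes each (the path below is an assumption
    # made for the sketch).
    #
    #   with open('.hg/dirstate', 'rb') as fp:
    #       header = fp.read(40)
    #   p1, p2 = header[:20], header[20:40]
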
    def read(self):
        try:
            fp = self._opendirstatefile()
            try:
                st = fp.read()
            finally:
                fp.close()
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            return
        if not st:
            return

        if util.safehasattr(parsers, 'dict_new_presized'):
            # Make an estimate of the number of files in the dirstate based on
            # its size. From a linear regression on a set of real-world repos,
            # all over 10,000 files, the size of a dirstate entry is 85
            # bytes. The cost of resizing is significantly higher than the
            # cost of filling in a larger presized dict, so subtract 20% from
            # the size.
            #
            # This heuristic is imperfect in many ways, so in a future
            # dirstate format update it makes sense to just record the number
            # of entries on write.
            self._map = parsers.dict_new_presized(len(st) / 71)

        # Python's garbage collector triggers a GC each time a certain number
        # of container objects (the number being defined by
        # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
        # for each file in the dirstate. The C version then immediately marks
        # them as not to be tracked by the collector. However, this has no
        # effect on when GCs are triggered, only on what objects the GC looks
        # into. This means that O(number of files) GCs are unavoidable.
        # Depending on when in the process's lifetime the dirstate is parsed,
        # this can get very expensive. As a workaround, disable GC while
        # parsing the dirstate.
        #
        # (we cannot decorate the function directly since it is in a C module)
        parse_dirstate = util.nogc(parsers.parse_dirstate)
        p = parse_dirstate(self._map, self.copymap, st)
        if not self._dirtyparents:
            self.setparents(*p)

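    # Worked example for the presizing heuristic above: a 710,000 byte
    # dirstate presizes the dict for 710000 / 71 = 10,000 entries, while the
    # 85-bytes-per-entry estimate suggests roughly 8,350 files, so the dict is
    # deliberately sized about 20% above the estimate to avoid resizing.
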
    def write(self, st, now):
        st.write(parsers.pack_dirstate(self._map, self.copymap,
                                       self.parents(), now))
        st.close()
        self._dirtyparents = False
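
    # Illustrative sketch of the expected caller, an assumption based on this
    # change's description ("move write into dirstatemap") rather than on code
    # shown here: the dirstate object hands write() an already-opened
    # atomictemp file plus a filesystem timestamp, roughly
    #
    #   st = opener('dirstate', 'w', atomictemp=True, checkambig=True)
    #   dmap.write(st, now)    # packs entries, copies and parents, closes st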