##// END OF EJS Templates
dirstate: stop using `oldstate` in `dirstate._addpath`...
marmoute -
r48312:b76d54b9 default
parent child Browse files
Show More
@@ -1,1436 +1,1437 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .pycompat import delattr
17 from .pycompat import delattr
18
18
19 from hgdemandimport import tracing
19 from hgdemandimport import tracing
20
20
21 from . import (
21 from . import (
22 dirstatemap,
22 dirstatemap,
23 encoding,
23 encoding,
24 error,
24 error,
25 match as matchmod,
25 match as matchmod,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 sparse,
30 sparse,
31 util,
31 util,
32 )
32 )
33
33
34 from .interfaces import (
34 from .interfaces import (
35 dirstate as intdirstate,
35 dirstate as intdirstate,
36 util as interfaceutil,
36 util as interfaceutil,
37 )
37 )
38
38
# Load the C/Rust accelerated implementations when available; policy
# falls back to the pure-Python versions otherwise.
parsers = policy.importmod('parsers')
rustmod = policy.importrust('dirstate')

# The dirstate-v2 on-disk format is only implemented by the Rust extension.
SUPPORTS_DIRSTATE_V2 = rustmod is not None

# Local aliases for frequently used helpers.
propertycache = util.propertycache
filecache = scmutil.filecache
_rangemask = dirstatemap.rangemask

dirstatetuple = parsers.dirstatetuple
49
49
50
50
class repocache(filecache):
    """filecache for files in .hg/"""

    def join(self, obj, fname):
        # Resolve fname relative to the repository's .hg directory.
        return obj._opener.join(fname)
56
56
57
57
class rootcache(filecache):
    """filecache for files in the repository root"""

    def join(self, obj, fname):
        # Resolve fname relative to the working-directory root.
        return obj._join(fname)
63
63
64
64
65 def _getfsnow(vfs):
65 def _getfsnow(vfs):
66 '''Get "now" timestamp on filesystem'''
66 '''Get "now" timestamp on filesystem'''
67 tmpfd, tmpname = vfs.mkstemp()
67 tmpfd, tmpname = vfs.mkstemp()
68 try:
68 try:
69 return os.fstat(tmpfd)[stat.ST_MTIME]
69 return os.fstat(tmpfd)[stat.ST_MTIME]
70 finally:
70 finally:
71 os.close(tmpfd)
71 os.close(tmpfd)
72 vfs.unlink(tmpname)
72 vfs.unlink(tmpname)
73
73
74
74
75 @interfaceutil.implementer(intdirstate.idirstate)
75 @interfaceutil.implementer(intdirstate.idirstate)
76 class dirstate(object):
76 class dirstate(object):
77 def __init__(
77 def __init__(
78 self,
78 self,
79 opener,
79 opener,
80 ui,
80 ui,
81 root,
81 root,
82 validate,
82 validate,
83 sparsematchfn,
83 sparsematchfn,
84 nodeconstants,
84 nodeconstants,
85 use_dirstate_v2,
85 use_dirstate_v2,
86 ):
86 ):
87 """Create a new dirstate object.
87 """Create a new dirstate object.
88
88
89 opener is an open()-like callable that can be used to open the
89 opener is an open()-like callable that can be used to open the
90 dirstate file; root is the root of the directory tracked by
90 dirstate file; root is the root of the directory tracked by
91 the dirstate.
91 the dirstate.
92 """
92 """
93 self._use_dirstate_v2 = use_dirstate_v2
93 self._use_dirstate_v2 = use_dirstate_v2
94 self._nodeconstants = nodeconstants
94 self._nodeconstants = nodeconstants
95 self._opener = opener
95 self._opener = opener
96 self._validate = validate
96 self._validate = validate
97 self._root = root
97 self._root = root
98 self._sparsematchfn = sparsematchfn
98 self._sparsematchfn = sparsematchfn
99 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
99 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
100 # UNC path pointing to root share (issue4557)
100 # UNC path pointing to root share (issue4557)
101 self._rootdir = pathutil.normasprefix(root)
101 self._rootdir = pathutil.normasprefix(root)
102 self._dirty = False
102 self._dirty = False
103 self._lastnormaltime = 0
103 self._lastnormaltime = 0
104 self._ui = ui
104 self._ui = ui
105 self._filecache = {}
105 self._filecache = {}
106 self._parentwriters = 0
106 self._parentwriters = 0
107 self._filename = b'dirstate'
107 self._filename = b'dirstate'
108 self._pendingfilename = b'%s.pending' % self._filename
108 self._pendingfilename = b'%s.pending' % self._filename
109 self._plchangecallbacks = {}
109 self._plchangecallbacks = {}
110 self._origpl = None
110 self._origpl = None
111 self._updatedfiles = set()
111 self._updatedfiles = set()
112 self._mapcls = dirstatemap.dirstatemap
112 self._mapcls = dirstatemap.dirstatemap
113 # Access and cache cwd early, so we don't access it for the first time
113 # Access and cache cwd early, so we don't access it for the first time
114 # after a working-copy update caused it to not exist (accessing it then
114 # after a working-copy update caused it to not exist (accessing it then
115 # raises an exception).
115 # raises an exception).
116 self._cwd
116 self._cwd
117
117
118 def prefetch_parents(self):
118 def prefetch_parents(self):
119 """make sure the parents are loaded
119 """make sure the parents are loaded
120
120
121 Used to avoid a race condition.
121 Used to avoid a race condition.
122 """
122 """
123 self._pl
123 self._pl
124
124
125 @contextlib.contextmanager
125 @contextlib.contextmanager
126 def parentchange(self):
126 def parentchange(self):
127 """Context manager for handling dirstate parents.
127 """Context manager for handling dirstate parents.
128
128
129 If an exception occurs in the scope of the context manager,
129 If an exception occurs in the scope of the context manager,
130 the incoherent dirstate won't be written when wlock is
130 the incoherent dirstate won't be written when wlock is
131 released.
131 released.
132 """
132 """
133 self._parentwriters += 1
133 self._parentwriters += 1
134 yield
134 yield
135 # Typically we want the "undo" step of a context manager in a
135 # Typically we want the "undo" step of a context manager in a
136 # finally block so it happens even when an exception
136 # finally block so it happens even when an exception
137 # occurs. In this case, however, we only want to decrement
137 # occurs. In this case, however, we only want to decrement
138 # parentwriters if the code in the with statement exits
138 # parentwriters if the code in the with statement exits
139 # normally, so we don't have a try/finally here on purpose.
139 # normally, so we don't have a try/finally here on purpose.
140 self._parentwriters -= 1
140 self._parentwriters -= 1
141
141
142 def pendingparentchange(self):
142 def pendingparentchange(self):
143 """Returns true if the dirstate is in the middle of a set of changes
143 """Returns true if the dirstate is in the middle of a set of changes
144 that modify the dirstate parent.
144 that modify the dirstate parent.
145 """
145 """
146 return self._parentwriters > 0
146 return self._parentwriters > 0
147
147
148 @propertycache
148 @propertycache
149 def _map(self):
149 def _map(self):
150 """Return the dirstate contents (see documentation for dirstatemap)."""
150 """Return the dirstate contents (see documentation for dirstatemap)."""
151 self._map = self._mapcls(
151 self._map = self._mapcls(
152 self._ui,
152 self._ui,
153 self._opener,
153 self._opener,
154 self._root,
154 self._root,
155 self._nodeconstants,
155 self._nodeconstants,
156 self._use_dirstate_v2,
156 self._use_dirstate_v2,
157 )
157 )
158 return self._map
158 return self._map
159
159
160 @property
160 @property
161 def _sparsematcher(self):
161 def _sparsematcher(self):
162 """The matcher for the sparse checkout.
162 """The matcher for the sparse checkout.
163
163
164 The working directory may not include every file from a manifest. The
164 The working directory may not include every file from a manifest. The
165 matcher obtained by this property will match a path if it is to be
165 matcher obtained by this property will match a path if it is to be
166 included in the working directory.
166 included in the working directory.
167 """
167 """
168 # TODO there is potential to cache this property. For now, the matcher
168 # TODO there is potential to cache this property. For now, the matcher
169 # is resolved on every access. (But the called function does use a
169 # is resolved on every access. (But the called function does use a
170 # cache to keep the lookup fast.)
170 # cache to keep the lookup fast.)
171 return self._sparsematchfn()
171 return self._sparsematchfn()
172
172
173 @repocache(b'branch')
173 @repocache(b'branch')
174 def _branch(self):
174 def _branch(self):
175 try:
175 try:
176 return self._opener.read(b"branch").strip() or b"default"
176 return self._opener.read(b"branch").strip() or b"default"
177 except IOError as inst:
177 except IOError as inst:
178 if inst.errno != errno.ENOENT:
178 if inst.errno != errno.ENOENT:
179 raise
179 raise
180 return b"default"
180 return b"default"
181
181
182 @property
182 @property
183 def _pl(self):
183 def _pl(self):
184 return self._map.parents()
184 return self._map.parents()
185
185
186 def hasdir(self, d):
186 def hasdir(self, d):
187 return self._map.hastrackeddir(d)
187 return self._map.hastrackeddir(d)
188
188
189 @rootcache(b'.hgignore')
189 @rootcache(b'.hgignore')
190 def _ignore(self):
190 def _ignore(self):
191 files = self._ignorefiles()
191 files = self._ignorefiles()
192 if not files:
192 if not files:
193 return matchmod.never()
193 return matchmod.never()
194
194
195 pats = [b'include:%s' % f for f in files]
195 pats = [b'include:%s' % f for f in files]
196 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
196 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
197
197
198 @propertycache
198 @propertycache
199 def _slash(self):
199 def _slash(self):
200 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
200 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
201
201
202 @propertycache
202 @propertycache
203 def _checklink(self):
203 def _checklink(self):
204 return util.checklink(self._root)
204 return util.checklink(self._root)
205
205
206 @propertycache
206 @propertycache
207 def _checkexec(self):
207 def _checkexec(self):
208 return bool(util.checkexec(self._root))
208 return bool(util.checkexec(self._root))
209
209
210 @propertycache
210 @propertycache
211 def _checkcase(self):
211 def _checkcase(self):
212 return not util.fscasesensitive(self._join(b'.hg'))
212 return not util.fscasesensitive(self._join(b'.hg'))
213
213
214 def _join(self, f):
214 def _join(self, f):
215 # much faster than os.path.join()
215 # much faster than os.path.join()
216 # it's safe because f is always a relative path
216 # it's safe because f is always a relative path
217 return self._rootdir + f
217 return self._rootdir + f
218
218
219 def flagfunc(self, buildfallback):
219 def flagfunc(self, buildfallback):
220 if self._checklink and self._checkexec:
220 if self._checklink and self._checkexec:
221
221
222 def f(x):
222 def f(x):
223 try:
223 try:
224 st = os.lstat(self._join(x))
224 st = os.lstat(self._join(x))
225 if util.statislink(st):
225 if util.statislink(st):
226 return b'l'
226 return b'l'
227 if util.statisexec(st):
227 if util.statisexec(st):
228 return b'x'
228 return b'x'
229 except OSError:
229 except OSError:
230 pass
230 pass
231 return b''
231 return b''
232
232
233 return f
233 return f
234
234
235 fallback = buildfallback()
235 fallback = buildfallback()
236 if self._checklink:
236 if self._checklink:
237
237
238 def f(x):
238 def f(x):
239 if os.path.islink(self._join(x)):
239 if os.path.islink(self._join(x)):
240 return b'l'
240 return b'l'
241 if b'x' in fallback(x):
241 if b'x' in fallback(x):
242 return b'x'
242 return b'x'
243 return b''
243 return b''
244
244
245 return f
245 return f
246 if self._checkexec:
246 if self._checkexec:
247
247
248 def f(x):
248 def f(x):
249 if b'l' in fallback(x):
249 if b'l' in fallback(x):
250 return b'l'
250 return b'l'
251 if util.isexec(self._join(x)):
251 if util.isexec(self._join(x)):
252 return b'x'
252 return b'x'
253 return b''
253 return b''
254
254
255 return f
255 return f
256 else:
256 else:
257 return fallback
257 return fallback
258
258
259 @propertycache
259 @propertycache
260 def _cwd(self):
260 def _cwd(self):
261 # internal config: ui.forcecwd
261 # internal config: ui.forcecwd
262 forcecwd = self._ui.config(b'ui', b'forcecwd')
262 forcecwd = self._ui.config(b'ui', b'forcecwd')
263 if forcecwd:
263 if forcecwd:
264 return forcecwd
264 return forcecwd
265 return encoding.getcwd()
265 return encoding.getcwd()
266
266
267 def getcwd(self):
267 def getcwd(self):
268 """Return the path from which a canonical path is calculated.
268 """Return the path from which a canonical path is calculated.
269
269
270 This path should be used to resolve file patterns or to convert
270 This path should be used to resolve file patterns or to convert
271 canonical paths back to file paths for display. It shouldn't be
271 canonical paths back to file paths for display. It shouldn't be
272 used to get real file paths. Use vfs functions instead.
272 used to get real file paths. Use vfs functions instead.
273 """
273 """
274 cwd = self._cwd
274 cwd = self._cwd
275 if cwd == self._root:
275 if cwd == self._root:
276 return b''
276 return b''
277 # self._root ends with a path separator if self._root is '/' or 'C:\'
277 # self._root ends with a path separator if self._root is '/' or 'C:\'
278 rootsep = self._root
278 rootsep = self._root
279 if not util.endswithsep(rootsep):
279 if not util.endswithsep(rootsep):
280 rootsep += pycompat.ossep
280 rootsep += pycompat.ossep
281 if cwd.startswith(rootsep):
281 if cwd.startswith(rootsep):
282 return cwd[len(rootsep) :]
282 return cwd[len(rootsep) :]
283 else:
283 else:
284 # we're outside the repo. return an absolute path.
284 # we're outside the repo. return an absolute path.
285 return cwd
285 return cwd
286
286
287 def pathto(self, f, cwd=None):
287 def pathto(self, f, cwd=None):
288 if cwd is None:
288 if cwd is None:
289 cwd = self.getcwd()
289 cwd = self.getcwd()
290 path = util.pathto(self._root, cwd, f)
290 path = util.pathto(self._root, cwd, f)
291 if self._slash:
291 if self._slash:
292 return util.pconvert(path)
292 return util.pconvert(path)
293 return path
293 return path
294
294
295 def __getitem__(self, key):
295 def __getitem__(self, key):
296 """Return the current state of key (a filename) in the dirstate.
296 """Return the current state of key (a filename) in the dirstate.
297
297
298 States are:
298 States are:
299 n normal
299 n normal
300 m needs merging
300 m needs merging
301 r marked for removal
301 r marked for removal
302 a marked for addition
302 a marked for addition
303 ? not tracked
303 ? not tracked
304
304
305 XXX The "state" is a bit obscure to be in the "public" API. we should
305 XXX The "state" is a bit obscure to be in the "public" API. we should
306 consider migrating all user of this to going through the dirstate entry
306 consider migrating all user of this to going through the dirstate entry
307 instead.
307 instead.
308 """
308 """
309 entry = self._map.get(key)
309 entry = self._map.get(key)
310 if entry is not None:
310 if entry is not None:
311 return entry.state
311 return entry.state
312 return b'?'
312 return b'?'
313
313
314 def __contains__(self, key):
314 def __contains__(self, key):
315 return key in self._map
315 return key in self._map
316
316
317 def __iter__(self):
317 def __iter__(self):
318 return iter(sorted(self._map))
318 return iter(sorted(self._map))
319
319
320 def items(self):
320 def items(self):
321 return pycompat.iteritems(self._map)
321 return pycompat.iteritems(self._map)
322
322
323 iteritems = items
323 iteritems = items
324
324
325 def directories(self):
325 def directories(self):
326 return self._map.directories()
326 return self._map.directories()
327
327
328 def parents(self):
328 def parents(self):
329 return [self._validate(p) for p in self._pl]
329 return [self._validate(p) for p in self._pl]
330
330
331 def p1(self):
331 def p1(self):
332 return self._validate(self._pl[0])
332 return self._validate(self._pl[0])
333
333
334 def p2(self):
334 def p2(self):
335 return self._validate(self._pl[1])
335 return self._validate(self._pl[1])
336
336
337 @property
337 @property
338 def in_merge(self):
338 def in_merge(self):
339 """True if a merge is in progress"""
339 """True if a merge is in progress"""
340 return self._pl[1] != self._nodeconstants.nullid
340 return self._pl[1] != self._nodeconstants.nullid
341
341
342 def branch(self):
342 def branch(self):
343 return encoding.tolocal(self._branch)
343 return encoding.tolocal(self._branch)
344
344
345 def setparents(self, p1, p2=None):
345 def setparents(self, p1, p2=None):
346 """Set dirstate parents to p1 and p2.
346 """Set dirstate parents to p1 and p2.
347
347
348 When moving from two parents to one, "merged" entries a
348 When moving from two parents to one, "merged" entries a
349 adjusted to normal and previous copy records discarded and
349 adjusted to normal and previous copy records discarded and
350 returned by the call.
350 returned by the call.
351
351
352 See localrepo.setparents()
352 See localrepo.setparents()
353 """
353 """
354 if p2 is None:
354 if p2 is None:
355 p2 = self._nodeconstants.nullid
355 p2 = self._nodeconstants.nullid
356 if self._parentwriters == 0:
356 if self._parentwriters == 0:
357 raise ValueError(
357 raise ValueError(
358 b"cannot set dirstate parent outside of "
358 b"cannot set dirstate parent outside of "
359 b"dirstate.parentchange context manager"
359 b"dirstate.parentchange context manager"
360 )
360 )
361
361
362 self._dirty = True
362 self._dirty = True
363 oldp2 = self._pl[1]
363 oldp2 = self._pl[1]
364 if self._origpl is None:
364 if self._origpl is None:
365 self._origpl = self._pl
365 self._origpl = self._pl
366 self._map.setparents(p1, p2)
366 self._map.setparents(p1, p2)
367 copies = {}
367 copies = {}
368 if (
368 if (
369 oldp2 != self._nodeconstants.nullid
369 oldp2 != self._nodeconstants.nullid
370 and p2 == self._nodeconstants.nullid
370 and p2 == self._nodeconstants.nullid
371 ):
371 ):
372 candidatefiles = self._map.non_normal_or_other_parent_paths()
372 candidatefiles = self._map.non_normal_or_other_parent_paths()
373
373
374 for f in candidatefiles:
374 for f in candidatefiles:
375 s = self._map.get(f)
375 s = self._map.get(f)
376 if s is None:
376 if s is None:
377 continue
377 continue
378
378
379 # Discard "merged" markers when moving away from a merge state
379 # Discard "merged" markers when moving away from a merge state
380 if s.merged:
380 if s.merged:
381 source = self._map.copymap.get(f)
381 source = self._map.copymap.get(f)
382 if source:
382 if source:
383 copies[f] = source
383 copies[f] = source
384 self.normallookup(f)
384 self.normallookup(f)
385 # Also fix up otherparent markers
385 # Also fix up otherparent markers
386 elif s.from_p2:
386 elif s.from_p2:
387 source = self._map.copymap.get(f)
387 source = self._map.copymap.get(f)
388 if source:
388 if source:
389 copies[f] = source
389 copies[f] = source
390 self.add(f)
390 self.add(f)
391 return copies
391 return copies
392
392
393 def setbranch(self, branch):
393 def setbranch(self, branch):
394 self.__class__._branch.set(self, encoding.fromlocal(branch))
394 self.__class__._branch.set(self, encoding.fromlocal(branch))
395 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
395 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
396 try:
396 try:
397 f.write(self._branch + b'\n')
397 f.write(self._branch + b'\n')
398 f.close()
398 f.close()
399
399
400 # make sure filecache has the correct stat info for _branch after
400 # make sure filecache has the correct stat info for _branch after
401 # replacing the underlying file
401 # replacing the underlying file
402 ce = self._filecache[b'_branch']
402 ce = self._filecache[b'_branch']
403 if ce:
403 if ce:
404 ce.refresh()
404 ce.refresh()
405 except: # re-raises
405 except: # re-raises
406 f.discard()
406 f.discard()
407 raise
407 raise
408
408
409 def invalidate(self):
409 def invalidate(self):
410 """Causes the next access to reread the dirstate.
410 """Causes the next access to reread the dirstate.
411
411
412 This is different from localrepo.invalidatedirstate() because it always
412 This is different from localrepo.invalidatedirstate() because it always
413 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
413 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
414 check whether the dirstate has changed before rereading it."""
414 check whether the dirstate has changed before rereading it."""
415
415
416 for a in ("_map", "_branch", "_ignore"):
416 for a in ("_map", "_branch", "_ignore"):
417 if a in self.__dict__:
417 if a in self.__dict__:
418 delattr(self, a)
418 delattr(self, a)
419 self._lastnormaltime = 0
419 self._lastnormaltime = 0
420 self._dirty = False
420 self._dirty = False
421 self._updatedfiles.clear()
421 self._updatedfiles.clear()
422 self._parentwriters = 0
422 self._parentwriters = 0
423 self._origpl = None
423 self._origpl = None
424
424
425 def copy(self, source, dest):
425 def copy(self, source, dest):
426 """Mark dest as a copy of source. Unmark dest if source is None."""
426 """Mark dest as a copy of source. Unmark dest if source is None."""
427 if source == dest:
427 if source == dest:
428 return
428 return
429 self._dirty = True
429 self._dirty = True
430 if source is not None:
430 if source is not None:
431 self._map.copymap[dest] = source
431 self._map.copymap[dest] = source
432 self._updatedfiles.add(source)
432 self._updatedfiles.add(source)
433 self._updatedfiles.add(dest)
433 self._updatedfiles.add(dest)
434 elif self._map.copymap.pop(dest, None):
434 elif self._map.copymap.pop(dest, None):
435 self._updatedfiles.add(dest)
435 self._updatedfiles.add(dest)
436
436
437 def copied(self, file):
437 def copied(self, file):
438 return self._map.copymap.get(file, None)
438 return self._map.copymap.get(file, None)
439
439
440 def copies(self):
440 def copies(self):
441 return self._map.copymap
441 return self._map.copymap
442
442
443 def _addpath(
443 def _addpath(
444 self,
444 self,
445 f,
445 f,
446 state,
446 state,
447 mode,
447 mode,
448 size=None,
448 size=None,
449 mtime=None,
449 mtime=None,
450 from_p2=False,
450 from_p2=False,
451 possibly_dirty=False,
451 possibly_dirty=False,
452 ):
452 ):
453 oldstate = self[f]
453 oldstate = self[f]
454 if state == b'a' or oldstate == b'r':
454 entry = self._map.get(f)
455 if state == b'a' or entry is not None and entry.removed:
455 scmutil.checkfilename(f)
456 scmutil.checkfilename(f)
456 if self._map.hastrackeddir(f):
457 if self._map.hastrackeddir(f):
457 msg = _(b'directory %r already in dirstate')
458 msg = _(b'directory %r already in dirstate')
458 msg %= pycompat.bytestr(f)
459 msg %= pycompat.bytestr(f)
459 raise error.Abort(msg)
460 raise error.Abort(msg)
460 # shadows
461 # shadows
461 for d in pathutil.finddirs(f):
462 for d in pathutil.finddirs(f):
462 if self._map.hastrackeddir(d):
463 if self._map.hastrackeddir(d):
463 break
464 break
464 entry = self._map.get(d)
465 entry = self._map.get(d)
465 if entry is not None and not entry.removed:
466 if entry is not None and not entry.removed:
466 msg = _(b'file %r in dirstate clashes with %r')
467 msg = _(b'file %r in dirstate clashes with %r')
467 msg %= (pycompat.bytestr(d), pycompat.bytestr(f))
468 msg %= (pycompat.bytestr(d), pycompat.bytestr(f))
468 raise error.Abort(msg)
469 raise error.Abort(msg)
469 self._dirty = True
470 self._dirty = True
470 self._updatedfiles.add(f)
471 self._updatedfiles.add(f)
471 self._map.addfile(
472 self._map.addfile(
472 f,
473 f,
473 oldstate,
474 oldstate,
474 state=state,
475 state=state,
475 mode=mode,
476 mode=mode,
476 size=size,
477 size=size,
477 mtime=mtime,
478 mtime=mtime,
478 from_p2=from_p2,
479 from_p2=from_p2,
479 possibly_dirty=possibly_dirty,
480 possibly_dirty=possibly_dirty,
480 )
481 )
481
482
482 def normal(self, f, parentfiledata=None):
483 def normal(self, f, parentfiledata=None):
483 """Mark a file normal and clean.
484 """Mark a file normal and clean.
484
485
485 parentfiledata: (mode, size, mtime) of the clean file
486 parentfiledata: (mode, size, mtime) of the clean file
486
487
487 parentfiledata should be computed from memory (for mode,
488 parentfiledata should be computed from memory (for mode,
488 size), as or close as possible from the point where we
489 size), as or close as possible from the point where we
489 determined the file was clean, to limit the risk of the
490 determined the file was clean, to limit the risk of the
490 file having been changed by an external process between the
491 file having been changed by an external process between the
491 moment where the file was determined to be clean and now."""
492 moment where the file was determined to be clean and now."""
492 if parentfiledata:
493 if parentfiledata:
493 (mode, size, mtime) = parentfiledata
494 (mode, size, mtime) = parentfiledata
494 else:
495 else:
495 s = os.lstat(self._join(f))
496 s = os.lstat(self._join(f))
496 mode = s.st_mode
497 mode = s.st_mode
497 size = s.st_size
498 size = s.st_size
498 mtime = s[stat.ST_MTIME]
499 mtime = s[stat.ST_MTIME]
499 self._addpath(f, b'n', mode, size, mtime)
500 self._addpath(f, b'n', mode, size, mtime)
500 self._map.copymap.pop(f, None)
501 self._map.copymap.pop(f, None)
501 if f in self._map.nonnormalset:
502 if f in self._map.nonnormalset:
502 self._map.nonnormalset.remove(f)
503 self._map.nonnormalset.remove(f)
503 if mtime > self._lastnormaltime:
504 if mtime > self._lastnormaltime:
504 # Remember the most recent modification timeslot for status(),
505 # Remember the most recent modification timeslot for status(),
505 # to make sure we won't miss future size-preserving file content
506 # to make sure we won't miss future size-preserving file content
506 # modifications that happen within the same timeslot.
507 # modifications that happen within the same timeslot.
507 self._lastnormaltime = mtime
508 self._lastnormaltime = mtime
508
509
509 def normallookup(self, f):
510 def normallookup(self, f):
510 '''Mark a file normal, but possibly dirty.'''
511 '''Mark a file normal, but possibly dirty.'''
511 if self.in_merge:
512 if self.in_merge:
512 # if there is a merge going on and the file was either
513 # if there is a merge going on and the file was either
513 # "merged" or coming from other parent (-2) before
514 # "merged" or coming from other parent (-2) before
514 # being removed, restore that state.
515 # being removed, restore that state.
515 entry = self._map.get(f)
516 entry = self._map.get(f)
516 if entry is not None:
517 if entry is not None:
517 # XXX this should probably be dealt with a a lower level
518 # XXX this should probably be dealt with a a lower level
518 # (see `merged_removed` and `from_p2_removed`)
519 # (see `merged_removed` and `from_p2_removed`)
519 if entry.merged_removed or entry.from_p2_removed:
520 if entry.merged_removed or entry.from_p2_removed:
520 source = self._map.copymap.get(f)
521 source = self._map.copymap.get(f)
521 if entry.merged_removed:
522 if entry.merged_removed:
522 self.merge(f)
523 self.merge(f)
523 elif entry.from_p2_removed:
524 elif entry.from_p2_removed:
524 self.otherparent(f)
525 self.otherparent(f)
525 if source is not None:
526 if source is not None:
526 self.copy(source, f)
527 self.copy(source, f)
527 return
528 return
528 elif entry.merged or entry.from_p2:
529 elif entry.merged or entry.from_p2:
529 return
530 return
530 self._addpath(f, b'n', 0, possibly_dirty=True)
531 self._addpath(f, b'n', 0, possibly_dirty=True)
531 self._map.copymap.pop(f, None)
532 self._map.copymap.pop(f, None)
532
533
533 def otherparent(self, f):
534 def otherparent(self, f):
534 '''Mark as coming from the other parent, always dirty.'''
535 '''Mark as coming from the other parent, always dirty.'''
535 if not self.in_merge:
536 if not self.in_merge:
536 msg = _(b"setting %r to other parent only allowed in merges") % f
537 msg = _(b"setting %r to other parent only allowed in merges") % f
537 raise error.Abort(msg)
538 raise error.Abort(msg)
538 if f in self and self[f] == b'n':
539 if f in self and self[f] == b'n':
539 # merge-like
540 # merge-like
540 self._addpath(f, b'm', 0, from_p2=True)
541 self._addpath(f, b'm', 0, from_p2=True)
541 else:
542 else:
542 # add-like
543 # add-like
543 self._addpath(f, b'n', 0, from_p2=True)
544 self._addpath(f, b'n', 0, from_p2=True)
544 self._map.copymap.pop(f, None)
545 self._map.copymap.pop(f, None)
545
546
546 def add(self, f):
547 def add(self, f):
547 '''Mark a file added.'''
548 '''Mark a file added.'''
548 self._addpath(f, b'a', 0)
549 self._addpath(f, b'a', 0)
549 self._map.copymap.pop(f, None)
550 self._map.copymap.pop(f, None)
550
551
551 def remove(self, f):
552 def remove(self, f):
552 '''Mark a file removed.'''
553 '''Mark a file removed.'''
553 self._dirty = True
554 self._dirty = True
554 self._updatedfiles.add(f)
555 self._updatedfiles.add(f)
555 self._map.removefile(f, in_merge=self.in_merge)
556 self._map.removefile(f, in_merge=self.in_merge)
556
557
557 def merge(self, f):
558 def merge(self, f):
558 '''Mark a file merged.'''
559 '''Mark a file merged.'''
559 if not self.in_merge:
560 if not self.in_merge:
560 return self.normallookup(f)
561 return self.normallookup(f)
561 return self.otherparent(f)
562 return self.otherparent(f)
562
563
563 def drop(self, f):
564 def drop(self, f):
564 '''Drop a file from the dirstate'''
565 '''Drop a file from the dirstate'''
565 oldstate = self[f]
566 oldstate = self[f]
566 if self._map.dropfile(f, oldstate):
567 if self._map.dropfile(f, oldstate):
567 self._dirty = True
568 self._dirty = True
568 self._updatedfiles.add(f)
569 self._updatedfiles.add(f)
569 self._map.copymap.pop(f, None)
570 self._map.copymap.pop(f, None)
570
571
def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
    """Resolve the on-disk ("folded") spelling of *path* and cache it.

    ``normed`` is the case-normalized form of ``path``; ``storemap`` is
    the fold-map (file or directory flavor) in which the discovered
    spelling is cached.  ``exists`` may be supplied by the caller to
    skip the ``lexists()`` probe.
    """
    if exists is None:
        exists = os.path.lexists(os.path.join(self._root, path))
    if not exists:
        # The path itself is missing, but maybe a leading component exists.
        if not ignoremissing and b'/' in path:
            head, tail = path.rsplit(b'/', 1)
            head = self._normalize(head, False, ignoremissing, None)
            folded = head + b"/" + tail
        else:
            # No path components, preserve original case
            folded = path
    else:
        # recursively normalize leading directory components
        # against dirstate
        if b'/' in normed:
            head, tail = normed.rsplit(b'/', 1)
            head = self._normalize(head, False, ignoremissing, True)
            folded = head + b"/" + util.fspath(tail, self._root + b"/" + head)
        else:
            folded = util.fspath(normed, self._root)
    storemap[normed] = folded

    return folded
def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
    """Return the folded (on-disk case) form of a *file* path.

    A cached entry in the file fold-map wins; otherwise the spelling is
    taken as-is when ``isknown`` or discovered from the filesystem.
    """
    normed = util.normcase(path)
    folded = self._map.filefoldmap.get(normed, None)
    if folded is not None:
        return folded
    if isknown:
        return path
    return self._discoverpath(
        path, normed, ignoremissing, exists, self._map.filefoldmap
    )
def _normalize(self, path, isknown, ignoremissing=False, exists=None):
    """Return the folded form of *path*, checking files then directories."""
    normed = util.normcase(path)
    folded = self._map.filefoldmap.get(normed, None)
    if folded is None:
        folded = self._map.dirfoldmap.get(normed, None)
    if folded is not None:
        return folded
    if isknown:
        return path
    # store discovered result in dirfoldmap so that future
    # normalizefile calls don't start matching directories
    return self._discoverpath(
        path, normed, ignoremissing, exists, self._map.dirfoldmap
    )
def normalize(self, path, isknown=False, ignoremissing=False):
    """Normalize the case of *path* on a case-folding filesystem.

    ``isknown`` specifies whether the filename came from walking the
    disk, to avoid extra filesystem access.

    If ``ignoremissing`` is True, missing paths are returned unchanged.
    Otherwise, we try harder to normalize possibly existing path
    components.

    The normalized case is determined based on the following precedence:

    - version of name already stored in the dirstate
    - version of name stored on disk
    - version provided via command arguments
    """
    if not self._checkcase:
        # case-sensitive filesystem: nothing to fold
        return path
    return self._normalize(path, isknown, ignoremissing)
def clear(self):
    """Forget every tracked entry and mark the dirstate dirty."""
    self._map.clear()
    self._updatedfiles.clear()
    self._lastnormaltime = 0
    self._dirty = True
def rebuild(self, parent, allfiles, changedfiles=None):
    """Reset the dirstate to describe *parent* for *allfiles*.

    When *changedfiles* is given, only those entries are refreshed:
    looked up if still present in *allfiles*, dropped otherwise.
    Without it the whole dirstate is rebuilt from scratch.
    """
    if changedfiles is None:
        # Rebuild entire dirstate
        to_lookup = allfiles
        to_drop = []
        lastnormaltime = self._lastnormaltime
        self.clear()
        self._lastnormaltime = lastnormaltime
    elif len(changedfiles) < 10:
        # Avoid turning allfiles into a set, which can be expensive if it's
        # large.
        to_lookup = [f for f in changedfiles if f in allfiles]
        to_drop = [f for f in changedfiles if f not in allfiles]
    else:
        changedfilesset = set(changedfiles)
        to_lookup = changedfilesset & set(allfiles)
        to_drop = changedfilesset - to_lookup

    if self._origpl is None:
        # remember the original parents for callbacks fired at write time
        self._origpl = self._pl
    self._map.setparents(parent, self._nodeconstants.nullid)

    for f in to_lookup:
        self.normallookup(f)
    for f in to_drop:
        self.drop(f)

    self._dirty = True
def identity(self):
    """Return the identity of the dirstate storage itself.

    If the identity of a previously-read dirstate is equal to this one,
    writing changes based on the former dirstate out can keep
    consistency.
    """
    return self._map.identity
def write(self, tr):
    """Flush dirstate changes, delayed via transaction *tr* when given."""
    if not self._dirty:
        return

    if tr:
        # 'dirstate.write()' is not only for writing in-memory
        # changes out, but also for dropping ambiguous timestamps;
        # delayed writing re-raises the "ambiguous timestamp issue".
        # See also the wiki page below for detail:
        # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan

        # emulate dropping timestamp in 'parsers.pack_dirstate'
        now = _getfsnow(self._opener)
        self._map.clearambiguoustimes(self._updatedfiles, now)

        # emulate that all 'dirstate.normal' results are written out
        self._lastnormaltime = 0
        self._updatedfiles.clear()

        # delay writing in-memory changes out until transaction commit
        tr.addfilegenerator(
            b'dirstate',
            (self._filename,),
            self._writedirstate,
            location=b'plain',
        )
        return

    st = self._opener(self._filename, b"w", atomictemp=True, checkambig=True)
    self._writedirstate(st)
def addparentchangecallback(self, category, callback):
    """Register *callback* to run when the working dir parents change.

    The callback will be invoked as
    ``callback(dirstate, (oldp1, oldp2), (newp1, newp2))``.

    ``category`` is a unique identifier: registering again under the
    same category overwrites the older callback.
    """
    self._plchangecallbacks[category] = callback
def _writedirstate(self, st):
    """Serialize the dirstate map to the open file object *st*."""
    # notify callbacks about parents change
    if self._origpl is not None and self._origpl != self._pl:
        for c, callback in sorted(
            pycompat.iteritems(self._plchangecallbacks)
        ):
            callback(self, self._origpl, self._pl)
        self._origpl = None
    # use the modification time of the newly created temporary file as the
    # filesystem's notion of 'now'
    now = util.fstat(st)[stat.ST_MTIME] & _rangemask

    # enough 'delaywrite' prevents 'pack_dirstate' from dropping
    # the timestamp of each entry in dirstate, because of 'now > mtime'
    delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
    if delaywrite > 0:
        # do we have any files to delay for?
        for f, e in pycompat.iteritems(self._map):
            if e.state == b'n' and e[3] == now:
                import time  # to avoid useless import

                # rather than sleep n seconds, sleep until the next
                # multiple of n seconds
                clock = time.time()
                start = int(clock) - (int(clock) % delaywrite)
                time.sleep(start + delaywrite - clock)
                # trust our estimate that the end is near now
                now = start + delaywrite
                break

    self._map.write(st, now)
    self._lastnormaltime = 0
    self._dirty = False
def _dirignore(self, f):
    """True if *f* itself or any of its parent directories is ignored."""
    if self._ignore(f):
        return True
    return any(self._ignore(p) for p in pathutil.finddirs(f))
780 def _ignorefiles(self):
781 def _ignorefiles(self):
781 files = []
782 files = []
782 if os.path.exists(self._join(b'.hgignore')):
783 if os.path.exists(self._join(b'.hgignore')):
783 files.append(self._join(b'.hgignore'))
784 files.append(self._join(b'.hgignore'))
784 for name, path in self._ui.configitems(b"ui"):
785 for name, path in self._ui.configitems(b"ui"):
785 if name == b'ignore' or name.startswith(b'ignore.'):
786 if name == b'ignore' or name.startswith(b'ignore.'):
786 # we need to use os.path.join here rather than self._join
787 # we need to use os.path.join here rather than self._join
787 # because path is arbitrary and user-specified
788 # because path is arbitrary and user-specified
788 files.append(os.path.join(self._rootdir, util.expandpath(path)))
789 files.append(os.path.join(self._rootdir, util.expandpath(path)))
789 return files
790 return files
790
791
def _ignorefileandline(self, f):
    """Return ``(ignorefile, lineno, line)`` for the first pattern matching *f*.

    Walks every ignore file, following ``subinclude`` directives at most
    once each, and returns ``(None, -1, b"")`` when nothing matches.
    """
    pending = collections.deque(self._ignorefiles())
    visited = set()
    while pending:
        ignorefile = pending.popleft()
        patterns = matchmod.readpatternfile(
            ignorefile, self._ui.warn, sourceinfo=True
        )
        for pattern, lineno, line in patterns:
            kind, p = matchmod._patsplit(pattern, b'glob')
            if kind == b"subinclude":
                # queue the included file instead of matching the pattern
                if p not in visited:
                    pending.append(p)
                continue
            m = matchmod.match(
                self._root, b'', [], [pattern], warn=self._ui.warn
            )
            if m(f):
                return (ignorefile, lineno, line)
        visited.add(ignorefile)
    return (None, -1, b"")
def _walkexplicit(self, match, subrepos):
    """Get stat data about the files explicitly specified by match.

    Return a triple (results, dirsfound, dirsnotfound).
    - results is a mapping from filename to stat result. It also contains
      listings mapping subrepos and .hg to None.
    - dirsfound is a list of files found to be directories.
    - dirsnotfound is a list of files that the dirstate thinks are
      directories and that were not found."""

    def badtype(mode):
        # translate an unsupported stat mode into a readable message
        kind = _(b'unknown')
        for test, label in (
            (stat.S_ISCHR, _(b'character device')),
            (stat.S_ISBLK, _(b'block device')),
            (stat.S_ISFIFO, _(b'fifo')),
            (stat.S_ISSOCK, _(b'socket')),
            (stat.S_ISDIR, _(b'directory')),
        ):
            if test(mode):
                kind = label
                break
        return _(b'unsupported file type (type is %s)') % kind

    badfn = match.bad
    dmap = self._map
    # bind hot lookups to locals
    lstat = os.lstat
    getkind = stat.S_IFMT
    dirkind = stat.S_IFDIR
    regkind = stat.S_IFREG
    lnkkind = stat.S_IFLNK
    join = self._join
    dirsfound = []
    foundadd = dirsfound.append
    dirsnotfound = []
    notfoundadd = dirsnotfound.append

    if not match.isexact() and self._checkcase:
        normalize = self._normalize
    else:
        normalize = None

    # drop explicit files that live inside a subrepo; the subrepo
    # handles them itself
    files = sorted(match.files())
    subrepos.sort()
    i, j = 0, 0
    while i < len(files) and j < len(subrepos):
        subpath = subrepos[j] + b"/"
        if files[i] < subpath:
            i += 1
            continue
        while i < len(files) and files[i].startswith(subpath):
            del files[i]
        j += 1

    if not files or b'' in files:
        files = [b'']
        # constructing the foldmap is expensive, so don't do it for the
        # common case where files is ['']
        normalize = None
    results = dict.fromkeys(subrepos)
    results[b'.hg'] = None

    for ff in files:
        nf = normalize(ff, False, True) if normalize else ff
        if nf in results:
            continue

        try:
            st = lstat(join(nf))
            kind = getkind(st.st_mode)
            if kind == dirkind:
                if nf in dmap:
                    # file replaced by dir on disk but still in dirstate
                    results[nf] = None
                foundadd((nf, ff))
            elif kind == regkind or kind == lnkkind:
                results[nf] = st
            else:
                badfn(ff, badtype(kind))
                if nf in dmap:
                    results[nf] = None
        except OSError as inst:  # nf not found on disk - it is dirstate only
            if nf in dmap:  # does it exactly match a missing file?
                results[nf] = None
            else:  # does it match a missing directory?
                if self._map.hasdir(nf):
                    notfoundadd(nf)
                else:
                    badfn(ff, encoding.strtolocal(inst.strerror))

    # match.files() may contain explicitly-specified paths that shouldn't
    # be taken; drop them from the list of files found. dirsfound/notfound
    # aren't filtered here because they will be tested later.
    if match.anypats():
        for f in list(results):
            if f == b'.hg' or f in subrepos:
                # keep sentinel to disable further out-of-repo walks
                continue
            if not match(f):
                del results[f]

    # Case insensitive filesystems cannot rely on lstat() failing to detect
    # a case-only rename. Prune the stat object for any file that does not
    # match the case in the filesystem, if there are multiple files that
    # normalize to the same path.
    if match.isexact() and self._checkcase:
        normed = {}

        for f, st in pycompat.iteritems(results):
            if st is None:
                continue
            normed.setdefault(util.normcase(f), set()).add(f)

        for norm, paths in pycompat.iteritems(normed):
            if len(paths) > 1:
                for path in paths:
                    folded = self._discoverpath(
                        path, norm, True, None, self._map.dirfoldmap
                    )
                    if path != folded:
                        results[path] = None

    return results, dirsfound, dirsnotfound
948 def walk(self, match, subrepos, unknown, ignored, full=True):
949 def walk(self, match, subrepos, unknown, ignored, full=True):
949 """
950 """
950 Walk recursively through the directory tree, finding all files
951 Walk recursively through the directory tree, finding all files
951 matched by match.
952 matched by match.
952
953
953 If full is False, maybe skip some known-clean files.
954 If full is False, maybe skip some known-clean files.
954
955
955 Return a dict mapping filename to stat-like object (either
956 Return a dict mapping filename to stat-like object (either
956 mercurial.osutil.stat instance or return value of os.stat()).
957 mercurial.osutil.stat instance or return value of os.stat()).
957
958
958 """
959 """
959 # full is a flag that extensions that hook into walk can use -- this
960 # full is a flag that extensions that hook into walk can use -- this
960 # implementation doesn't use it at all. This satisfies the contract
961 # implementation doesn't use it at all. This satisfies the contract
961 # because we only guarantee a "maybe".
962 # because we only guarantee a "maybe".
962
963
963 if ignored:
964 if ignored:
964 ignore = util.never
965 ignore = util.never
965 dirignore = util.never
966 dirignore = util.never
966 elif unknown:
967 elif unknown:
967 ignore = self._ignore
968 ignore = self._ignore
968 dirignore = self._dirignore
969 dirignore = self._dirignore
969 else:
970 else:
970 # if not unknown and not ignored, drop dir recursion and step 2
971 # if not unknown and not ignored, drop dir recursion and step 2
971 ignore = util.always
972 ignore = util.always
972 dirignore = util.always
973 dirignore = util.always
973
974
974 matchfn = match.matchfn
975 matchfn = match.matchfn
975 matchalways = match.always()
976 matchalways = match.always()
976 matchtdir = match.traversedir
977 matchtdir = match.traversedir
977 dmap = self._map
978 dmap = self._map
978 listdir = util.listdir
979 listdir = util.listdir
979 lstat = os.lstat
980 lstat = os.lstat
980 dirkind = stat.S_IFDIR
981 dirkind = stat.S_IFDIR
981 regkind = stat.S_IFREG
982 regkind = stat.S_IFREG
982 lnkkind = stat.S_IFLNK
983 lnkkind = stat.S_IFLNK
983 join = self._join
984 join = self._join
984
985
985 exact = skipstep3 = False
986 exact = skipstep3 = False
986 if match.isexact(): # match.exact
987 if match.isexact(): # match.exact
987 exact = True
988 exact = True
988 dirignore = util.always # skip step 2
989 dirignore = util.always # skip step 2
989 elif match.prefix(): # match.match, no patterns
990 elif match.prefix(): # match.match, no patterns
990 skipstep3 = True
991 skipstep3 = True
991
992
992 if not exact and self._checkcase:
993 if not exact and self._checkcase:
993 normalize = self._normalize
994 normalize = self._normalize
994 normalizefile = self._normalizefile
995 normalizefile = self._normalizefile
995 skipstep3 = False
996 skipstep3 = False
996 else:
997 else:
997 normalize = self._normalize
998 normalize = self._normalize
998 normalizefile = None
999 normalizefile = None
999
1000
1000 # step 1: find all explicit files
1001 # step 1: find all explicit files
1001 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1002 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1002 if matchtdir:
1003 if matchtdir:
1003 for d in work:
1004 for d in work:
1004 matchtdir(d[0])
1005 matchtdir(d[0])
1005 for d in dirsnotfound:
1006 for d in dirsnotfound:
1006 matchtdir(d)
1007 matchtdir(d)
1007
1008
1008 skipstep3 = skipstep3 and not (work or dirsnotfound)
1009 skipstep3 = skipstep3 and not (work or dirsnotfound)
1009 work = [d for d in work if not dirignore(d[0])]
1010 work = [d for d in work if not dirignore(d[0])]
1010
1011
1011 # step 2: visit subdirectories
1012 # step 2: visit subdirectories
1012 def traverse(work, alreadynormed):
1013 def traverse(work, alreadynormed):
1013 wadd = work.append
1014 wadd = work.append
1014 while work:
1015 while work:
1015 tracing.counter('dirstate.walk work', len(work))
1016 tracing.counter('dirstate.walk work', len(work))
1016 nd = work.pop()
1017 nd = work.pop()
1017 visitentries = match.visitchildrenset(nd)
1018 visitentries = match.visitchildrenset(nd)
1018 if not visitentries:
1019 if not visitentries:
1019 continue
1020 continue
1020 if visitentries == b'this' or visitentries == b'all':
1021 if visitentries == b'this' or visitentries == b'all':
1021 visitentries = None
1022 visitentries = None
1022 skip = None
1023 skip = None
1023 if nd != b'':
1024 if nd != b'':
1024 skip = b'.hg'
1025 skip = b'.hg'
1025 try:
1026 try:
1026 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1027 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1027 entries = listdir(join(nd), stat=True, skip=skip)
1028 entries = listdir(join(nd), stat=True, skip=skip)
1028 except OSError as inst:
1029 except OSError as inst:
1029 if inst.errno in (errno.EACCES, errno.ENOENT):
1030 if inst.errno in (errno.EACCES, errno.ENOENT):
1030 match.bad(
1031 match.bad(
1031 self.pathto(nd), encoding.strtolocal(inst.strerror)
1032 self.pathto(nd), encoding.strtolocal(inst.strerror)
1032 )
1033 )
1033 continue
1034 continue
1034 raise
1035 raise
1035 for f, kind, st in entries:
1036 for f, kind, st in entries:
1036 # Some matchers may return files in the visitentries set,
1037 # Some matchers may return files in the visitentries set,
1037 # instead of 'this', if the matcher explicitly mentions them
1038 # instead of 'this', if the matcher explicitly mentions them
1038 # and is not an exactmatcher. This is acceptable; we do not
1039 # and is not an exactmatcher. This is acceptable; we do not
1039 # make any hard assumptions about file-or-directory below
1040 # make any hard assumptions about file-or-directory below
1040 # based on the presence of `f` in visitentries. If
1041 # based on the presence of `f` in visitentries. If
1041 # visitchildrenset returned a set, we can always skip the
1042 # visitchildrenset returned a set, we can always skip the
1042 # entries *not* in the set it provided regardless of whether
1043 # entries *not* in the set it provided regardless of whether
1043 # they're actually a file or a directory.
1044 # they're actually a file or a directory.
1044 if visitentries and f not in visitentries:
1045 if visitentries and f not in visitentries:
1045 continue
1046 continue
1046 if normalizefile:
1047 if normalizefile:
1047 # even though f might be a directory, we're only
1048 # even though f might be a directory, we're only
1048 # interested in comparing it to files currently in the
1049 # interested in comparing it to files currently in the
1049 # dmap -- therefore normalizefile is enough
1050 # dmap -- therefore normalizefile is enough
1050 nf = normalizefile(
1051 nf = normalizefile(
1051 nd and (nd + b"/" + f) or f, True, True
1052 nd and (nd + b"/" + f) or f, True, True
1052 )
1053 )
1053 else:
1054 else:
1054 nf = nd and (nd + b"/" + f) or f
1055 nf = nd and (nd + b"/" + f) or f
1055 if nf not in results:
1056 if nf not in results:
1056 if kind == dirkind:
1057 if kind == dirkind:
1057 if not ignore(nf):
1058 if not ignore(nf):
1058 if matchtdir:
1059 if matchtdir:
1059 matchtdir(nf)
1060 matchtdir(nf)
1060 wadd(nf)
1061 wadd(nf)
1061 if nf in dmap and (matchalways or matchfn(nf)):
1062 if nf in dmap and (matchalways or matchfn(nf)):
1062 results[nf] = None
1063 results[nf] = None
1063 elif kind == regkind or kind == lnkkind:
1064 elif kind == regkind or kind == lnkkind:
1064 if nf in dmap:
1065 if nf in dmap:
1065 if matchalways or matchfn(nf):
1066 if matchalways or matchfn(nf):
1066 results[nf] = st
1067 results[nf] = st
1067 elif (matchalways or matchfn(nf)) and not ignore(
1068 elif (matchalways or matchfn(nf)) and not ignore(
1068 nf
1069 nf
1069 ):
1070 ):
1070 # unknown file -- normalize if necessary
1071 # unknown file -- normalize if necessary
1071 if not alreadynormed:
1072 if not alreadynormed:
1072 nf = normalize(nf, False, True)
1073 nf = normalize(nf, False, True)
1073 results[nf] = st
1074 results[nf] = st
1074 elif nf in dmap and (matchalways or matchfn(nf)):
1075 elif nf in dmap and (matchalways or matchfn(nf)):
1075 results[nf] = None
1076 results[nf] = None
1076
1077
1077 for nd, d in work:
1078 for nd, d in work:
1078 # alreadynormed means that processwork doesn't have to do any
1079 # alreadynormed means that processwork doesn't have to do any
1079 # expensive directory normalization
1080 # expensive directory normalization
1080 alreadynormed = not normalize or nd == d
1081 alreadynormed = not normalize or nd == d
1081 traverse([d], alreadynormed)
1082 traverse([d], alreadynormed)
1082
1083
1083 for s in subrepos:
1084 for s in subrepos:
1084 del results[s]
1085 del results[s]
1085 del results[b'.hg']
1086 del results[b'.hg']
1086
1087
1087 # step 3: visit remaining files from dmap
1088 # step 3: visit remaining files from dmap
1088 if not skipstep3 and not exact:
1089 if not skipstep3 and not exact:
1089 # If a dmap file is not in results yet, it was either
1090 # If a dmap file is not in results yet, it was either
1090 # a) not matching matchfn b) ignored, c) missing, or d) under a
1091 # a) not matching matchfn b) ignored, c) missing, or d) under a
1091 # symlink directory.
1092 # symlink directory.
1092 if not results and matchalways:
1093 if not results and matchalways:
1093 visit = [f for f in dmap]
1094 visit = [f for f in dmap]
1094 else:
1095 else:
1095 visit = [f for f in dmap if f not in results and matchfn(f)]
1096 visit = [f for f in dmap if f not in results and matchfn(f)]
1096 visit.sort()
1097 visit.sort()
1097
1098
1098 if unknown:
1099 if unknown:
1099 # unknown == True means we walked all dirs under the roots
1100 # unknown == True means we walked all dirs under the roots
1100 # that wasn't ignored, and everything that matched was stat'ed
1101 # that wasn't ignored, and everything that matched was stat'ed
1101 # and is already in results.
1102 # and is already in results.
1102 # The rest must thus be ignored or under a symlink.
1103 # The rest must thus be ignored or under a symlink.
1103 audit_path = pathutil.pathauditor(self._root, cached=True)
1104 audit_path = pathutil.pathauditor(self._root, cached=True)
1104
1105
1105 for nf in iter(visit):
1106 for nf in iter(visit):
1106 # If a stat for the same file was already added with a
1107 # If a stat for the same file was already added with a
1107 # different case, don't add one for this, since that would
1108 # different case, don't add one for this, since that would
1108 # make it appear as if the file exists under both names
1109 # make it appear as if the file exists under both names
1109 # on disk.
1110 # on disk.
1110 if (
1111 if (
1111 normalizefile
1112 normalizefile
1112 and normalizefile(nf, True, True) in results
1113 and normalizefile(nf, True, True) in results
1113 ):
1114 ):
1114 results[nf] = None
1115 results[nf] = None
1115 # Report ignored items in the dmap as long as they are not
1116 # Report ignored items in the dmap as long as they are not
1116 # under a symlink directory.
1117 # under a symlink directory.
1117 elif audit_path.check(nf):
1118 elif audit_path.check(nf):
1118 try:
1119 try:
1119 results[nf] = lstat(join(nf))
1120 results[nf] = lstat(join(nf))
1120 # file was just ignored, no links, and exists
1121 # file was just ignored, no links, and exists
1121 except OSError:
1122 except OSError:
1122 # file doesn't exist
1123 # file doesn't exist
1123 results[nf] = None
1124 results[nf] = None
1124 else:
1125 else:
1125 # It's either missing or under a symlink directory
1126 # It's either missing or under a symlink directory
1126 # which we in this case report as missing
1127 # which we in this case report as missing
1127 results[nf] = None
1128 results[nf] = None
1128 else:
1129 else:
1129 # We may not have walked the full directory tree above,
1130 # We may not have walked the full directory tree above,
1130 # so stat and check everything we missed.
1131 # so stat and check everything we missed.
1131 iv = iter(visit)
1132 iv = iter(visit)
1132 for st in util.statfiles([join(i) for i in visit]):
1133 for st in util.statfiles([join(i) for i in visit]):
1133 results[next(iv)] = st
1134 results[next(iv)] = st
1134 return results
1135 return results
1135
1136
1136 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1137 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1137 # Force Rayon (Rust parallelism library) to respect the number of
1138 # Force Rayon (Rust parallelism library) to respect the number of
1138 # workers. This is a temporary workaround until Rust code knows
1139 # workers. This is a temporary workaround until Rust code knows
1139 # how to read the config file.
1140 # how to read the config file.
1140 numcpus = self._ui.configint(b"worker", b"numcpus")
1141 numcpus = self._ui.configint(b"worker", b"numcpus")
1141 if numcpus is not None:
1142 if numcpus is not None:
1142 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1143 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1143
1144
1144 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1145 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1145 if not workers_enabled:
1146 if not workers_enabled:
1146 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1147 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1147
1148
1148 (
1149 (
1149 lookup,
1150 lookup,
1150 modified,
1151 modified,
1151 added,
1152 added,
1152 removed,
1153 removed,
1153 deleted,
1154 deleted,
1154 clean,
1155 clean,
1155 ignored,
1156 ignored,
1156 unknown,
1157 unknown,
1157 warnings,
1158 warnings,
1158 bad,
1159 bad,
1159 traversed,
1160 traversed,
1160 dirty,
1161 dirty,
1161 ) = rustmod.status(
1162 ) = rustmod.status(
1162 self._map._rustmap,
1163 self._map._rustmap,
1163 matcher,
1164 matcher,
1164 self._rootdir,
1165 self._rootdir,
1165 self._ignorefiles(),
1166 self._ignorefiles(),
1166 self._checkexec,
1167 self._checkexec,
1167 self._lastnormaltime,
1168 self._lastnormaltime,
1168 bool(list_clean),
1169 bool(list_clean),
1169 bool(list_ignored),
1170 bool(list_ignored),
1170 bool(list_unknown),
1171 bool(list_unknown),
1171 bool(matcher.traversedir),
1172 bool(matcher.traversedir),
1172 )
1173 )
1173
1174
1174 self._dirty |= dirty
1175 self._dirty |= dirty
1175
1176
1176 if matcher.traversedir:
1177 if matcher.traversedir:
1177 for dir in traversed:
1178 for dir in traversed:
1178 matcher.traversedir(dir)
1179 matcher.traversedir(dir)
1179
1180
1180 if self._ui.warn:
1181 if self._ui.warn:
1181 for item in warnings:
1182 for item in warnings:
1182 if isinstance(item, tuple):
1183 if isinstance(item, tuple):
1183 file_path, syntax = item
1184 file_path, syntax = item
1184 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1185 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1185 file_path,
1186 file_path,
1186 syntax,
1187 syntax,
1187 )
1188 )
1188 self._ui.warn(msg)
1189 self._ui.warn(msg)
1189 else:
1190 else:
1190 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1191 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1191 self._ui.warn(
1192 self._ui.warn(
1192 msg
1193 msg
1193 % (
1194 % (
1194 pathutil.canonpath(
1195 pathutil.canonpath(
1195 self._rootdir, self._rootdir, item
1196 self._rootdir, self._rootdir, item
1196 ),
1197 ),
1197 b"No such file or directory",
1198 b"No such file or directory",
1198 )
1199 )
1199 )
1200 )
1200
1201
1201 for (fn, message) in bad:
1202 for (fn, message) in bad:
1202 matcher.bad(fn, encoding.strtolocal(message))
1203 matcher.bad(fn, encoding.strtolocal(message))
1203
1204
1204 status = scmutil.status(
1205 status = scmutil.status(
1205 modified=modified,
1206 modified=modified,
1206 added=added,
1207 added=added,
1207 removed=removed,
1208 removed=removed,
1208 deleted=deleted,
1209 deleted=deleted,
1209 unknown=unknown,
1210 unknown=unknown,
1210 ignored=ignored,
1211 ignored=ignored,
1211 clean=clean,
1212 clean=clean,
1212 )
1213 )
1213 return (lookup, status)
1214 return (lookup, status)
1214
1215
1215 def status(self, match, subrepos, ignored, clean, unknown):
1216 def status(self, match, subrepos, ignored, clean, unknown):
1216 """Determine the status of the working copy relative to the
1217 """Determine the status of the working copy relative to the
1217 dirstate and return a pair of (unsure, status), where status is of type
1218 dirstate and return a pair of (unsure, status), where status is of type
1218 scmutil.status and:
1219 scmutil.status and:
1219
1220
1220 unsure:
1221 unsure:
1221 files that might have been modified since the dirstate was
1222 files that might have been modified since the dirstate was
1222 written, but need to be read to be sure (size is the same
1223 written, but need to be read to be sure (size is the same
1223 but mtime differs)
1224 but mtime differs)
1224 status.modified:
1225 status.modified:
1225 files that have definitely been modified since the dirstate
1226 files that have definitely been modified since the dirstate
1226 was written (different size or mode)
1227 was written (different size or mode)
1227 status.clean:
1228 status.clean:
1228 files that have definitely not been modified since the
1229 files that have definitely not been modified since the
1229 dirstate was written
1230 dirstate was written
1230 """
1231 """
1231 listignored, listclean, listunknown = ignored, clean, unknown
1232 listignored, listclean, listunknown = ignored, clean, unknown
1232 lookup, modified, added, unknown, ignored = [], [], [], [], []
1233 lookup, modified, added, unknown, ignored = [], [], [], [], []
1233 removed, deleted, clean = [], [], []
1234 removed, deleted, clean = [], [], []
1234
1235
1235 dmap = self._map
1236 dmap = self._map
1236 dmap.preload()
1237 dmap.preload()
1237
1238
1238 use_rust = True
1239 use_rust = True
1239
1240
1240 allowed_matchers = (
1241 allowed_matchers = (
1241 matchmod.alwaysmatcher,
1242 matchmod.alwaysmatcher,
1242 matchmod.exactmatcher,
1243 matchmod.exactmatcher,
1243 matchmod.includematcher,
1244 matchmod.includematcher,
1244 )
1245 )
1245
1246
1246 if rustmod is None:
1247 if rustmod is None:
1247 use_rust = False
1248 use_rust = False
1248 elif self._checkcase:
1249 elif self._checkcase:
1249 # Case-insensitive filesystems are not handled yet
1250 # Case-insensitive filesystems are not handled yet
1250 use_rust = False
1251 use_rust = False
1251 elif subrepos:
1252 elif subrepos:
1252 use_rust = False
1253 use_rust = False
1253 elif sparse.enabled:
1254 elif sparse.enabled:
1254 use_rust = False
1255 use_rust = False
1255 elif not isinstance(match, allowed_matchers):
1256 elif not isinstance(match, allowed_matchers):
1256 # Some matchers have yet to be implemented
1257 # Some matchers have yet to be implemented
1257 use_rust = False
1258 use_rust = False
1258
1259
1259 if use_rust:
1260 if use_rust:
1260 try:
1261 try:
1261 return self._rust_status(
1262 return self._rust_status(
1262 match, listclean, listignored, listunknown
1263 match, listclean, listignored, listunknown
1263 )
1264 )
1264 except rustmod.FallbackError:
1265 except rustmod.FallbackError:
1265 pass
1266 pass
1266
1267
1267 def noop(f):
1268 def noop(f):
1268 pass
1269 pass
1269
1270
1270 dcontains = dmap.__contains__
1271 dcontains = dmap.__contains__
1271 dget = dmap.__getitem__
1272 dget = dmap.__getitem__
1272 ladd = lookup.append # aka "unsure"
1273 ladd = lookup.append # aka "unsure"
1273 madd = modified.append
1274 madd = modified.append
1274 aadd = added.append
1275 aadd = added.append
1275 uadd = unknown.append if listunknown else noop
1276 uadd = unknown.append if listunknown else noop
1276 iadd = ignored.append if listignored else noop
1277 iadd = ignored.append if listignored else noop
1277 radd = removed.append
1278 radd = removed.append
1278 dadd = deleted.append
1279 dadd = deleted.append
1279 cadd = clean.append if listclean else noop
1280 cadd = clean.append if listclean else noop
1280 mexact = match.exact
1281 mexact = match.exact
1281 dirignore = self._dirignore
1282 dirignore = self._dirignore
1282 checkexec = self._checkexec
1283 checkexec = self._checkexec
1283 copymap = self._map.copymap
1284 copymap = self._map.copymap
1284 lastnormaltime = self._lastnormaltime
1285 lastnormaltime = self._lastnormaltime
1285
1286
1286 # We need to do full walks when either
1287 # We need to do full walks when either
1287 # - we're listing all clean files, or
1288 # - we're listing all clean files, or
1288 # - match.traversedir does something, because match.traversedir should
1289 # - match.traversedir does something, because match.traversedir should
1289 # be called for every dir in the working dir
1290 # be called for every dir in the working dir
1290 full = listclean or match.traversedir is not None
1291 full = listclean or match.traversedir is not None
1291 for fn, st in pycompat.iteritems(
1292 for fn, st in pycompat.iteritems(
1292 self.walk(match, subrepos, listunknown, listignored, full=full)
1293 self.walk(match, subrepos, listunknown, listignored, full=full)
1293 ):
1294 ):
1294 if not dcontains(fn):
1295 if not dcontains(fn):
1295 if (listignored or mexact(fn)) and dirignore(fn):
1296 if (listignored or mexact(fn)) and dirignore(fn):
1296 if listignored:
1297 if listignored:
1297 iadd(fn)
1298 iadd(fn)
1298 else:
1299 else:
1299 uadd(fn)
1300 uadd(fn)
1300 continue
1301 continue
1301
1302
1302 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1303 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1303 # written like that for performance reasons. dmap[fn] is not a
1304 # written like that for performance reasons. dmap[fn] is not a
1304 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1305 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1305 # opcode has fast paths when the value to be unpacked is a tuple or
1306 # opcode has fast paths when the value to be unpacked is a tuple or
1306 # a list, but falls back to creating a full-fledged iterator in
1307 # a list, but falls back to creating a full-fledged iterator in
1307 # general. That is much slower than simply accessing and storing the
1308 # general. That is much slower than simply accessing and storing the
1308 # tuple members one by one.
1309 # tuple members one by one.
1309 t = dget(fn)
1310 t = dget(fn)
1310 state = t.state
1311 state = t.state
1311 mode = t[1]
1312 mode = t[1]
1312 size = t[2]
1313 size = t[2]
1313 time = t[3]
1314 time = t[3]
1314
1315
1315 if not st and state in b"nma":
1316 if not st and state in b"nma":
1316 dadd(fn)
1317 dadd(fn)
1317 elif state == b'n':
1318 elif state == b'n':
1318 if (
1319 if (
1319 size >= 0
1320 size >= 0
1320 and (
1321 and (
1321 (size != st.st_size and size != st.st_size & _rangemask)
1322 (size != st.st_size and size != st.st_size & _rangemask)
1322 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1323 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1323 )
1324 )
1324 or t.from_p2
1325 or t.from_p2
1325 or fn in copymap
1326 or fn in copymap
1326 ):
1327 ):
1327 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1328 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1328 # issue6456: Size returned may be longer due to
1329 # issue6456: Size returned may be longer due to
1329 # encryption on EXT-4 fscrypt, undecided.
1330 # encryption on EXT-4 fscrypt, undecided.
1330 ladd(fn)
1331 ladd(fn)
1331 else:
1332 else:
1332 madd(fn)
1333 madd(fn)
1333 elif (
1334 elif (
1334 time != st[stat.ST_MTIME]
1335 time != st[stat.ST_MTIME]
1335 and time != st[stat.ST_MTIME] & _rangemask
1336 and time != st[stat.ST_MTIME] & _rangemask
1336 ):
1337 ):
1337 ladd(fn)
1338 ladd(fn)
1338 elif st[stat.ST_MTIME] == lastnormaltime:
1339 elif st[stat.ST_MTIME] == lastnormaltime:
1339 # fn may have just been marked as normal and it may have
1340 # fn may have just been marked as normal and it may have
1340 # changed in the same second without changing its size.
1341 # changed in the same second without changing its size.
1341 # This can happen if we quickly do multiple commits.
1342 # This can happen if we quickly do multiple commits.
1342 # Force lookup, so we don't miss such a racy file change.
1343 # Force lookup, so we don't miss such a racy file change.
1343 ladd(fn)
1344 ladd(fn)
1344 elif listclean:
1345 elif listclean:
1345 cadd(fn)
1346 cadd(fn)
1346 elif t.merged:
1347 elif t.merged:
1347 madd(fn)
1348 madd(fn)
1348 elif state == b'a':
1349 elif state == b'a':
1349 aadd(fn)
1350 aadd(fn)
1350 elif t.removed:
1351 elif t.removed:
1351 radd(fn)
1352 radd(fn)
1352 status = scmutil.status(
1353 status = scmutil.status(
1353 modified, added, removed, deleted, unknown, ignored, clean
1354 modified, added, removed, deleted, unknown, ignored, clean
1354 )
1355 )
1355 return (lookup, status)
1356 return (lookup, status)
1356
1357
1357 def matches(self, match):
1358 def matches(self, match):
1358 """
1359 """
1359 return files in the dirstate (in whatever state) filtered by match
1360 return files in the dirstate (in whatever state) filtered by match
1360 """
1361 """
1361 dmap = self._map
1362 dmap = self._map
1362 if rustmod is not None:
1363 if rustmod is not None:
1363 dmap = self._map._rustmap
1364 dmap = self._map._rustmap
1364
1365
1365 if match.always():
1366 if match.always():
1366 return dmap.keys()
1367 return dmap.keys()
1367 files = match.files()
1368 files = match.files()
1368 if match.isexact():
1369 if match.isexact():
1369 # fast path -- filter the other way around, since typically files is
1370 # fast path -- filter the other way around, since typically files is
1370 # much smaller than dmap
1371 # much smaller than dmap
1371 return [f for f in files if f in dmap]
1372 return [f for f in files if f in dmap]
1372 if match.prefix() and all(fn in dmap for fn in files):
1373 if match.prefix() and all(fn in dmap for fn in files):
1373 # fast path -- all the values are known to be files, so just return
1374 # fast path -- all the values are known to be files, so just return
1374 # that
1375 # that
1375 return list(files)
1376 return list(files)
1376 return [f for f in dmap if match(f)]
1377 return [f for f in dmap if match(f)]
1377
1378
1378 def _actualfilename(self, tr):
1379 def _actualfilename(self, tr):
1379 if tr:
1380 if tr:
1380 return self._pendingfilename
1381 return self._pendingfilename
1381 else:
1382 else:
1382 return self._filename
1383 return self._filename
1383
1384
1384 def savebackup(self, tr, backupname):
1385 def savebackup(self, tr, backupname):
1385 '''Save current dirstate into backup file'''
1386 '''Save current dirstate into backup file'''
1386 filename = self._actualfilename(tr)
1387 filename = self._actualfilename(tr)
1387 assert backupname != filename
1388 assert backupname != filename
1388
1389
1389 # use '_writedirstate' instead of 'write' to write changes certainly,
1390 # use '_writedirstate' instead of 'write' to write changes certainly,
1390 # because the latter omits writing out if transaction is running.
1391 # because the latter omits writing out if transaction is running.
1391 # output file will be used to create backup of dirstate at this point.
1392 # output file will be used to create backup of dirstate at this point.
1392 if self._dirty or not self._opener.exists(filename):
1393 if self._dirty or not self._opener.exists(filename):
1393 self._writedirstate(
1394 self._writedirstate(
1394 self._opener(filename, b"w", atomictemp=True, checkambig=True)
1395 self._opener(filename, b"w", atomictemp=True, checkambig=True)
1395 )
1396 )
1396
1397
1397 if tr:
1398 if tr:
1398 # ensure that subsequent tr.writepending returns True for
1399 # ensure that subsequent tr.writepending returns True for
1399 # changes written out above, even if dirstate is never
1400 # changes written out above, even if dirstate is never
1400 # changed after this
1401 # changed after this
1401 tr.addfilegenerator(
1402 tr.addfilegenerator(
1402 b'dirstate',
1403 b'dirstate',
1403 (self._filename,),
1404 (self._filename,),
1404 self._writedirstate,
1405 self._writedirstate,
1405 location=b'plain',
1406 location=b'plain',
1406 )
1407 )
1407
1408
1408 # ensure that pending file written above is unlinked at
1409 # ensure that pending file written above is unlinked at
1409 # failure, even if tr.writepending isn't invoked until the
1410 # failure, even if tr.writepending isn't invoked until the
1410 # end of this transaction
1411 # end of this transaction
1411 tr.registertmp(filename, location=b'plain')
1412 tr.registertmp(filename, location=b'plain')
1412
1413
1413 self._opener.tryunlink(backupname)
1414 self._opener.tryunlink(backupname)
1414 # hardlink backup is okay because _writedirstate is always called
1415 # hardlink backup is okay because _writedirstate is always called
1415 # with an "atomictemp=True" file.
1416 # with an "atomictemp=True" file.
1416 util.copyfile(
1417 util.copyfile(
1417 self._opener.join(filename),
1418 self._opener.join(filename),
1418 self._opener.join(backupname),
1419 self._opener.join(backupname),
1419 hardlink=True,
1420 hardlink=True,
1420 )
1421 )
1421
1422
1422 def restorebackup(self, tr, backupname):
1423 def restorebackup(self, tr, backupname):
1423 '''Restore dirstate by backup file'''
1424 '''Restore dirstate by backup file'''
1424 # this "invalidate()" prevents "wlock.release()" from writing
1425 # this "invalidate()" prevents "wlock.release()" from writing
1425 # changes of dirstate out after restoring from backup file
1426 # changes of dirstate out after restoring from backup file
1426 self.invalidate()
1427 self.invalidate()
1427 filename = self._actualfilename(tr)
1428 filename = self._actualfilename(tr)
1428 o = self._opener
1429 o = self._opener
1429 if util.samefile(o.join(backupname), o.join(filename)):
1430 if util.samefile(o.join(backupname), o.join(filename)):
1430 o.unlink(backupname)
1431 o.unlink(backupname)
1431 else:
1432 else:
1432 o.rename(backupname, filename, checkambig=True)
1433 o.rename(backupname, filename, checkambig=True)
1433
1434
1434 def clearbackup(self, tr, backupname):
1435 def clearbackup(self, tr, backupname):
1435 '''Clear backup file'''
1436 '''Clear backup file'''
1436 self._opener.unlink(backupname)
1437 self._opener.unlink(backupname)
General Comments 0
You need to be logged in to leave comments. Login now