dirstate: split a not-so-one-liner...
marmoute
r48274:f93298a4 default
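The only hunk visible in this portion of the diff is a small readability refactor in dirstate._addpath() (file lines 436-438): the one-liner that built the error message inline inside raise error.Abort(...) is split so the message is formatted in named steps before raising. Below is a minimal, self-contained sketch of that pattern, under stated assumptions: Abort and check_not_a_tracked_dir are hypothetical stand-ins rather than Mercurial APIs, and plain strings replace the b'' literals and the _() translation helper used in the real code.

# Minimal standalone sketch of the "split the one-liner" pattern shown in the
# hunk below. Abort and check_not_a_tracked_dir are hypothetical stand-ins,
# not Mercurial APIs; plain str replaces bytes literals and the _() helper.

class Abort(Exception):
    """Stand-in for mercurial.error.Abort in this illustration."""


def check_not_a_tracked_dir(f, tracked_dirs):
    """Raise Abort if f names a directory already tracked in the dirstate."""
    if f in tracked_dirs:
        # Before: the message was built inline in the raise call, e.g.
        #     raise Abort('directory %r already in dirstate' % f)
        # After (the pattern of this changeset): build it in named steps.
        msg = 'directory %r already in dirstate'
        msg %= f
        raise Abort(msg)


check_not_a_tracked_dir('src', tracked_dirs={'docs'})      # passes silently
# check_not_a_tracked_dir('docs', tracked_dirs={'docs'})   # would raise Abort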
@@ -1,1982 +1,1982 @@
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .pycompat import delattr
17 from .pycompat import delattr
18
18
19 from hgdemandimport import tracing
19 from hgdemandimport import tracing
20
20
21 from . import (
21 from . import (
22 encoding,
22 encoding,
23 error,
23 error,
24 match as matchmod,
24 match as matchmod,
25 pathutil,
25 pathutil,
26 policy,
26 policy,
27 pycompat,
27 pycompat,
28 scmutil,
28 scmutil,
29 sparse,
29 sparse,
30 txnutil,
30 txnutil,
31 util,
31 util,
32 )
32 )
33
33
34 from .interfaces import (
34 from .interfaces import (
35 dirstate as intdirstate,
35 dirstate as intdirstate,
36 util as interfaceutil,
36 util as interfaceutil,
37 )
37 )
38
38
39 parsers = policy.importmod('parsers')
39 parsers = policy.importmod('parsers')
40 rustmod = policy.importrust('dirstate')
40 rustmod = policy.importrust('dirstate')
41
41
42 SUPPORTS_DIRSTATE_V2 = rustmod is not None
42 SUPPORTS_DIRSTATE_V2 = rustmod is not None
43
43
44 propertycache = util.propertycache
44 propertycache = util.propertycache
45 filecache = scmutil.filecache
45 filecache = scmutil.filecache
46 _rangemask = 0x7FFFFFFF
46 _rangemask = 0x7FFFFFFF
47
47
48 dirstatetuple = parsers.dirstatetuple
48 dirstatetuple = parsers.dirstatetuple
49
49
50
50
51 class repocache(filecache):
51 class repocache(filecache):
52 """filecache for files in .hg/"""
52 """filecache for files in .hg/"""
53
53
54 def join(self, obj, fname):
54 def join(self, obj, fname):
55 return obj._opener.join(fname)
55 return obj._opener.join(fname)
56
56
57
57
58 class rootcache(filecache):
58 class rootcache(filecache):
59 """filecache for files in the repository root"""
59 """filecache for files in the repository root"""
60
60
61 def join(self, obj, fname):
61 def join(self, obj, fname):
62 return obj._join(fname)
62 return obj._join(fname)
63
63
64
64
65 def _getfsnow(vfs):
65 def _getfsnow(vfs):
66 '''Get "now" timestamp on filesystem'''
66 '''Get "now" timestamp on filesystem'''
67 tmpfd, tmpname = vfs.mkstemp()
67 tmpfd, tmpname = vfs.mkstemp()
68 try:
68 try:
69 return os.fstat(tmpfd)[stat.ST_MTIME]
69 return os.fstat(tmpfd)[stat.ST_MTIME]
70 finally:
70 finally:
71 os.close(tmpfd)
71 os.close(tmpfd)
72 vfs.unlink(tmpname)
72 vfs.unlink(tmpname)
73
73
74
74
75 @interfaceutil.implementer(intdirstate.idirstate)
75 @interfaceutil.implementer(intdirstate.idirstate)
76 class dirstate(object):
76 class dirstate(object):
77 def __init__(
77 def __init__(
78 self,
78 self,
79 opener,
79 opener,
80 ui,
80 ui,
81 root,
81 root,
82 validate,
82 validate,
83 sparsematchfn,
83 sparsematchfn,
84 nodeconstants,
84 nodeconstants,
85 use_dirstate_v2,
85 use_dirstate_v2,
86 ):
86 ):
87 """Create a new dirstate object.
87 """Create a new dirstate object.
88
88
89 opener is an open()-like callable that can be used to open the
89 opener is an open()-like callable that can be used to open the
90 dirstate file; root is the root of the directory tracked by
90 dirstate file; root is the root of the directory tracked by
91 the dirstate.
91 the dirstate.
92 """
92 """
93 self._use_dirstate_v2 = use_dirstate_v2
93 self._use_dirstate_v2 = use_dirstate_v2
94 self._nodeconstants = nodeconstants
94 self._nodeconstants = nodeconstants
95 self._opener = opener
95 self._opener = opener
96 self._validate = validate
96 self._validate = validate
97 self._root = root
97 self._root = root
98 self._sparsematchfn = sparsematchfn
98 self._sparsematchfn = sparsematchfn
99 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
99 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
100 # UNC path pointing to root share (issue4557)
100 # UNC path pointing to root share (issue4557)
101 self._rootdir = pathutil.normasprefix(root)
101 self._rootdir = pathutil.normasprefix(root)
102 self._dirty = False
102 self._dirty = False
103 self._lastnormaltime = 0
103 self._lastnormaltime = 0
104 self._ui = ui
104 self._ui = ui
105 self._filecache = {}
105 self._filecache = {}
106 self._parentwriters = 0
106 self._parentwriters = 0
107 self._filename = b'dirstate'
107 self._filename = b'dirstate'
108 self._pendingfilename = b'%s.pending' % self._filename
108 self._pendingfilename = b'%s.pending' % self._filename
109 self._plchangecallbacks = {}
109 self._plchangecallbacks = {}
110 self._origpl = None
110 self._origpl = None
111 self._updatedfiles = set()
111 self._updatedfiles = set()
112 self._mapcls = dirstatemap
112 self._mapcls = dirstatemap
113 # Access and cache cwd early, so we don't access it for the first time
113 # Access and cache cwd early, so we don't access it for the first time
114 # after a working-copy update caused it to not exist (accessing it then
114 # after a working-copy update caused it to not exist (accessing it then
115 # raises an exception).
115 # raises an exception).
116 self._cwd
116 self._cwd
117
117
118 def prefetch_parents(self):
118 def prefetch_parents(self):
119 """make sure the parents are loaded
119 """make sure the parents are loaded
120
120
121 Used to avoid a race condition.
121 Used to avoid a race condition.
122 """
122 """
123 self._pl
123 self._pl
124
124
125 @contextlib.contextmanager
125 @contextlib.contextmanager
126 def parentchange(self):
126 def parentchange(self):
127 """Context manager for handling dirstate parents.
127 """Context manager for handling dirstate parents.
128
128
129 If an exception occurs in the scope of the context manager,
129 If an exception occurs in the scope of the context manager,
130 the incoherent dirstate won't be written when wlock is
130 the incoherent dirstate won't be written when wlock is
131 released.
131 released.
132 """
132 """
133 self._parentwriters += 1
133 self._parentwriters += 1
134 yield
134 yield
135 # Typically we want the "undo" step of a context manager in a
135 # Typically we want the "undo" step of a context manager in a
136 # finally block so it happens even when an exception
136 # finally block so it happens even when an exception
137 # occurs. In this case, however, we only want to decrement
137 # occurs. In this case, however, we only want to decrement
138 # parentwriters if the code in the with statement exits
138 # parentwriters if the code in the with statement exits
139 # normally, so we don't have a try/finally here on purpose.
139 # normally, so we don't have a try/finally here on purpose.
140 self._parentwriters -= 1
140 self._parentwriters -= 1
141
141
142 def pendingparentchange(self):
142 def pendingparentchange(self):
143 """Returns true if the dirstate is in the middle of a set of changes
143 """Returns true if the dirstate is in the middle of a set of changes
144 that modify the dirstate parent.
144 that modify the dirstate parent.
145 """
145 """
146 return self._parentwriters > 0
146 return self._parentwriters > 0
147
147
148 @propertycache
148 @propertycache
149 def _map(self):
149 def _map(self):
150 """Return the dirstate contents (see documentation for dirstatemap)."""
150 """Return the dirstate contents (see documentation for dirstatemap)."""
151 self._map = self._mapcls(
151 self._map = self._mapcls(
152 self._ui,
152 self._ui,
153 self._opener,
153 self._opener,
154 self._root,
154 self._root,
155 self._nodeconstants,
155 self._nodeconstants,
156 self._use_dirstate_v2,
156 self._use_dirstate_v2,
157 )
157 )
158 return self._map
158 return self._map
159
159
160 @property
160 @property
161 def _sparsematcher(self):
161 def _sparsematcher(self):
162 """The matcher for the sparse checkout.
162 """The matcher for the sparse checkout.
163
163
164 The working directory may not include every file from a manifest. The
164 The working directory may not include every file from a manifest. The
165 matcher obtained by this property will match a path if it is to be
165 matcher obtained by this property will match a path if it is to be
166 included in the working directory.
166 included in the working directory.
167 """
167 """
168 # TODO there is potential to cache this property. For now, the matcher
168 # TODO there is potential to cache this property. For now, the matcher
169 # is resolved on every access. (But the called function does use a
169 # is resolved on every access. (But the called function does use a
170 # cache to keep the lookup fast.)
170 # cache to keep the lookup fast.)
171 return self._sparsematchfn()
171 return self._sparsematchfn()
172
172
173 @repocache(b'branch')
173 @repocache(b'branch')
174 def _branch(self):
174 def _branch(self):
175 try:
175 try:
176 return self._opener.read(b"branch").strip() or b"default"
176 return self._opener.read(b"branch").strip() or b"default"
177 except IOError as inst:
177 except IOError as inst:
178 if inst.errno != errno.ENOENT:
178 if inst.errno != errno.ENOENT:
179 raise
179 raise
180 return b"default"
180 return b"default"
181
181
182 @property
182 @property
183 def _pl(self):
183 def _pl(self):
184 return self._map.parents()
184 return self._map.parents()
185
185
186 def hasdir(self, d):
186 def hasdir(self, d):
187 return self._map.hastrackeddir(d)
187 return self._map.hastrackeddir(d)
188
188
189 @rootcache(b'.hgignore')
189 @rootcache(b'.hgignore')
190 def _ignore(self):
190 def _ignore(self):
191 files = self._ignorefiles()
191 files = self._ignorefiles()
192 if not files:
192 if not files:
193 return matchmod.never()
193 return matchmod.never()
194
194
195 pats = [b'include:%s' % f for f in files]
195 pats = [b'include:%s' % f for f in files]
196 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
196 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
197
197
198 @propertycache
198 @propertycache
199 def _slash(self):
199 def _slash(self):
200 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
200 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
201
201
202 @propertycache
202 @propertycache
203 def _checklink(self):
203 def _checklink(self):
204 return util.checklink(self._root)
204 return util.checklink(self._root)
205
205
206 @propertycache
206 @propertycache
207 def _checkexec(self):
207 def _checkexec(self):
208 return bool(util.checkexec(self._root))
208 return bool(util.checkexec(self._root))
209
209
210 @propertycache
210 @propertycache
211 def _checkcase(self):
211 def _checkcase(self):
212 return not util.fscasesensitive(self._join(b'.hg'))
212 return not util.fscasesensitive(self._join(b'.hg'))
213
213
214 def _join(self, f):
214 def _join(self, f):
215 # much faster than os.path.join()
215 # much faster than os.path.join()
216 # it's safe because f is always a relative path
216 # it's safe because f is always a relative path
217 return self._rootdir + f
217 return self._rootdir + f
218
218
219 def flagfunc(self, buildfallback):
219 def flagfunc(self, buildfallback):
220 if self._checklink and self._checkexec:
220 if self._checklink and self._checkexec:
221
221
222 def f(x):
222 def f(x):
223 try:
223 try:
224 st = os.lstat(self._join(x))
224 st = os.lstat(self._join(x))
225 if util.statislink(st):
225 if util.statislink(st):
226 return b'l'
226 return b'l'
227 if util.statisexec(st):
227 if util.statisexec(st):
228 return b'x'
228 return b'x'
229 except OSError:
229 except OSError:
230 pass
230 pass
231 return b''
231 return b''
232
232
233 return f
233 return f
234
234
235 fallback = buildfallback()
235 fallback = buildfallback()
236 if self._checklink:
236 if self._checklink:
237
237
238 def f(x):
238 def f(x):
239 if os.path.islink(self._join(x)):
239 if os.path.islink(self._join(x)):
240 return b'l'
240 return b'l'
241 if b'x' in fallback(x):
241 if b'x' in fallback(x):
242 return b'x'
242 return b'x'
243 return b''
243 return b''
244
244
245 return f
245 return f
246 if self._checkexec:
246 if self._checkexec:
247
247
248 def f(x):
248 def f(x):
249 if b'l' in fallback(x):
249 if b'l' in fallback(x):
250 return b'l'
250 return b'l'
251 if util.isexec(self._join(x)):
251 if util.isexec(self._join(x)):
252 return b'x'
252 return b'x'
253 return b''
253 return b''
254
254
255 return f
255 return f
256 else:
256 else:
257 return fallback
257 return fallback
258
258
259 @propertycache
259 @propertycache
260 def _cwd(self):
260 def _cwd(self):
261 # internal config: ui.forcecwd
261 # internal config: ui.forcecwd
262 forcecwd = self._ui.config(b'ui', b'forcecwd')
262 forcecwd = self._ui.config(b'ui', b'forcecwd')
263 if forcecwd:
263 if forcecwd:
264 return forcecwd
264 return forcecwd
265 return encoding.getcwd()
265 return encoding.getcwd()
266
266
267 def getcwd(self):
267 def getcwd(self):
268 """Return the path from which a canonical path is calculated.
268 """Return the path from which a canonical path is calculated.
269
269
270 This path should be used to resolve file patterns or to convert
270 This path should be used to resolve file patterns or to convert
271 canonical paths back to file paths for display. It shouldn't be
271 canonical paths back to file paths for display. It shouldn't be
272 used to get real file paths. Use vfs functions instead.
272 used to get real file paths. Use vfs functions instead.
273 """
273 """
274 cwd = self._cwd
274 cwd = self._cwd
275 if cwd == self._root:
275 if cwd == self._root:
276 return b''
276 return b''
277 # self._root ends with a path separator if self._root is '/' or 'C:\'
277 # self._root ends with a path separator if self._root is '/' or 'C:\'
278 rootsep = self._root
278 rootsep = self._root
279 if not util.endswithsep(rootsep):
279 if not util.endswithsep(rootsep):
280 rootsep += pycompat.ossep
280 rootsep += pycompat.ossep
281 if cwd.startswith(rootsep):
281 if cwd.startswith(rootsep):
282 return cwd[len(rootsep) :]
282 return cwd[len(rootsep) :]
283 else:
283 else:
284 # we're outside the repo. return an absolute path.
284 # we're outside the repo. return an absolute path.
285 return cwd
285 return cwd
286
286
287 def pathto(self, f, cwd=None):
287 def pathto(self, f, cwd=None):
288 if cwd is None:
288 if cwd is None:
289 cwd = self.getcwd()
289 cwd = self.getcwd()
290 path = util.pathto(self._root, cwd, f)
290 path = util.pathto(self._root, cwd, f)
291 if self._slash:
291 if self._slash:
292 return util.pconvert(path)
292 return util.pconvert(path)
293 return path
293 return path
294
294
295 def __getitem__(self, key):
295 def __getitem__(self, key):
296 """Return the current state of key (a filename) in the dirstate.
296 """Return the current state of key (a filename) in the dirstate.
297
297
298 States are:
298 States are:
299 n normal
299 n normal
300 m needs merging
300 m needs merging
301 r marked for removal
301 r marked for removal
302 a marked for addition
302 a marked for addition
303 ? not tracked
303 ? not tracked
304 """
304 """
305 return self._map.get(key, (b"?",))[0]
305 return self._map.get(key, (b"?",))[0]
306
306
307 def __contains__(self, key):
307 def __contains__(self, key):
308 return key in self._map
308 return key in self._map
309
309
310 def __iter__(self):
310 def __iter__(self):
311 return iter(sorted(self._map))
311 return iter(sorted(self._map))
312
312
313 def items(self):
313 def items(self):
314 return pycompat.iteritems(self._map)
314 return pycompat.iteritems(self._map)
315
315
316 iteritems = items
316 iteritems = items
317
317
318 def directories(self):
318 def directories(self):
319 return self._map.directories()
319 return self._map.directories()
320
320
321 def parents(self):
321 def parents(self):
322 return [self._validate(p) for p in self._pl]
322 return [self._validate(p) for p in self._pl]
323
323
324 def p1(self):
324 def p1(self):
325 return self._validate(self._pl[0])
325 return self._validate(self._pl[0])
326
326
327 def p2(self):
327 def p2(self):
328 return self._validate(self._pl[1])
328 return self._validate(self._pl[1])
329
329
330 def branch(self):
330 def branch(self):
331 return encoding.tolocal(self._branch)
331 return encoding.tolocal(self._branch)
332
332
333 def setparents(self, p1, p2=None):
333 def setparents(self, p1, p2=None):
334 """Set dirstate parents to p1 and p2.
334 """Set dirstate parents to p1 and p2.
335
335
336 When moving from two parents to one, 'm' merged entries are
336 When moving from two parents to one, 'm' merged entries are
337 adjusted to normal and previous copy records are discarded and
337 adjusted to normal and previous copy records are discarded and
338 returned by the call.
338 returned by the call.
339
339
340 See localrepo.setparents()
340 See localrepo.setparents()
341 """
341 """
342 if p2 is None:
342 if p2 is None:
343 p2 = self._nodeconstants.nullid
343 p2 = self._nodeconstants.nullid
344 if self._parentwriters == 0:
344 if self._parentwriters == 0:
345 raise ValueError(
345 raise ValueError(
346 b"cannot set dirstate parent outside of "
346 b"cannot set dirstate parent outside of "
347 b"dirstate.parentchange context manager"
347 b"dirstate.parentchange context manager"
348 )
348 )
349
349
350 self._dirty = True
350 self._dirty = True
351 oldp2 = self._pl[1]
351 oldp2 = self._pl[1]
352 if self._origpl is None:
352 if self._origpl is None:
353 self._origpl = self._pl
353 self._origpl = self._pl
354 self._map.setparents(p1, p2)
354 self._map.setparents(p1, p2)
355 copies = {}
355 copies = {}
356 if (
356 if (
357 oldp2 != self._nodeconstants.nullid
357 oldp2 != self._nodeconstants.nullid
358 and p2 == self._nodeconstants.nullid
358 and p2 == self._nodeconstants.nullid
359 ):
359 ):
360 candidatefiles = self._map.non_normal_or_other_parent_paths()
360 candidatefiles = self._map.non_normal_or_other_parent_paths()
361
361
362 for f in candidatefiles:
362 for f in candidatefiles:
363 s = self._map.get(f)
363 s = self._map.get(f)
364 if s is None:
364 if s is None:
365 continue
365 continue
366
366
367 # Discard 'm' markers when moving away from a merge state
367 # Discard 'm' markers when moving away from a merge state
368 if s[0] == b'm':
368 if s[0] == b'm':
369 source = self._map.copymap.get(f)
369 source = self._map.copymap.get(f)
370 if source:
370 if source:
371 copies[f] = source
371 copies[f] = source
372 self.normallookup(f)
372 self.normallookup(f)
373 # Also fix up otherparent markers
373 # Also fix up otherparent markers
374 elif s[0] == b'n' and s[2] == -2:
374 elif s[0] == b'n' and s[2] == -2:
375 source = self._map.copymap.get(f)
375 source = self._map.copymap.get(f)
376 if source:
376 if source:
377 copies[f] = source
377 copies[f] = source
378 self.add(f)
378 self.add(f)
379 return copies
379 return copies
380
380
381 def setbranch(self, branch):
381 def setbranch(self, branch):
382 self.__class__._branch.set(self, encoding.fromlocal(branch))
382 self.__class__._branch.set(self, encoding.fromlocal(branch))
383 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
383 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
384 try:
384 try:
385 f.write(self._branch + b'\n')
385 f.write(self._branch + b'\n')
386 f.close()
386 f.close()
387
387
388 # make sure filecache has the correct stat info for _branch after
388 # make sure filecache has the correct stat info for _branch after
389 # replacing the underlying file
389 # replacing the underlying file
390 ce = self._filecache[b'_branch']
390 ce = self._filecache[b'_branch']
391 if ce:
391 if ce:
392 ce.refresh()
392 ce.refresh()
393 except: # re-raises
393 except: # re-raises
394 f.discard()
394 f.discard()
395 raise
395 raise
396
396
397 def invalidate(self):
397 def invalidate(self):
398 """Causes the next access to reread the dirstate.
398 """Causes the next access to reread the dirstate.
399
399
400 This is different from localrepo.invalidatedirstate() because it always
400 This is different from localrepo.invalidatedirstate() because it always
401 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
401 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
402 check whether the dirstate has changed before rereading it."""
402 check whether the dirstate has changed before rereading it."""
403
403
404 for a in ("_map", "_branch", "_ignore"):
404 for a in ("_map", "_branch", "_ignore"):
405 if a in self.__dict__:
405 if a in self.__dict__:
406 delattr(self, a)
406 delattr(self, a)
407 self._lastnormaltime = 0
407 self._lastnormaltime = 0
408 self._dirty = False
408 self._dirty = False
409 self._updatedfiles.clear()
409 self._updatedfiles.clear()
410 self._parentwriters = 0
410 self._parentwriters = 0
411 self._origpl = None
411 self._origpl = None
412
412
413 def copy(self, source, dest):
413 def copy(self, source, dest):
414 """Mark dest as a copy of source. Unmark dest if source is None."""
414 """Mark dest as a copy of source. Unmark dest if source is None."""
415 if source == dest:
415 if source == dest:
416 return
416 return
417 self._dirty = True
417 self._dirty = True
418 if source is not None:
418 if source is not None:
419 self._map.copymap[dest] = source
419 self._map.copymap[dest] = source
420 self._updatedfiles.add(source)
420 self._updatedfiles.add(source)
421 self._updatedfiles.add(dest)
421 self._updatedfiles.add(dest)
422 elif self._map.copymap.pop(dest, None):
422 elif self._map.copymap.pop(dest, None):
423 self._updatedfiles.add(dest)
423 self._updatedfiles.add(dest)
424
424
425 def copied(self, file):
425 def copied(self, file):
426 return self._map.copymap.get(file, None)
426 return self._map.copymap.get(file, None)
427
427
428 def copies(self):
428 def copies(self):
429 return self._map.copymap
429 return self._map.copymap
430
430
431 def _addpath(self, f, state, mode, size, mtime):
431 def _addpath(self, f, state, mode, size, mtime):
432 oldstate = self[f]
432 oldstate = self[f]
433 if state == b'a' or oldstate == b'r':
433 if state == b'a' or oldstate == b'r':
434 scmutil.checkfilename(f)
434 scmutil.checkfilename(f)
435 if self._map.hastrackeddir(f):
435 if self._map.hastrackeddir(f):
436 - raise error.Abort(
437 - _(b'directory %r already in dirstate') % pycompat.bytestr(f)
438 - )
436 + msg = _(b'directory %r already in dirstate')
437 + msg %= pycompat.bytestr(f)
438 + raise error.Abort(msg)
439 # shadows
439 # shadows
440 for d in pathutil.finddirs(f):
440 for d in pathutil.finddirs(f):
441 if self._map.hastrackeddir(d):
441 if self._map.hastrackeddir(d):
442 break
442 break
443 entry = self._map.get(d)
443 entry = self._map.get(d)
444 if entry is not None and entry[0] != b'r':
444 if entry is not None and entry[0] != b'r':
445 raise error.Abort(
445 raise error.Abort(
446 _(b'file %r in dirstate clashes with %r')
446 _(b'file %r in dirstate clashes with %r')
447 % (pycompat.bytestr(d), pycompat.bytestr(f))
447 % (pycompat.bytestr(d), pycompat.bytestr(f))
448 )
448 )
449 self._dirty = True
449 self._dirty = True
450 self._updatedfiles.add(f)
450 self._updatedfiles.add(f)
451 self._map.addfile(f, oldstate, state, mode, size, mtime)
451 self._map.addfile(f, oldstate, state, mode, size, mtime)
452
452
453 def normal(self, f, parentfiledata=None):
453 def normal(self, f, parentfiledata=None):
454 """Mark a file normal and clean.
454 """Mark a file normal and clean.
455
455
456 parentfiledata: (mode, size, mtime) of the clean file
456 parentfiledata: (mode, size, mtime) of the clean file
457
457
458 parentfiledata should be computed from memory (for mode,
458 parentfiledata should be computed from memory (for mode,
459 size), at or as close as possible to the point where we
459 size), at or as close as possible to the point where we
460 determined the file was clean, to limit the risk of the
460 determined the file was clean, to limit the risk of the
461 file having been changed by an external process between the
461 file having been changed by an external process between the
462 moment where the file was determined to be clean and now."""
462 moment where the file was determined to be clean and now."""
463 if parentfiledata:
463 if parentfiledata:
464 (mode, size, mtime) = parentfiledata
464 (mode, size, mtime) = parentfiledata
465 else:
465 else:
466 s = os.lstat(self._join(f))
466 s = os.lstat(self._join(f))
467 mode = s.st_mode
467 mode = s.st_mode
468 size = s.st_size
468 size = s.st_size
469 mtime = s[stat.ST_MTIME]
469 mtime = s[stat.ST_MTIME]
470 self._addpath(f, b'n', mode, size & _rangemask, mtime & _rangemask)
470 self._addpath(f, b'n', mode, size & _rangemask, mtime & _rangemask)
471 self._map.copymap.pop(f, None)
471 self._map.copymap.pop(f, None)
472 if f in self._map.nonnormalset:
472 if f in self._map.nonnormalset:
473 self._map.nonnormalset.remove(f)
473 self._map.nonnormalset.remove(f)
474 if mtime > self._lastnormaltime:
474 if mtime > self._lastnormaltime:
475 # Remember the most recent modification timeslot for status(),
475 # Remember the most recent modification timeslot for status(),
476 # to make sure we won't miss future size-preserving file content
476 # to make sure we won't miss future size-preserving file content
477 # modifications that happen within the same timeslot.
477 # modifications that happen within the same timeslot.
478 self._lastnormaltime = mtime
478 self._lastnormaltime = mtime
479
479
480 def normallookup(self, f):
480 def normallookup(self, f):
481 '''Mark a file normal, but possibly dirty.'''
481 '''Mark a file normal, but possibly dirty.'''
482 if self._pl[1] != self._nodeconstants.nullid:
482 if self._pl[1] != self._nodeconstants.nullid:
483 # if there is a merge going on and the file was either
483 # if there is a merge going on and the file was either
484 # in state 'm' (-1) or coming from other parent (-2) before
484 # in state 'm' (-1) or coming from other parent (-2) before
485 # being removed, restore that state.
485 # being removed, restore that state.
486 entry = self._map.get(f)
486 entry = self._map.get(f)
487 if entry is not None:
487 if entry is not None:
488 if entry[0] == b'r' and entry[2] in (-1, -2):
488 if entry[0] == b'r' and entry[2] in (-1, -2):
489 source = self._map.copymap.get(f)
489 source = self._map.copymap.get(f)
490 if entry[2] == -1:
490 if entry[2] == -1:
491 self.merge(f)
491 self.merge(f)
492 elif entry[2] == -2:
492 elif entry[2] == -2:
493 self.otherparent(f)
493 self.otherparent(f)
494 if source:
494 if source:
495 self.copy(source, f)
495 self.copy(source, f)
496 return
496 return
497 if entry[0] == b'm' or entry[0] == b'n' and entry[2] == -2:
497 if entry[0] == b'm' or entry[0] == b'n' and entry[2] == -2:
498 return
498 return
499 self._addpath(f, b'n', 0, -1, -1)
499 self._addpath(f, b'n', 0, -1, -1)
500 self._map.copymap.pop(f, None)
500 self._map.copymap.pop(f, None)
501
501
502 def otherparent(self, f):
502 def otherparent(self, f):
503 '''Mark as coming from the other parent, always dirty.'''
503 '''Mark as coming from the other parent, always dirty.'''
504 if self._pl[1] == self._nodeconstants.nullid:
504 if self._pl[1] == self._nodeconstants.nullid:
505 msg = _(b"setting %r to other parent only allowed in merges") % f
505 msg = _(b"setting %r to other parent only allowed in merges") % f
506 raise error.Abort(msg)
506 raise error.Abort(msg)
507 if f in self and self[f] == b'n':
507 if f in self and self[f] == b'n':
508 # merge-like
508 # merge-like
509 self._addpath(f, b'm', 0, -2, -1)
509 self._addpath(f, b'm', 0, -2, -1)
510 else:
510 else:
511 # add-like
511 # add-like
512 self._addpath(f, b'n', 0, -2, -1)
512 self._addpath(f, b'n', 0, -2, -1)
513 self._map.copymap.pop(f, None)
513 self._map.copymap.pop(f, None)
514
514
515 def add(self, f):
515 def add(self, f):
516 '''Mark a file added.'''
516 '''Mark a file added.'''
517 self._addpath(f, b'a', 0, -1, -1)
517 self._addpath(f, b'a', 0, -1, -1)
518 self._map.copymap.pop(f, None)
518 self._map.copymap.pop(f, None)
519
519
520 def remove(self, f):
520 def remove(self, f):
521 '''Mark a file removed.'''
521 '''Mark a file removed.'''
522 self._dirty = True
522 self._dirty = True
523 oldstate = self[f]
523 oldstate = self[f]
524 size = 0
524 size = 0
525 if self._pl[1] != self._nodeconstants.nullid:
525 if self._pl[1] != self._nodeconstants.nullid:
526 entry = self._map.get(f)
526 entry = self._map.get(f)
527 if entry is not None:
527 if entry is not None:
528 # backup the previous state
528 # backup the previous state
529 if entry[0] == b'm': # merge
529 if entry[0] == b'm': # merge
530 size = -1
530 size = -1
531 elif entry[0] == b'n' and entry[2] == -2: # other parent
531 elif entry[0] == b'n' and entry[2] == -2: # other parent
532 size = -2
532 size = -2
533 self._map.otherparentset.add(f)
533 self._map.otherparentset.add(f)
534 self._updatedfiles.add(f)
534 self._updatedfiles.add(f)
535 self._map.removefile(f, oldstate, size)
535 self._map.removefile(f, oldstate, size)
536 if size == 0:
536 if size == 0:
537 self._map.copymap.pop(f, None)
537 self._map.copymap.pop(f, None)
538
538
539 def merge(self, f):
539 def merge(self, f):
540 '''Mark a file merged.'''
540 '''Mark a file merged.'''
541 if self._pl[1] == self._nodeconstants.nullid:
541 if self._pl[1] == self._nodeconstants.nullid:
542 return self.normallookup(f)
542 return self.normallookup(f)
543 return self.otherparent(f)
543 return self.otherparent(f)
544
544
545 def drop(self, f):
545 def drop(self, f):
546 '''Drop a file from the dirstate'''
546 '''Drop a file from the dirstate'''
547 oldstate = self[f]
547 oldstate = self[f]
548 if self._map.dropfile(f, oldstate):
548 if self._map.dropfile(f, oldstate):
549 self._dirty = True
549 self._dirty = True
550 self._updatedfiles.add(f)
550 self._updatedfiles.add(f)
551 self._map.copymap.pop(f, None)
551 self._map.copymap.pop(f, None)
552
552
553 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
553 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
554 if exists is None:
554 if exists is None:
555 exists = os.path.lexists(os.path.join(self._root, path))
555 exists = os.path.lexists(os.path.join(self._root, path))
556 if not exists:
556 if not exists:
557 # Maybe a path component exists
557 # Maybe a path component exists
558 if not ignoremissing and b'/' in path:
558 if not ignoremissing and b'/' in path:
559 d, f = path.rsplit(b'/', 1)
559 d, f = path.rsplit(b'/', 1)
560 d = self._normalize(d, False, ignoremissing, None)
560 d = self._normalize(d, False, ignoremissing, None)
561 folded = d + b"/" + f
561 folded = d + b"/" + f
562 else:
562 else:
563 # No path components, preserve original case
563 # No path components, preserve original case
564 folded = path
564 folded = path
565 else:
565 else:
566 # recursively normalize leading directory components
566 # recursively normalize leading directory components
567 # against dirstate
567 # against dirstate
568 if b'/' in normed:
568 if b'/' in normed:
569 d, f = normed.rsplit(b'/', 1)
569 d, f = normed.rsplit(b'/', 1)
570 d = self._normalize(d, False, ignoremissing, True)
570 d = self._normalize(d, False, ignoremissing, True)
571 r = self._root + b"/" + d
571 r = self._root + b"/" + d
572 folded = d + b"/" + util.fspath(f, r)
572 folded = d + b"/" + util.fspath(f, r)
573 else:
573 else:
574 folded = util.fspath(normed, self._root)
574 folded = util.fspath(normed, self._root)
575 storemap[normed] = folded
575 storemap[normed] = folded
576
576
577 return folded
577 return folded
578
578
579 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
579 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
580 normed = util.normcase(path)
580 normed = util.normcase(path)
581 folded = self._map.filefoldmap.get(normed, None)
581 folded = self._map.filefoldmap.get(normed, None)
582 if folded is None:
582 if folded is None:
583 if isknown:
583 if isknown:
584 folded = path
584 folded = path
585 else:
585 else:
586 folded = self._discoverpath(
586 folded = self._discoverpath(
587 path, normed, ignoremissing, exists, self._map.filefoldmap
587 path, normed, ignoremissing, exists, self._map.filefoldmap
588 )
588 )
589 return folded
589 return folded
590
590
591 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
591 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
592 normed = util.normcase(path)
592 normed = util.normcase(path)
593 folded = self._map.filefoldmap.get(normed, None)
593 folded = self._map.filefoldmap.get(normed, None)
594 if folded is None:
594 if folded is None:
595 folded = self._map.dirfoldmap.get(normed, None)
595 folded = self._map.dirfoldmap.get(normed, None)
596 if folded is None:
596 if folded is None:
597 if isknown:
597 if isknown:
598 folded = path
598 folded = path
599 else:
599 else:
600 # store discovered result in dirfoldmap so that future
600 # store discovered result in dirfoldmap so that future
601 # normalizefile calls don't start matching directories
601 # normalizefile calls don't start matching directories
602 folded = self._discoverpath(
602 folded = self._discoverpath(
603 path, normed, ignoremissing, exists, self._map.dirfoldmap
603 path, normed, ignoremissing, exists, self._map.dirfoldmap
604 )
604 )
605 return folded
605 return folded
606
606
607 def normalize(self, path, isknown=False, ignoremissing=False):
607 def normalize(self, path, isknown=False, ignoremissing=False):
608 """
608 """
609 normalize the case of a pathname when on a casefolding filesystem
609 normalize the case of a pathname when on a casefolding filesystem
610
610
611 isknown specifies whether the filename came from walking the
611 isknown specifies whether the filename came from walking the
612 disk, to avoid extra filesystem access.
612 disk, to avoid extra filesystem access.
613
613
614 If ignoremissing is True, missing paths are returned
614 If ignoremissing is True, missing paths are returned
615 unchanged. Otherwise, we try harder to normalize possibly
615 unchanged. Otherwise, we try harder to normalize possibly
616 existing path components.
616 existing path components.
617
617
618 The normalized case is determined based on the following precedence:
618 The normalized case is determined based on the following precedence:
619
619
620 - version of name already stored in the dirstate
620 - version of name already stored in the dirstate
621 - version of name stored on disk
621 - version of name stored on disk
622 - version provided via command arguments
622 - version provided via command arguments
623 """
623 """
624
624
625 if self._checkcase:
625 if self._checkcase:
626 return self._normalize(path, isknown, ignoremissing)
626 return self._normalize(path, isknown, ignoremissing)
627 return path
627 return path
628
628
629 def clear(self):
629 def clear(self):
630 self._map.clear()
630 self._map.clear()
631 self._lastnormaltime = 0
631 self._lastnormaltime = 0
632 self._updatedfiles.clear()
632 self._updatedfiles.clear()
633 self._dirty = True
633 self._dirty = True
634
634
635 def rebuild(self, parent, allfiles, changedfiles=None):
635 def rebuild(self, parent, allfiles, changedfiles=None):
636 if changedfiles is None:
636 if changedfiles is None:
637 # Rebuild entire dirstate
637 # Rebuild entire dirstate
638 to_lookup = allfiles
638 to_lookup = allfiles
639 to_drop = []
639 to_drop = []
640 lastnormaltime = self._lastnormaltime
640 lastnormaltime = self._lastnormaltime
641 self.clear()
641 self.clear()
642 self._lastnormaltime = lastnormaltime
642 self._lastnormaltime = lastnormaltime
643 elif len(changedfiles) < 10:
643 elif len(changedfiles) < 10:
644 # Avoid turning allfiles into a set, which can be expensive if it's
644 # Avoid turning allfiles into a set, which can be expensive if it's
645 # large.
645 # large.
646 to_lookup = []
646 to_lookup = []
647 to_drop = []
647 to_drop = []
648 for f in changedfiles:
648 for f in changedfiles:
649 if f in allfiles:
649 if f in allfiles:
650 to_lookup.append(f)
650 to_lookup.append(f)
651 else:
651 else:
652 to_drop.append(f)
652 to_drop.append(f)
653 else:
653 else:
654 changedfilesset = set(changedfiles)
654 changedfilesset = set(changedfiles)
655 to_lookup = changedfilesset & set(allfiles)
655 to_lookup = changedfilesset & set(allfiles)
656 to_drop = changedfilesset - to_lookup
656 to_drop = changedfilesset - to_lookup
657
657
658 if self._origpl is None:
658 if self._origpl is None:
659 self._origpl = self._pl
659 self._origpl = self._pl
660 self._map.setparents(parent, self._nodeconstants.nullid)
660 self._map.setparents(parent, self._nodeconstants.nullid)
661
661
662 for f in to_lookup:
662 for f in to_lookup:
663 self.normallookup(f)
663 self.normallookup(f)
664 for f in to_drop:
664 for f in to_drop:
665 self.drop(f)
665 self.drop(f)
666
666
667 self._dirty = True
667 self._dirty = True
668
668
669 def identity(self):
669 def identity(self):
670 """Return identity of dirstate itself to detect changing in storage
670 """Return identity of dirstate itself to detect changing in storage
671
671
672 If identity of previous dirstate is equal to this, writing
672 If identity of previous dirstate is equal to this, writing
673 changes based on the former dirstate out can keep consistency.
673 changes based on the former dirstate out can keep consistency.
674 """
674 """
675 return self._map.identity
675 return self._map.identity
676
676
677 def write(self, tr):
677 def write(self, tr):
678 if not self._dirty:
678 if not self._dirty:
679 return
679 return
680
680
681 filename = self._filename
681 filename = self._filename
682 if tr:
682 if tr:
683 # 'dirstate.write()' is not only for writing in-memory
683 # 'dirstate.write()' is not only for writing in-memory
684 # changes out, but also for dropping ambiguous timestamp.
684 # changes out, but also for dropping ambiguous timestamp.
685 # delayed writing re-raise "ambiguous timestamp issue".
685 # delayed writing re-raise "ambiguous timestamp issue".
686 # See also the wiki page below for detail:
686 # See also the wiki page below for detail:
687 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
687 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
688
688
689 # emulate dropping timestamp in 'parsers.pack_dirstate'
689 # emulate dropping timestamp in 'parsers.pack_dirstate'
690 now = _getfsnow(self._opener)
690 now = _getfsnow(self._opener)
691 self._map.clearambiguoustimes(self._updatedfiles, now)
691 self._map.clearambiguoustimes(self._updatedfiles, now)
692
692
693 # emulate that all 'dirstate.normal' results are written out
693 # emulate that all 'dirstate.normal' results are written out
694 self._lastnormaltime = 0
694 self._lastnormaltime = 0
695 self._updatedfiles.clear()
695 self._updatedfiles.clear()
696
696
697 # delay writing in-memory changes out
697 # delay writing in-memory changes out
698 tr.addfilegenerator(
698 tr.addfilegenerator(
699 b'dirstate',
699 b'dirstate',
700 (self._filename,),
700 (self._filename,),
701 self._writedirstate,
701 self._writedirstate,
702 location=b'plain',
702 location=b'plain',
703 )
703 )
704 return
704 return
705
705
706 st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
706 st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
707 self._writedirstate(st)
707 self._writedirstate(st)
708
708
709 def addparentchangecallback(self, category, callback):
709 def addparentchangecallback(self, category, callback):
710 """add a callback to be called when the wd parents are changed
710 """add a callback to be called when the wd parents are changed
711
711
712 Callback will be called with the following arguments:
712 Callback will be called with the following arguments:
713 dirstate, (oldp1, oldp2), (newp1, newp2)
713 dirstate, (oldp1, oldp2), (newp1, newp2)
714
714
715 Category is a unique identifier to allow overwriting an old callback
715 Category is a unique identifier to allow overwriting an old callback
716 with a newer callback.
716 with a newer callback.
717 """
717 """
718 self._plchangecallbacks[category] = callback
718 self._plchangecallbacks[category] = callback
719
719
720 def _writedirstate(self, st):
720 def _writedirstate(self, st):
721 # notify callbacks about parents change
721 # notify callbacks about parents change
722 if self._origpl is not None and self._origpl != self._pl:
722 if self._origpl is not None and self._origpl != self._pl:
723 for c, callback in sorted(
723 for c, callback in sorted(
724 pycompat.iteritems(self._plchangecallbacks)
724 pycompat.iteritems(self._plchangecallbacks)
725 ):
725 ):
726 callback(self, self._origpl, self._pl)
726 callback(self, self._origpl, self._pl)
727 self._origpl = None
727 self._origpl = None
728 # use the modification time of the newly created temporary file as the
728 # use the modification time of the newly created temporary file as the
729 # filesystem's notion of 'now'
729 # filesystem's notion of 'now'
730 now = util.fstat(st)[stat.ST_MTIME] & _rangemask
730 now = util.fstat(st)[stat.ST_MTIME] & _rangemask
731
731
732 # a large enough 'delaywrite' prevents 'pack_dirstate' from dropping
732 # a large enough 'delaywrite' prevents 'pack_dirstate' from dropping
733 # the timestamp of each entry in dirstate, because of 'now > mtime'
733 # the timestamp of each entry in dirstate, because of 'now > mtime'
734 delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
734 delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
735 if delaywrite > 0:
735 if delaywrite > 0:
736 # do we have any files to delay for?
736 # do we have any files to delay for?
737 for f, e in pycompat.iteritems(self._map):
737 for f, e in pycompat.iteritems(self._map):
738 if e[0] == b'n' and e[3] == now:
738 if e[0] == b'n' and e[3] == now:
739 import time # to avoid useless import
739 import time # to avoid useless import
740
740
741 # rather than sleep n seconds, sleep until the next
741 # rather than sleep n seconds, sleep until the next
742 # multiple of n seconds
742 # multiple of n seconds
743 clock = time.time()
743 clock = time.time()
744 start = int(clock) - (int(clock) % delaywrite)
744 start = int(clock) - (int(clock) % delaywrite)
745 end = start + delaywrite
745 end = start + delaywrite
746 time.sleep(end - clock)
746 time.sleep(end - clock)
747 now = end # trust our estimate that the end is near now
747 now = end # trust our estimate that the end is near now
748 break
748 break
749
749
750 self._map.write(st, now)
750 self._map.write(st, now)
751 self._lastnormaltime = 0
751 self._lastnormaltime = 0
752 self._dirty = False
752 self._dirty = False
753
753
754 def _dirignore(self, f):
754 def _dirignore(self, f):
755 if self._ignore(f):
755 if self._ignore(f):
756 return True
756 return True
757 for p in pathutil.finddirs(f):
757 for p in pathutil.finddirs(f):
758 if self._ignore(p):
758 if self._ignore(p):
759 return True
759 return True
760 return False
760 return False
761
761
762 def _ignorefiles(self):
762 def _ignorefiles(self):
763 files = []
763 files = []
764 if os.path.exists(self._join(b'.hgignore')):
764 if os.path.exists(self._join(b'.hgignore')):
765 files.append(self._join(b'.hgignore'))
765 files.append(self._join(b'.hgignore'))
766 for name, path in self._ui.configitems(b"ui"):
766 for name, path in self._ui.configitems(b"ui"):
767 if name == b'ignore' or name.startswith(b'ignore.'):
767 if name == b'ignore' or name.startswith(b'ignore.'):
768 # we need to use os.path.join here rather than self._join
768 # we need to use os.path.join here rather than self._join
769 # because path is arbitrary and user-specified
769 # because path is arbitrary and user-specified
770 files.append(os.path.join(self._rootdir, util.expandpath(path)))
770 files.append(os.path.join(self._rootdir, util.expandpath(path)))
771 return files
771 return files
772
772
773 def _ignorefileandline(self, f):
773 def _ignorefileandline(self, f):
774 files = collections.deque(self._ignorefiles())
774 files = collections.deque(self._ignorefiles())
775 visited = set()
775 visited = set()
776 while files:
776 while files:
777 i = files.popleft()
777 i = files.popleft()
778 patterns = matchmod.readpatternfile(
778 patterns = matchmod.readpatternfile(
779 i, self._ui.warn, sourceinfo=True
779 i, self._ui.warn, sourceinfo=True
780 )
780 )
781 for pattern, lineno, line in patterns:
781 for pattern, lineno, line in patterns:
782 kind, p = matchmod._patsplit(pattern, b'glob')
782 kind, p = matchmod._patsplit(pattern, b'glob')
783 if kind == b"subinclude":
783 if kind == b"subinclude":
784 if p not in visited:
784 if p not in visited:
785 files.append(p)
785 files.append(p)
786 continue
786 continue
787 m = matchmod.match(
787 m = matchmod.match(
788 self._root, b'', [], [pattern], warn=self._ui.warn
788 self._root, b'', [], [pattern], warn=self._ui.warn
789 )
789 )
790 if m(f):
790 if m(f):
791 return (i, lineno, line)
791 return (i, lineno, line)
792 visited.add(i)
792 visited.add(i)
793 return (None, -1, b"")
793 return (None, -1, b"")
794
794
795 def _walkexplicit(self, match, subrepos):
795 def _walkexplicit(self, match, subrepos):
796 """Get stat data about the files explicitly specified by match.
796 """Get stat data about the files explicitly specified by match.
797
797
798 Return a triple (results, dirsfound, dirsnotfound).
798 Return a triple (results, dirsfound, dirsnotfound).
799 - results is a mapping from filename to stat result. It also contains
799 - results is a mapping from filename to stat result. It also contains
800 listings mapping subrepos and .hg to None.
800 listings mapping subrepos and .hg to None.
801 - dirsfound is a list of files found to be directories.
801 - dirsfound is a list of files found to be directories.
802 - dirsnotfound is a list of files that the dirstate thinks are
802 - dirsnotfound is a list of files that the dirstate thinks are
803 directories and that were not found."""
803 directories and that were not found."""
804
804
805 def badtype(mode):
805 def badtype(mode):
806 kind = _(b'unknown')
806 kind = _(b'unknown')
807 if stat.S_ISCHR(mode):
807 if stat.S_ISCHR(mode):
808 kind = _(b'character device')
808 kind = _(b'character device')
809 elif stat.S_ISBLK(mode):
809 elif stat.S_ISBLK(mode):
810 kind = _(b'block device')
810 kind = _(b'block device')
811 elif stat.S_ISFIFO(mode):
811 elif stat.S_ISFIFO(mode):
812 kind = _(b'fifo')
812 kind = _(b'fifo')
813 elif stat.S_ISSOCK(mode):
813 elif stat.S_ISSOCK(mode):
814 kind = _(b'socket')
814 kind = _(b'socket')
815 elif stat.S_ISDIR(mode):
815 elif stat.S_ISDIR(mode):
816 kind = _(b'directory')
816 kind = _(b'directory')
817 return _(b'unsupported file type (type is %s)') % kind
817 return _(b'unsupported file type (type is %s)') % kind
818
818
819 badfn = match.bad
819 badfn = match.bad
820 dmap = self._map
820 dmap = self._map
821 lstat = os.lstat
821 lstat = os.lstat
822 getkind = stat.S_IFMT
822 getkind = stat.S_IFMT
823 dirkind = stat.S_IFDIR
823 dirkind = stat.S_IFDIR
824 regkind = stat.S_IFREG
824 regkind = stat.S_IFREG
825 lnkkind = stat.S_IFLNK
825 lnkkind = stat.S_IFLNK
826 join = self._join
826 join = self._join
827 dirsfound = []
827 dirsfound = []
828 foundadd = dirsfound.append
828 foundadd = dirsfound.append
829 dirsnotfound = []
829 dirsnotfound = []
830 notfoundadd = dirsnotfound.append
830 notfoundadd = dirsnotfound.append
831
831
832 if not match.isexact() and self._checkcase:
832 if not match.isexact() and self._checkcase:
833 normalize = self._normalize
833 normalize = self._normalize
834 else:
834 else:
835 normalize = None
835 normalize = None
836
836
837 files = sorted(match.files())
837 files = sorted(match.files())
838 subrepos.sort()
838 subrepos.sort()
839 i, j = 0, 0
839 i, j = 0, 0
840 while i < len(files) and j < len(subrepos):
840 while i < len(files) and j < len(subrepos):
841 subpath = subrepos[j] + b"/"
841 subpath = subrepos[j] + b"/"
842 if files[i] < subpath:
842 if files[i] < subpath:
843 i += 1
843 i += 1
844 continue
844 continue
845 while i < len(files) and files[i].startswith(subpath):
845 while i < len(files) and files[i].startswith(subpath):
846 del files[i]
846 del files[i]
847 j += 1
847 j += 1
848
848
849 if not files or b'' in files:
849 if not files or b'' in files:
850 files = [b'']
850 files = [b'']
851 # constructing the foldmap is expensive, so don't do it for the
851 # constructing the foldmap is expensive, so don't do it for the
852 # common case where files is ['']
852 # common case where files is ['']
853 normalize = None
853 normalize = None
854 results = dict.fromkeys(subrepos)
854 results = dict.fromkeys(subrepos)
855 results[b'.hg'] = None
855 results[b'.hg'] = None
856
856
857 for ff in files:
857 for ff in files:
858 if normalize:
858 if normalize:
859 nf = normalize(ff, False, True)
859 nf = normalize(ff, False, True)
860 else:
860 else:
861 nf = ff
861 nf = ff
862 if nf in results:
862 if nf in results:
863 continue
863 continue
864
864
865 try:
865 try:
866 st = lstat(join(nf))
866 st = lstat(join(nf))
867 kind = getkind(st.st_mode)
867 kind = getkind(st.st_mode)
868 if kind == dirkind:
868 if kind == dirkind:
869 if nf in dmap:
869 if nf in dmap:
870 # file replaced by dir on disk but still in dirstate
870 # file replaced by dir on disk but still in dirstate
871 results[nf] = None
871 results[nf] = None
872 foundadd((nf, ff))
872 foundadd((nf, ff))
873 elif kind == regkind or kind == lnkkind:
873 elif kind == regkind or kind == lnkkind:
874 results[nf] = st
874 results[nf] = st
875 else:
875 else:
876 badfn(ff, badtype(kind))
876 badfn(ff, badtype(kind))
877 if nf in dmap:
877 if nf in dmap:
878 results[nf] = None
878 results[nf] = None
879 except OSError as inst: # nf not found on disk - it is dirstate only
879 except OSError as inst: # nf not found on disk - it is dirstate only
880 if nf in dmap: # does it exactly match a missing file?
880 if nf in dmap: # does it exactly match a missing file?
881 results[nf] = None
881 results[nf] = None
882 else: # does it match a missing directory?
882 else: # does it match a missing directory?
883 if self._map.hasdir(nf):
883 if self._map.hasdir(nf):
884 notfoundadd(nf)
884 notfoundadd(nf)
885 else:
885 else:
886 badfn(ff, encoding.strtolocal(inst.strerror))
886 badfn(ff, encoding.strtolocal(inst.strerror))
887
887
888 # match.files() may contain explicitly-specified paths that shouldn't
888 # match.files() may contain explicitly-specified paths that shouldn't
889 # be taken; drop them from the list of files found. dirsfound/notfound
889 # be taken; drop them from the list of files found. dirsfound/notfound
890 # aren't filtered here because they will be tested later.
890 # aren't filtered here because they will be tested later.
891 if match.anypats():
891 if match.anypats():
892 for f in list(results):
892 for f in list(results):
893 if f == b'.hg' or f in subrepos:
893 if f == b'.hg' or f in subrepos:
894 # keep sentinel to disable further out-of-repo walks
894 # keep sentinel to disable further out-of-repo walks
895 continue
895 continue
896 if not match(f):
896 if not match(f):
897 del results[f]
897 del results[f]
898
898
899 # Case insensitive filesystems cannot rely on lstat() failing to detect
899 # Case insensitive filesystems cannot rely on lstat() failing to detect
900 # a case-only rename. Prune the stat object for any file that does not
900 # a case-only rename. Prune the stat object for any file that does not
901 # match the case in the filesystem, if there are multiple files that
901 # match the case in the filesystem, if there are multiple files that
902 # normalize to the same path.
902 # normalize to the same path.
903 if match.isexact() and self._checkcase:
903 if match.isexact() and self._checkcase:
904 normed = {}
904 normed = {}
905
905
906 for f, st in pycompat.iteritems(results):
906 for f, st in pycompat.iteritems(results):
907 if st is None:
907 if st is None:
908 continue
908 continue
909
909
910 nc = util.normcase(f)
910 nc = util.normcase(f)
911 paths = normed.get(nc)
911 paths = normed.get(nc)
912
912
913 if paths is None:
913 if paths is None:
914 paths = set()
914 paths = set()
915 normed[nc] = paths
915 normed[nc] = paths
916
916
917 paths.add(f)
917 paths.add(f)
918
918
919 for norm, paths in pycompat.iteritems(normed):
919 for norm, paths in pycompat.iteritems(normed):
920 if len(paths) > 1:
920 if len(paths) > 1:
921 for path in paths:
921 for path in paths:
922 folded = self._discoverpath(
922 folded = self._discoverpath(
923 path, norm, True, None, self._map.dirfoldmap
923 path, norm, True, None, self._map.dirfoldmap
924 )
924 )
925 if path != folded:
925 if path != folded:
926 results[path] = None
926 results[path] = None
927
927
928 return results, dirsfound, dirsnotfound
928 return results, dirsfound, dirsnotfound
929
929
930 def walk(self, match, subrepos, unknown, ignored, full=True):
930 def walk(self, match, subrepos, unknown, ignored, full=True):
931 """
931 """
932 Walk recursively through the directory tree, finding all files
932 Walk recursively through the directory tree, finding all files
933 matched by match.
933 matched by match.
934
934
935 If full is False, maybe skip some known-clean files.
935 If full is False, maybe skip some known-clean files.
936
936
937 Return a dict mapping filename to stat-like object (either
937 Return a dict mapping filename to stat-like object (either
938 mercurial.osutil.stat instance or return value of os.stat()).
938 mercurial.osutil.stat instance or return value of os.stat()).
939
939
940 """
940 """
941 # full is a flag that extensions that hook into walk can use -- this
941 # full is a flag that extensions that hook into walk can use -- this
942 # implementation doesn't use it at all. This satisfies the contract
942 # implementation doesn't use it at all. This satisfies the contract
943 # because we only guarantee a "maybe".
943 # because we only guarantee a "maybe".
944
944
945 if ignored:
945 if ignored:
946 ignore = util.never
946 ignore = util.never
947 dirignore = util.never
947 dirignore = util.never
948 elif unknown:
948 elif unknown:
949 ignore = self._ignore
949 ignore = self._ignore
950 dirignore = self._dirignore
950 dirignore = self._dirignore
951 else:
951 else:
952 # if not unknown and not ignored, drop dir recursion and step 2
952 # if not unknown and not ignored, drop dir recursion and step 2
953 ignore = util.always
953 ignore = util.always
954 dirignore = util.always
954 dirignore = util.always
955
955
956 matchfn = match.matchfn
956 matchfn = match.matchfn
957 matchalways = match.always()
957 matchalways = match.always()
958 matchtdir = match.traversedir
958 matchtdir = match.traversedir
959 dmap = self._map
959 dmap = self._map
960 listdir = util.listdir
960 listdir = util.listdir
961 lstat = os.lstat
961 lstat = os.lstat
962 dirkind = stat.S_IFDIR
962 dirkind = stat.S_IFDIR
963 regkind = stat.S_IFREG
963 regkind = stat.S_IFREG
964 lnkkind = stat.S_IFLNK
964 lnkkind = stat.S_IFLNK
965 join = self._join
965 join = self._join
966
966
967 exact = skipstep3 = False
967 exact = skipstep3 = False
968 if match.isexact(): # match.exact
968 if match.isexact(): # match.exact
969 exact = True
969 exact = True
970 dirignore = util.always # skip step 2
970 dirignore = util.always # skip step 2
971 elif match.prefix(): # match.match, no patterns
971 elif match.prefix(): # match.match, no patterns
972 skipstep3 = True
972 skipstep3 = True
973
973
974 if not exact and self._checkcase:
974 if not exact and self._checkcase:
975 normalize = self._normalize
975 normalize = self._normalize
976 normalizefile = self._normalizefile
976 normalizefile = self._normalizefile
977 skipstep3 = False
977 skipstep3 = False
978 else:
978 else:
979 normalize = self._normalize
979 normalize = self._normalize
980 normalizefile = None
980 normalizefile = None
981
981
982 # step 1: find all explicit files
982 # step 1: find all explicit files
983 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
983 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
984 if matchtdir:
984 if matchtdir:
985 for d in work:
985 for d in work:
986 matchtdir(d[0])
986 matchtdir(d[0])
987 for d in dirsnotfound:
987 for d in dirsnotfound:
988 matchtdir(d)
988 matchtdir(d)
989
989
990 skipstep3 = skipstep3 and not (work or dirsnotfound)
990 skipstep3 = skipstep3 and not (work or dirsnotfound)
991 work = [d for d in work if not dirignore(d[0])]
991 work = [d for d in work if not dirignore(d[0])]
992
992
993 # step 2: visit subdirectories
993 # step 2: visit subdirectories
994 def traverse(work, alreadynormed):
994 def traverse(work, alreadynormed):
995 wadd = work.append
995 wadd = work.append
996 while work:
996 while work:
997 tracing.counter('dirstate.walk work', len(work))
997 tracing.counter('dirstate.walk work', len(work))
998 nd = work.pop()
998 nd = work.pop()
999 visitentries = match.visitchildrenset(nd)
999 visitentries = match.visitchildrenset(nd)
1000 if not visitentries:
1000 if not visitentries:
1001 continue
1001 continue
1002 if visitentries == b'this' or visitentries == b'all':
1002 if visitentries == b'this' or visitentries == b'all':
1003 visitentries = None
1003 visitentries = None
1004 skip = None
1004 skip = None
1005 if nd != b'':
1005 if nd != b'':
1006 skip = b'.hg'
1006 skip = b'.hg'
1007 try:
1007 try:
1008 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1008 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1009 entries = listdir(join(nd), stat=True, skip=skip)
1009 entries = listdir(join(nd), stat=True, skip=skip)
1010 except OSError as inst:
1010 except OSError as inst:
1011 if inst.errno in (errno.EACCES, errno.ENOENT):
1011 if inst.errno in (errno.EACCES, errno.ENOENT):
1012 match.bad(
1012 match.bad(
1013 self.pathto(nd), encoding.strtolocal(inst.strerror)
1013 self.pathto(nd), encoding.strtolocal(inst.strerror)
1014 )
1014 )
1015 continue
1015 continue
1016 raise
1016 raise
1017 for f, kind, st in entries:
1017 for f, kind, st in entries:
1018 # Some matchers may return files in the visitentries set,
1018 # Some matchers may return files in the visitentries set,
1019 # instead of 'this', if the matcher explicitly mentions them
1019 # instead of 'this', if the matcher explicitly mentions them
1020 # and is not an exactmatcher. This is acceptable; we do not
1020 # and is not an exactmatcher. This is acceptable; we do not
1021 # make any hard assumptions about file-or-directory below
1021 # make any hard assumptions about file-or-directory below
1022 # based on the presence of `f` in visitentries. If
1022 # based on the presence of `f` in visitentries. If
1023 # visitchildrenset returned a set, we can always skip the
1023 # visitchildrenset returned a set, we can always skip the
1024 # entries *not* in the set it provided regardless of whether
1024 # entries *not* in the set it provided regardless of whether
1025 # they're actually a file or a directory.
1025 # they're actually a file or a directory.
1026 if visitentries and f not in visitentries:
1026 if visitentries and f not in visitentries:
1027 continue
1027 continue
1028 if normalizefile:
1028 if normalizefile:
1029 # even though f might be a directory, we're only
1029 # even though f might be a directory, we're only
1030 # interested in comparing it to files currently in the
1030 # interested in comparing it to files currently in the
1031 # dmap -- therefore normalizefile is enough
1031 # dmap -- therefore normalizefile is enough
1032 nf = normalizefile(
1032 nf = normalizefile(
1033 nd and (nd + b"/" + f) or f, True, True
1033 nd and (nd + b"/" + f) or f, True, True
1034 )
1034 )
1035 else:
1035 else:
1036 nf = nd and (nd + b"/" + f) or f
1036 nf = nd and (nd + b"/" + f) or f
1037 if nf not in results:
1037 if nf not in results:
1038 if kind == dirkind:
1038 if kind == dirkind:
1039 if not ignore(nf):
1039 if not ignore(nf):
1040 if matchtdir:
1040 if matchtdir:
1041 matchtdir(nf)
1041 matchtdir(nf)
1042 wadd(nf)
1042 wadd(nf)
1043 if nf in dmap and (matchalways or matchfn(nf)):
1043 if nf in dmap and (matchalways or matchfn(nf)):
1044 results[nf] = None
1044 results[nf] = None
1045 elif kind == regkind or kind == lnkkind:
1045 elif kind == regkind or kind == lnkkind:
1046 if nf in dmap:
1046 if nf in dmap:
1047 if matchalways or matchfn(nf):
1047 if matchalways or matchfn(nf):
1048 results[nf] = st
1048 results[nf] = st
1049 elif (matchalways or matchfn(nf)) and not ignore(
1049 elif (matchalways or matchfn(nf)) and not ignore(
1050 nf
1050 nf
1051 ):
1051 ):
1052 # unknown file -- normalize if necessary
1052 # unknown file -- normalize if necessary
1053 if not alreadynormed:
1053 if not alreadynormed:
1054 nf = normalize(nf, False, True)
1054 nf = normalize(nf, False, True)
1055 results[nf] = st
1055 results[nf] = st
1056 elif nf in dmap and (matchalways or matchfn(nf)):
1056 elif nf in dmap and (matchalways or matchfn(nf)):
1057 results[nf] = None
1057 results[nf] = None
1058
1058
1059 for nd, d in work:
1059 for nd, d in work:
1060 # alreadynormed means that processwork doesn't have to do any
1060 # alreadynormed means that processwork doesn't have to do any
1061 # expensive directory normalization
1061 # expensive directory normalization
1062 alreadynormed = not normalize or nd == d
1062 alreadynormed = not normalize or nd == d
1063 traverse([d], alreadynormed)
1063 traverse([d], alreadynormed)
1064
1064
1065 for s in subrepos:
1065 for s in subrepos:
1066 del results[s]
1066 del results[s]
1067 del results[b'.hg']
1067 del results[b'.hg']
1068
1068
1069 # step 3: visit remaining files from dmap
1069 # step 3: visit remaining files from dmap
1070 if not skipstep3 and not exact:
1070 if not skipstep3 and not exact:
1071 # If a dmap file is not in results yet, it was either
1072 # a) not matching matchfn, b) ignored, c) missing, or d) under a
1073 # symlink directory.
1074 if not results and matchalways:
1074 if not results and matchalways:
1075 visit = [f for f in dmap]
1075 visit = [f for f in dmap]
1076 else:
1076 else:
1077 visit = [f for f in dmap if f not in results and matchfn(f)]
1077 visit = [f for f in dmap if f not in results and matchfn(f)]
1078 visit.sort()
1078 visit.sort()
1079
1079
1080 if unknown:
1080 if unknown:
1081 # unknown == True means we walked all dirs under the roots
1082 # that weren't ignored, and everything that matched was stat'ed
1083 # and is already in results.
1084 # The rest must thus be ignored or under a symlink.
1085 audit_path = pathutil.pathauditor(self._root, cached=True)
1085 audit_path = pathutil.pathauditor(self._root, cached=True)
1086
1086
1087 for nf in iter(visit):
1087 for nf in iter(visit):
1088 # If a stat for the same file was already added with a
1088 # If a stat for the same file was already added with a
1089 # different case, don't add one for this, since that would
1089 # different case, don't add one for this, since that would
1090 # make it appear as if the file exists under both names
1090 # make it appear as if the file exists under both names
1091 # on disk.
1091 # on disk.
1092 if (
1092 if (
1093 normalizefile
1093 normalizefile
1094 and normalizefile(nf, True, True) in results
1094 and normalizefile(nf, True, True) in results
1095 ):
1095 ):
1096 results[nf] = None
1096 results[nf] = None
1097 # Report ignored items in the dmap as long as they are not
1097 # Report ignored items in the dmap as long as they are not
1098 # under a symlink directory.
1098 # under a symlink directory.
1099 elif audit_path.check(nf):
1099 elif audit_path.check(nf):
1100 try:
1100 try:
1101 results[nf] = lstat(join(nf))
1101 results[nf] = lstat(join(nf))
1102 # file was just ignored, no links, and exists
1102 # file was just ignored, no links, and exists
1103 except OSError:
1103 except OSError:
1104 # file doesn't exist
1104 # file doesn't exist
1105 results[nf] = None
1105 results[nf] = None
1106 else:
1106 else:
1107 # It's either missing or under a symlink directory
1107 # It's either missing or under a symlink directory
1108 # which we in this case report as missing
1108 # which we in this case report as missing
1109 results[nf] = None
1109 results[nf] = None
1110 else:
1110 else:
1111 # We may not have walked the full directory tree above,
1111 # We may not have walked the full directory tree above,
1112 # so stat and check everything we missed.
1112 # so stat and check everything we missed.
1113 iv = iter(visit)
1113 iv = iter(visit)
1114 for st in util.statfiles([join(i) for i in visit]):
1114 for st in util.statfiles([join(i) for i in visit]):
1115 results[next(iv)] = st
1115 results[next(iv)] = st
1116 return results
1116 return results
1117
1117
1118 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1118 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1119 # Force Rayon (Rust parallelism library) to respect the number of
1119 # Force Rayon (Rust parallelism library) to respect the number of
1120 # workers. This is a temporary workaround until Rust code knows
1120 # workers. This is a temporary workaround until Rust code knows
1121 # how to read the config file.
1121 # how to read the config file.
1122 numcpus = self._ui.configint(b"worker", b"numcpus")
1122 numcpus = self._ui.configint(b"worker", b"numcpus")
1123 if numcpus is not None:
1123 if numcpus is not None:
1124 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1124 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1125
1125
1126 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1126 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1127 if not workers_enabled:
1127 if not workers_enabled:
1128 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1128 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1129
1129
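# The two config knobs consulted above are normally set in an hgrc; a small
# hypothetical example (the values are illustrative only):
#
#     [worker]
#     numcpus = 4      # exported to the Rust code as RAYON_NUM_THREADS=4
#     enabled = False  # forces RAYON_NUM_THREADS=1 (single-threaded)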
1130 (
1130 (
1131 lookup,
1131 lookup,
1132 modified,
1132 modified,
1133 added,
1133 added,
1134 removed,
1134 removed,
1135 deleted,
1135 deleted,
1136 clean,
1136 clean,
1137 ignored,
1137 ignored,
1138 unknown,
1138 unknown,
1139 warnings,
1139 warnings,
1140 bad,
1140 bad,
1141 traversed,
1141 traversed,
1142 dirty,
1142 dirty,
1143 ) = rustmod.status(
1143 ) = rustmod.status(
1144 self._map._rustmap,
1144 self._map._rustmap,
1145 matcher,
1145 matcher,
1146 self._rootdir,
1146 self._rootdir,
1147 self._ignorefiles(),
1147 self._ignorefiles(),
1148 self._checkexec,
1148 self._checkexec,
1149 self._lastnormaltime,
1149 self._lastnormaltime,
1150 bool(list_clean),
1150 bool(list_clean),
1151 bool(list_ignored),
1151 bool(list_ignored),
1152 bool(list_unknown),
1152 bool(list_unknown),
1153 bool(matcher.traversedir),
1153 bool(matcher.traversedir),
1154 )
1154 )
1155
1155
1156 self._dirty |= dirty
1156 self._dirty |= dirty
1157
1157
1158 if matcher.traversedir:
1158 if matcher.traversedir:
1159 for dir in traversed:
1159 for dir in traversed:
1160 matcher.traversedir(dir)
1160 matcher.traversedir(dir)
1161
1161
1162 if self._ui.warn:
1162 if self._ui.warn:
1163 for item in warnings:
1163 for item in warnings:
1164 if isinstance(item, tuple):
1164 if isinstance(item, tuple):
1165 file_path, syntax = item
1165 file_path, syntax = item
1166 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1166 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1167 file_path,
1167 file_path,
1168 syntax,
1168 syntax,
1169 )
1169 )
1170 self._ui.warn(msg)
1170 self._ui.warn(msg)
1171 else:
1171 else:
1172 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1172 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1173 self._ui.warn(
1173 self._ui.warn(
1174 msg
1174 msg
1175 % (
1175 % (
1176 pathutil.canonpath(
1176 pathutil.canonpath(
1177 self._rootdir, self._rootdir, item
1177 self._rootdir, self._rootdir, item
1178 ),
1178 ),
1179 b"No such file or directory",
1179 b"No such file or directory",
1180 )
1180 )
1181 )
1181 )
1182
1182
1183 for (fn, message) in bad:
1183 for (fn, message) in bad:
1184 matcher.bad(fn, encoding.strtolocal(message))
1184 matcher.bad(fn, encoding.strtolocal(message))
1185
1185
1186 status = scmutil.status(
1186 status = scmutil.status(
1187 modified=modified,
1187 modified=modified,
1188 added=added,
1188 added=added,
1189 removed=removed,
1189 removed=removed,
1190 deleted=deleted,
1190 deleted=deleted,
1191 unknown=unknown,
1191 unknown=unknown,
1192 ignored=ignored,
1192 ignored=ignored,
1193 clean=clean,
1193 clean=clean,
1194 )
1194 )
1195 return (lookup, status)
1195 return (lookup, status)
1196
1196
1197 def status(self, match, subrepos, ignored, clean, unknown):
1197 def status(self, match, subrepos, ignored, clean, unknown):
1198 """Determine the status of the working copy relative to the
1198 """Determine the status of the working copy relative to the
1199 dirstate and return a pair of (unsure, status), where status is of type
1199 dirstate and return a pair of (unsure, status), where status is of type
1200 scmutil.status and:
1200 scmutil.status and:
1201
1201
1202 unsure:
1202 unsure:
1203 files that might have been modified since the dirstate was
1203 files that might have been modified since the dirstate was
1204 written, but need to be read to be sure (size is the same
1204 written, but need to be read to be sure (size is the same
1205 but mtime differs)
1205 but mtime differs)
1206 status.modified:
1206 status.modified:
1207 files that have definitely been modified since the dirstate
1207 files that have definitely been modified since the dirstate
1208 was written (different size or mode)
1208 was written (different size or mode)
1209 status.clean:
1209 status.clean:
1210 files that have definitely not been modified since the
1210 files that have definitely not been modified since the
1211 dirstate was written
1211 dirstate was written
1212 """
1212 """
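# A minimal illustrative sketch of the contract above, assuming a repository
# object ``repo`` (an assumption, not part of this module):
#
#     unsure, st = repo.dirstate.status(
#         matchmod.always(), subrepos=[], ignored=False, clean=False, unknown=True
#     )
#     # files in ``unsure`` need a content comparison; st.modified, st.added,
#     # st.removed, st.deleted, st.unknown, st.ignored and st.clean are lists.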
1213 listignored, listclean, listunknown = ignored, clean, unknown
1213 listignored, listclean, listunknown = ignored, clean, unknown
1214 lookup, modified, added, unknown, ignored = [], [], [], [], []
1214 lookup, modified, added, unknown, ignored = [], [], [], [], []
1215 removed, deleted, clean = [], [], []
1215 removed, deleted, clean = [], [], []
1216
1216
1217 dmap = self._map
1217 dmap = self._map
1218 dmap.preload()
1218 dmap.preload()
1219
1219
1220 use_rust = True
1220 use_rust = True
1221
1221
1222 allowed_matchers = (
1222 allowed_matchers = (
1223 matchmod.alwaysmatcher,
1223 matchmod.alwaysmatcher,
1224 matchmod.exactmatcher,
1224 matchmod.exactmatcher,
1225 matchmod.includematcher,
1225 matchmod.includematcher,
1226 )
1226 )
1227
1227
1228 if rustmod is None:
1228 if rustmod is None:
1229 use_rust = False
1229 use_rust = False
1230 elif self._checkcase:
1230 elif self._checkcase:
1231 # Case-insensitive filesystems are not handled yet
1231 # Case-insensitive filesystems are not handled yet
1232 use_rust = False
1232 use_rust = False
1233 elif subrepos:
1233 elif subrepos:
1234 use_rust = False
1234 use_rust = False
1235 elif sparse.enabled:
1235 elif sparse.enabled:
1236 use_rust = False
1236 use_rust = False
1237 elif not isinstance(match, allowed_matchers):
1237 elif not isinstance(match, allowed_matchers):
1238 # Some matchers have yet to be implemented
1238 # Some matchers have yet to be implemented
1239 use_rust = False
1239 use_rust = False
1240
1240
1241 if use_rust:
1241 if use_rust:
1242 try:
1242 try:
1243 return self._rust_status(
1243 return self._rust_status(
1244 match, listclean, listignored, listunknown
1244 match, listclean, listignored, listunknown
1245 )
1245 )
1246 except rustmod.FallbackError:
1246 except rustmod.FallbackError:
1247 pass
1247 pass
1248
1248
1249 def noop(f):
1249 def noop(f):
1250 pass
1250 pass
1251
1251
1252 dcontains = dmap.__contains__
1252 dcontains = dmap.__contains__
1253 dget = dmap.__getitem__
1253 dget = dmap.__getitem__
1254 ladd = lookup.append # aka "unsure"
1254 ladd = lookup.append # aka "unsure"
1255 madd = modified.append
1255 madd = modified.append
1256 aadd = added.append
1256 aadd = added.append
1257 uadd = unknown.append if listunknown else noop
1257 uadd = unknown.append if listunknown else noop
1258 iadd = ignored.append if listignored else noop
1258 iadd = ignored.append if listignored else noop
1259 radd = removed.append
1259 radd = removed.append
1260 dadd = deleted.append
1260 dadd = deleted.append
1261 cadd = clean.append if listclean else noop
1261 cadd = clean.append if listclean else noop
1262 mexact = match.exact
1262 mexact = match.exact
1263 dirignore = self._dirignore
1263 dirignore = self._dirignore
1264 checkexec = self._checkexec
1264 checkexec = self._checkexec
1265 copymap = self._map.copymap
1265 copymap = self._map.copymap
1266 lastnormaltime = self._lastnormaltime
1266 lastnormaltime = self._lastnormaltime
1267
1267
1268 # We need to do full walks when either
1268 # We need to do full walks when either
1269 # - we're listing all clean files, or
1269 # - we're listing all clean files, or
1270 # - match.traversedir does something, because match.traversedir should
1270 # - match.traversedir does something, because match.traversedir should
1271 # be called for every dir in the working dir
1271 # be called for every dir in the working dir
1272 full = listclean or match.traversedir is not None
1272 full = listclean or match.traversedir is not None
1273 for fn, st in pycompat.iteritems(
1273 for fn, st in pycompat.iteritems(
1274 self.walk(match, subrepos, listunknown, listignored, full=full)
1274 self.walk(match, subrepos, listunknown, listignored, full=full)
1275 ):
1275 ):
1276 if not dcontains(fn):
1276 if not dcontains(fn):
1277 if (listignored or mexact(fn)) and dirignore(fn):
1277 if (listignored or mexact(fn)) and dirignore(fn):
1278 if listignored:
1278 if listignored:
1279 iadd(fn)
1279 iadd(fn)
1280 else:
1280 else:
1281 uadd(fn)
1281 uadd(fn)
1282 continue
1282 continue
1283
1283
1284 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1284 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1285 # written like that for performance reasons. dmap[fn] is not a
1285 # written like that for performance reasons. dmap[fn] is not a
1286 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1286 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1287 # opcode has fast paths when the value to be unpacked is a tuple or
1287 # opcode has fast paths when the value to be unpacked is a tuple or
1288 # a list, but falls back to creating a full-fledged iterator in
1288 # a list, but falls back to creating a full-fledged iterator in
1289 # general. That is much slower than simply accessing and storing the
1289 # general. That is much slower than simply accessing and storing the
1290 # tuple members one by one.
1290 # tuple members one by one.
1291 t = dget(fn)
1291 t = dget(fn)
1292 state = t[0]
1292 state = t[0]
1293 mode = t[1]
1293 mode = t[1]
1294 size = t[2]
1294 size = t[2]
1295 time = t[3]
1295 time = t[3]
1296
1296
1297 if not st and state in b"nma":
1297 if not st and state in b"nma":
1298 dadd(fn)
1298 dadd(fn)
1299 elif state == b'n':
1299 elif state == b'n':
1300 if (
1300 if (
1301 size >= 0
1301 size >= 0
1302 and (
1302 and (
1303 (size != st.st_size and size != st.st_size & _rangemask)
1303 (size != st.st_size and size != st.st_size & _rangemask)
1304 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1304 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1305 )
1305 )
1306 or size == -2 # other parent
1306 or size == -2 # other parent
1307 or fn in copymap
1307 or fn in copymap
1308 ):
1308 ):
1309 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1309 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1310 # issue6456: Size returned may be longer due to
1310 # issue6456: Size returned may be longer due to
1311 # encryption on EXT-4 fscrypt, undecided.
1311 # encryption on EXT-4 fscrypt, undecided.
1312 ladd(fn)
1312 ladd(fn)
1313 else:
1313 else:
1314 madd(fn)
1314 madd(fn)
1315 elif (
1315 elif (
1316 time != st[stat.ST_MTIME]
1316 time != st[stat.ST_MTIME]
1317 and time != st[stat.ST_MTIME] & _rangemask
1317 and time != st[stat.ST_MTIME] & _rangemask
1318 ):
1318 ):
1319 ladd(fn)
1319 ladd(fn)
1320 elif st[stat.ST_MTIME] == lastnormaltime:
1320 elif st[stat.ST_MTIME] == lastnormaltime:
1321 # fn may have just been marked as normal and it may have
1321 # fn may have just been marked as normal and it may have
1322 # changed in the same second without changing its size.
1322 # changed in the same second without changing its size.
1323 # This can happen if we quickly do multiple commits.
1323 # This can happen if we quickly do multiple commits.
1324 # Force lookup, so we don't miss such a racy file change.
1324 # Force lookup, so we don't miss such a racy file change.
1325 ladd(fn)
1325 ladd(fn)
1326 elif listclean:
1326 elif listclean:
1327 cadd(fn)
1327 cadd(fn)
1328 elif state == b'm':
1328 elif state == b'm':
1329 madd(fn)
1329 madd(fn)
1330 elif state == b'a':
1330 elif state == b'a':
1331 aadd(fn)
1331 aadd(fn)
1332 elif state == b'r':
1332 elif state == b'r':
1333 radd(fn)
1333 radd(fn)
1334 status = scmutil.status(
1334 status = scmutil.status(
1335 modified, added, removed, deleted, unknown, ignored, clean
1335 modified, added, removed, deleted, unknown, ignored, clean
1336 )
1336 )
1337 return (lookup, status)
1337 return (lookup, status)
1338
1338
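# A concrete (hypothetical) walk-through of the 'n'-state branch above:
# suppose dmap[b'a.txt'] recorded mode 0o644, size 12 and mtime 1630000000,
# and os.lstat() now reports the same size and mode. Then:
#   - same mtime, and mtime != lastnormaltime  -> clean (cadd, when clean
#                                                 files are requested)
#   - different mtime                          -> unsure (ladd), content
#                                                 compared later
#   - mtime == lastnormaltime                  -> unsure (ladd), guarding
#                                                 against a same-second
#                                                 modification race
#   - different size, or flipped exec bit
#     (when checkexec)                         -> modified (madd)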
1339 def matches(self, match):
1339 def matches(self, match):
1340 """
1340 """
1341 return files in the dirstate (in whatever state) filtered by match
1341 return files in the dirstate (in whatever state) filtered by match
1342 """
1342 """
1343 dmap = self._map
1343 dmap = self._map
1344 if rustmod is not None:
1344 if rustmod is not None:
1345 dmap = self._map._rustmap
1345 dmap = self._map._rustmap
1346
1346
1347 if match.always():
1347 if match.always():
1348 return dmap.keys()
1348 return dmap.keys()
1349 files = match.files()
1349 files = match.files()
1350 if match.isexact():
1350 if match.isexact():
1351 # fast path -- filter the other way around, since typically files is
1351 # fast path -- filter the other way around, since typically files is
1352 # much smaller than dmap
1352 # much smaller than dmap
1353 return [f for f in files if f in dmap]
1353 return [f for f in files if f in dmap]
1354 if match.prefix() and all(fn in dmap for fn in files):
1354 if match.prefix() and all(fn in dmap for fn in files):
1355 # fast path -- all the values are known to be files, so just return
1355 # fast path -- all the values are known to be files, so just return
1356 # that
1356 # that
1357 return list(files)
1357 return list(files)
1358 return [f for f in dmap if match(f)]
1358 return [f for f in dmap if match(f)]
1359
1359
1360 def _actualfilename(self, tr):
1360 def _actualfilename(self, tr):
1361 if tr:
1361 if tr:
1362 return self._pendingfilename
1362 return self._pendingfilename
1363 else:
1363 else:
1364 return self._filename
1364 return self._filename
1365
1365
1366 def savebackup(self, tr, backupname):
1366 def savebackup(self, tr, backupname):
1367 '''Save current dirstate into backup file'''
1367 '''Save current dirstate into backup file'''
1368 filename = self._actualfilename(tr)
1368 filename = self._actualfilename(tr)
1369 assert backupname != filename
1369 assert backupname != filename
1370
1370
1371 # use '_writedirstate' instead of 'write' to write changes certainly,
1371 # use '_writedirstate' instead of 'write' to write changes certainly,
1372 # because the latter omits writing out if transaction is running.
1372 # because the latter omits writing out if transaction is running.
1373 # output file will be used to create backup of dirstate at this point.
1373 # output file will be used to create backup of dirstate at this point.
1374 if self._dirty or not self._opener.exists(filename):
1374 if self._dirty or not self._opener.exists(filename):
1375 self._writedirstate(
1375 self._writedirstate(
1376 self._opener(filename, b"w", atomictemp=True, checkambig=True)
1376 self._opener(filename, b"w", atomictemp=True, checkambig=True)
1377 )
1377 )
1378
1378
1379 if tr:
1379 if tr:
1380 # ensure that subsequent tr.writepending returns True for
1380 # ensure that subsequent tr.writepending returns True for
1381 # changes written out above, even if dirstate is never
1381 # changes written out above, even if dirstate is never
1382 # changed after this
1382 # changed after this
1383 tr.addfilegenerator(
1383 tr.addfilegenerator(
1384 b'dirstate',
1384 b'dirstate',
1385 (self._filename,),
1385 (self._filename,),
1386 self._writedirstate,
1386 self._writedirstate,
1387 location=b'plain',
1387 location=b'plain',
1388 )
1388 )
1389
1389
1390 # ensure that pending file written above is unlinked at
1390 # ensure that pending file written above is unlinked at
1391 # failure, even if tr.writepending isn't invoked until the
1391 # failure, even if tr.writepending isn't invoked until the
1392 # end of this transaction
1392 # end of this transaction
1393 tr.registertmp(filename, location=b'plain')
1393 tr.registertmp(filename, location=b'plain')
1394
1394
1395 self._opener.tryunlink(backupname)
1395 self._opener.tryunlink(backupname)
1396 # hardlink backup is okay because _writedirstate is always called
1396 # hardlink backup is okay because _writedirstate is always called
1397 # with an "atomictemp=True" file.
1397 # with an "atomictemp=True" file.
1398 util.copyfile(
1398 util.copyfile(
1399 self._opener.join(filename),
1399 self._opener.join(filename),
1400 self._opener.join(backupname),
1400 self._opener.join(backupname),
1401 hardlink=True,
1401 hardlink=True,
1402 )
1402 )
1403
1403
1404 def restorebackup(self, tr, backupname):
1404 def restorebackup(self, tr, backupname):
1405 '''Restore dirstate by backup file'''
1405 '''Restore dirstate by backup file'''
1406 # this "invalidate()" prevents "wlock.release()" from writing
1406 # this "invalidate()" prevents "wlock.release()" from writing
1407 # changes of dirstate out after restoring from backup file
1407 # changes of dirstate out after restoring from backup file
1408 self.invalidate()
1408 self.invalidate()
1409 filename = self._actualfilename(tr)
1409 filename = self._actualfilename(tr)
1410 o = self._opener
1410 o = self._opener
1411 if util.samefile(o.join(backupname), o.join(filename)):
1411 if util.samefile(o.join(backupname), o.join(filename)):
1412 o.unlink(backupname)
1412 o.unlink(backupname)
1413 else:
1413 else:
1414 o.rename(backupname, filename, checkambig=True)
1414 o.rename(backupname, filename, checkambig=True)
1415
1415
1416 def clearbackup(self, tr, backupname):
1416 def clearbackup(self, tr, backupname):
1417 '''Clear backup file'''
1417 '''Clear backup file'''
1418 self._opener.unlink(backupname)
1418 self._opener.unlink(backupname)
1419
1419
1420
1420
1421 class dirstatemap(object):
1421 class dirstatemap(object):
1422 """Map encapsulating the dirstate's contents.
1422 """Map encapsulating the dirstate's contents.
1423
1423
1424 The dirstate contains the following state:
1424 The dirstate contains the following state:
1425
1425
1426 - `identity` is the identity of the dirstate file, which can be used to
1426 - `identity` is the identity of the dirstate file, which can be used to
1427 detect when changes have occurred to the dirstate file.
1427 detect when changes have occurred to the dirstate file.
1428
1428
1429 - `parents` is a pair containing the parents of the working copy. The
1429 - `parents` is a pair containing the parents of the working copy. The
1430 parents are updated by calling `setparents`.
1430 parents are updated by calling `setparents`.
1431
1431
1432 - the state map maps filenames to tuples of (state, mode, size, mtime),
1432 - the state map maps filenames to tuples of (state, mode, size, mtime),
1433 where state is a single character representing 'normal', 'added',
1433 where state is a single character representing 'normal', 'added',
1434 'removed', or 'merged'. It is read by treating the dirstate as a
1434 'removed', or 'merged'. It is read by treating the dirstate as a
1435 dict. File state is updated by calling the `addfile`, `removefile` and
1435 dict. File state is updated by calling the `addfile`, `removefile` and
1436 `dropfile` methods.
1436 `dropfile` methods.
1437
1437
1438 - `copymap` maps destination filenames to their source filename.
1438 - `copymap` maps destination filenames to their source filename.
1439
1439
1440 The dirstate also provides the following views onto the state:
1440 The dirstate also provides the following views onto the state:
1441
1441
1442 - `nonnormalset` is a set of the filenames that have state other
1442 - `nonnormalset` is a set of the filenames that have state other
1443 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1443 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1444
1444
1445 - `otherparentset` is a set of the filenames that are marked as coming
1445 - `otherparentset` is a set of the filenames that are marked as coming
1446 from the second parent when the dirstate is currently being merged.
1446 from the second parent when the dirstate is currently being merged.
1447
1447
1448 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1448 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1449 form that they appear as in the dirstate.
1449 form that they appear as in the dirstate.
1450
1450
1451 - `dirfoldmap` is a dict mapping normalized directory names to the
1451 - `dirfoldmap` is a dict mapping normalized directory names to the
1452 denormalized form that they appear as in the dirstate.
1452 denormalized form that they appear as in the dirstate.
1453 """
1453 """
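# An illustrative sketch of the state described above (hypothetical values):
#
#     state map: {b'a.txt': (b'n', 0o644, 12, 1630000000),
#                 b'b.txt': (b'a', 0, -1, -1)}
#     copymap:   {b'b.txt': b'a.txt'}   # b.txt was copied from a.txt
#     nonnormalset:   {b'b.txt'}        # state != 'n'
#     otherparentset: set()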
1454
1454
1455 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
1455 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
1456 self._ui = ui
1456 self._ui = ui
1457 self._opener = opener
1457 self._opener = opener
1458 self._root = root
1458 self._root = root
1459 self._filename = b'dirstate'
1459 self._filename = b'dirstate'
1460 self._nodelen = 20
1460 self._nodelen = 20
1461 self._nodeconstants = nodeconstants
1461 self._nodeconstants = nodeconstants
1462 assert (
1462 assert (
1463 not use_dirstate_v2
1463 not use_dirstate_v2
1464 ), "should have detected unsupported requirement"
1464 ), "should have detected unsupported requirement"
1465
1465
1466 self._parents = None
1466 self._parents = None
1467 self._dirtyparents = False
1467 self._dirtyparents = False
1468
1468
1469 # for consistent view between _pl() and _read() invocations
1469 # for consistent view between _pl() and _read() invocations
1470 self._pendingmode = None
1470 self._pendingmode = None
1471
1471
1472 @propertycache
1472 @propertycache
1473 def _map(self):
1473 def _map(self):
1474 self._map = {}
1474 self._map = {}
1475 self.read()
1475 self.read()
1476 return self._map
1476 return self._map
1477
1477
1478 @propertycache
1478 @propertycache
1479 def copymap(self):
1479 def copymap(self):
1480 self.copymap = {}
1480 self.copymap = {}
1481 self._map
1481 self._map
1482 return self.copymap
1482 return self.copymap
1483
1483
1484 def directories(self):
1484 def directories(self):
1485 # Rust / dirstate-v2 only
1485 # Rust / dirstate-v2 only
1486 return []
1486 return []
1487
1487
1488 def clear(self):
1488 def clear(self):
1489 self._map.clear()
1489 self._map.clear()
1490 self.copymap.clear()
1490 self.copymap.clear()
1491 self.setparents(self._nodeconstants.nullid, self._nodeconstants.nullid)
1491 self.setparents(self._nodeconstants.nullid, self._nodeconstants.nullid)
1492 util.clearcachedproperty(self, b"_dirs")
1492 util.clearcachedproperty(self, b"_dirs")
1493 util.clearcachedproperty(self, b"_alldirs")
1493 util.clearcachedproperty(self, b"_alldirs")
1494 util.clearcachedproperty(self, b"filefoldmap")
1494 util.clearcachedproperty(self, b"filefoldmap")
1495 util.clearcachedproperty(self, b"dirfoldmap")
1495 util.clearcachedproperty(self, b"dirfoldmap")
1496 util.clearcachedproperty(self, b"nonnormalset")
1496 util.clearcachedproperty(self, b"nonnormalset")
1497 util.clearcachedproperty(self, b"otherparentset")
1497 util.clearcachedproperty(self, b"otherparentset")
1498
1498
1499 def items(self):
1499 def items(self):
1500 return pycompat.iteritems(self._map)
1500 return pycompat.iteritems(self._map)
1501
1501
1502 # forward for python2,3 compat
1502 # forward for python2,3 compat
1503 iteritems = items
1503 iteritems = items
1504
1504
1505 def __len__(self):
1505 def __len__(self):
1506 return len(self._map)
1506 return len(self._map)
1507
1507
1508 def __iter__(self):
1508 def __iter__(self):
1509 return iter(self._map)
1509 return iter(self._map)
1510
1510
1511 def get(self, key, default=None):
1511 def get(self, key, default=None):
1512 return self._map.get(key, default)
1512 return self._map.get(key, default)
1513
1513
1514 def __contains__(self, key):
1514 def __contains__(self, key):
1515 return key in self._map
1515 return key in self._map
1516
1516
1517 def __getitem__(self, key):
1517 def __getitem__(self, key):
1518 return self._map[key]
1518 return self._map[key]
1519
1519
1520 def keys(self):
1520 def keys(self):
1521 return self._map.keys()
1521 return self._map.keys()
1522
1522
1523 def preload(self):
1523 def preload(self):
1524 """Loads the underlying data, if it's not already loaded"""
1524 """Loads the underlying data, if it's not already loaded"""
1525 self._map
1525 self._map
1526
1526
1527 def addfile(self, f, oldstate, state, mode, size, mtime):
1527 def addfile(self, f, oldstate, state, mode, size, mtime):
1528 """Add a tracked file to the dirstate."""
1528 """Add a tracked file to the dirstate."""
1529 if oldstate in b"?r" and "_dirs" in self.__dict__:
1529 if oldstate in b"?r" and "_dirs" in self.__dict__:
1530 self._dirs.addpath(f)
1530 self._dirs.addpath(f)
1531 if oldstate == b"?" and "_alldirs" in self.__dict__:
1531 if oldstate == b"?" and "_alldirs" in self.__dict__:
1532 self._alldirs.addpath(f)
1532 self._alldirs.addpath(f)
1533 self._map[f] = dirstatetuple(state, mode, size, mtime)
1533 self._map[f] = dirstatetuple(state, mode, size, mtime)
1534 if state != b'n' or mtime == -1:
1534 if state != b'n' or mtime == -1:
1535 self.nonnormalset.add(f)
1535 self.nonnormalset.add(f)
1536 if size == -2:
1536 if size == -2:
1537 self.otherparentset.add(f)
1537 self.otherparentset.add(f)
1538
1538
1539 def removefile(self, f, oldstate, size):
1539 def removefile(self, f, oldstate, size):
1540 """
1540 """
1541 Mark a file as removed in the dirstate.
1541 Mark a file as removed in the dirstate.
1542
1542
1543 The `size` parameter is used to store sentinel values that indicate
1543 The `size` parameter is used to store sentinel values that indicate
1544 the file's previous state. In the future, we should refactor this
1544 the file's previous state. In the future, we should refactor this
1545 to be more explicit about what that state is.
1545 to be more explicit about what that state is.
1546 """
1546 """
1547 if oldstate not in b"?r" and "_dirs" in self.__dict__:
1547 if oldstate not in b"?r" and "_dirs" in self.__dict__:
1548 self._dirs.delpath(f)
1548 self._dirs.delpath(f)
1549 if oldstate == b"?" and "_alldirs" in self.__dict__:
1549 if oldstate == b"?" and "_alldirs" in self.__dict__:
1550 self._alldirs.addpath(f)
1550 self._alldirs.addpath(f)
1551 if "filefoldmap" in self.__dict__:
1551 if "filefoldmap" in self.__dict__:
1552 normed = util.normcase(f)
1552 normed = util.normcase(f)
1553 self.filefoldmap.pop(normed, None)
1553 self.filefoldmap.pop(normed, None)
1554 self._map[f] = dirstatetuple(b'r', 0, size, 0)
1554 self._map[f] = dirstatetuple(b'r', 0, size, 0)
1555 self.nonnormalset.add(f)
1555 self.nonnormalset.add(f)
1556
1556
1557 def dropfile(self, f, oldstate):
1557 def dropfile(self, f, oldstate):
1558 """
1558 """
1559 Remove a file from the dirstate. Returns True if the file was
1559 Remove a file from the dirstate. Returns True if the file was
1560 previously recorded.
1560 previously recorded.
1561 """
1561 """
1562 exists = self._map.pop(f, None) is not None
1562 exists = self._map.pop(f, None) is not None
1563 if exists:
1563 if exists:
1564 if oldstate != b"r" and "_dirs" in self.__dict__:
1564 if oldstate != b"r" and "_dirs" in self.__dict__:
1565 self._dirs.delpath(f)
1565 self._dirs.delpath(f)
1566 if "_alldirs" in self.__dict__:
1566 if "_alldirs" in self.__dict__:
1567 self._alldirs.delpath(f)
1567 self._alldirs.delpath(f)
1568 if "filefoldmap" in self.__dict__:
1568 if "filefoldmap" in self.__dict__:
1569 normed = util.normcase(f)
1569 normed = util.normcase(f)
1570 self.filefoldmap.pop(normed, None)
1570 self.filefoldmap.pop(normed, None)
1571 self.nonnormalset.discard(f)
1571 self.nonnormalset.discard(f)
1572 return exists
1572 return exists
1573
1573
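# A minimal illustrative sketch of the add/remove/drop lifecycle above, with
# hypothetical arguments (``dmap`` stands for an instance of this class):
#
#     dmap.addfile(b'a.txt', b'?', b'n', 0o644, 12, 1630000000)  # start tracking
#     dmap.removefile(b'a.txt', b'n', 0)                         # mark as removed ('r')
#     dmap.dropfile(b'a.txt', b'r')                              # forget entirely -> True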
1574 def clearambiguoustimes(self, files, now):
1574 def clearambiguoustimes(self, files, now):
1575 for f in files:
1575 for f in files:
1576 e = self.get(f)
1576 e = self.get(f)
1577 if e is not None and e[0] == b'n' and e[3] == now:
1577 if e is not None and e[0] == b'n' and e[3] == now:
1578 self._map[f] = dirstatetuple(e[0], e[1], e[2], -1)
1578 self._map[f] = dirstatetuple(e[0], e[1], e[2], -1)
1579 self.nonnormalset.add(f)
1579 self.nonnormalset.add(f)
1580
1580
1581 def nonnormalentries(self):
1581 def nonnormalentries(self):
1582 '''Compute the nonnormal dirstate entries from the dmap'''
1582 '''Compute the nonnormal dirstate entries from the dmap'''
1583 try:
1583 try:
1584 return parsers.nonnormalotherparententries(self._map)
1584 return parsers.nonnormalotherparententries(self._map)
1585 except AttributeError:
1585 except AttributeError:
1586 nonnorm = set()
1586 nonnorm = set()
1587 otherparent = set()
1587 otherparent = set()
1588 for fname, e in pycompat.iteritems(self._map):
1588 for fname, e in pycompat.iteritems(self._map):
1589 if e[0] != b'n' or e[3] == -1:
1589 if e[0] != b'n' or e[3] == -1:
1590 nonnorm.add(fname)
1590 nonnorm.add(fname)
1591 if e[0] == b'n' and e[2] == -2:
1591 if e[0] == b'n' and e[2] == -2:
1592 otherparent.add(fname)
1592 otherparent.add(fname)
1593 return nonnorm, otherparent
1593 return nonnorm, otherparent
1594
1594
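# An illustrative sketch (hypothetical entries) of how the fallback above
# classifies a map:
#
#     {b'clean.txt':  (b'n', 0o644, 12, 1630000000),  # neither set
#      b'lookup.txt': (b'n', 0o644, 12, -1),          # nonnorm (mtime == -1)
#      b'added.txt':  (b'a', 0, -1, -1),              # nonnorm (state != 'n')
#      b'other.txt':  (b'n', 0, -2, -1)}              # nonnorm and otherparent (size == -2)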
1595 @propertycache
1595 @propertycache
1596 def filefoldmap(self):
1596 def filefoldmap(self):
1597 """Returns a dictionary mapping normalized case paths to their
1597 """Returns a dictionary mapping normalized case paths to their
1598 non-normalized versions.
1598 non-normalized versions.
1599 """
1599 """
1600 try:
1600 try:
1601 makefilefoldmap = parsers.make_file_foldmap
1601 makefilefoldmap = parsers.make_file_foldmap
1602 except AttributeError:
1602 except AttributeError:
1603 pass
1603 pass
1604 else:
1604 else:
1605 return makefilefoldmap(
1605 return makefilefoldmap(
1606 self._map, util.normcasespec, util.normcasefallback
1606 self._map, util.normcasespec, util.normcasefallback
1607 )
1607 )
1608
1608
1609 f = {}
1609 f = {}
1610 normcase = util.normcase
1610 normcase = util.normcase
1611 for name, s in pycompat.iteritems(self._map):
1611 for name, s in pycompat.iteritems(self._map):
1612 if s[0] != b'r':
1612 if s[0] != b'r':
1613 f[normcase(name)] = name
1613 f[normcase(name)] = name
1614 f[b'.'] = b'.' # prevents useless util.fspath() invocation
1614 f[b'.'] = b'.' # prevents useless util.fspath() invocation
1615 return f
1615 return f
1616
1616
1617 def hastrackeddir(self, d):
1617 def hastrackeddir(self, d):
1618 """
1618 """
1619 Returns True if the dirstate contains a tracked (not removed) file
1619 Returns True if the dirstate contains a tracked (not removed) file
1620 in this directory.
1620 in this directory.
1621 """
1621 """
1622 return d in self._dirs
1622 return d in self._dirs
1623
1623
1624 def hasdir(self, d):
1624 def hasdir(self, d):
1625 """
1625 """
1626 Returns True if the dirstate contains a file (tracked or removed)
1626 Returns True if the dirstate contains a file (tracked or removed)
1627 in this directory.
1627 in this directory.
1628 """
1628 """
1629 return d in self._alldirs
1629 return d in self._alldirs
1630
1630
1631 @propertycache
1631 @propertycache
1632 def _dirs(self):
1632 def _dirs(self):
1633 return pathutil.dirs(self._map, b'r')
1633 return pathutil.dirs(self._map, b'r')
1634
1634
1635 @propertycache
1635 @propertycache
1636 def _alldirs(self):
1636 def _alldirs(self):
1637 return pathutil.dirs(self._map)
1637 return pathutil.dirs(self._map)
1638
1638
1639 def _opendirstatefile(self):
1639 def _opendirstatefile(self):
1640 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1640 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1641 if self._pendingmode is not None and self._pendingmode != mode:
1641 if self._pendingmode is not None and self._pendingmode != mode:
1642 fp.close()
1642 fp.close()
1643 raise error.Abort(
1643 raise error.Abort(
1644 _(b'working directory state may be changed in parallel')
1645 )
1645 )
1646 self._pendingmode = mode
1646 self._pendingmode = mode
1647 return fp
1647 return fp
1648
1648
1649 def parents(self):
1649 def parents(self):
1650 if not self._parents:
1650 if not self._parents:
1651 try:
1651 try:
1652 fp = self._opendirstatefile()
1652 fp = self._opendirstatefile()
1653 st = fp.read(2 * self._nodelen)
1653 st = fp.read(2 * self._nodelen)
1654 fp.close()
1654 fp.close()
1655 except IOError as err:
1655 except IOError as err:
1656 if err.errno != errno.ENOENT:
1656 if err.errno != errno.ENOENT:
1657 raise
1657 raise
1658 # File doesn't exist, so the current state is empty
1658 # File doesn't exist, so the current state is empty
1659 st = b''
1659 st = b''
1660
1660
1661 l = len(st)
1661 l = len(st)
1662 if l == self._nodelen * 2:
1662 if l == self._nodelen * 2:
1663 self._parents = (
1663 self._parents = (
1664 st[: self._nodelen],
1664 st[: self._nodelen],
1665 st[self._nodelen : 2 * self._nodelen],
1665 st[self._nodelen : 2 * self._nodelen],
1666 )
1666 )
1667 elif l == 0:
1667 elif l == 0:
1668 self._parents = (
1668 self._parents = (
1669 self._nodeconstants.nullid,
1669 self._nodeconstants.nullid,
1670 self._nodeconstants.nullid,
1670 self._nodeconstants.nullid,
1671 )
1671 )
1672 else:
1672 else:
1673 raise error.Abort(
1673 raise error.Abort(
1674 _(b'working directory state appears damaged!')
1674 _(b'working directory state appears damaged!')
1675 )
1675 )
1676
1676
1677 return self._parents
1677 return self._parents
1678
1678
1679 def setparents(self, p1, p2):
1679 def setparents(self, p1, p2):
1680 self._parents = (p1, p2)
1680 self._parents = (p1, p2)
1681 self._dirtyparents = True
1681 self._dirtyparents = True
1682
1682
1683 def read(self):
1683 def read(self):
1684 # ignore HG_PENDING because identity is used only for writing
1684 # ignore HG_PENDING because identity is used only for writing
1685 self.identity = util.filestat.frompath(
1685 self.identity = util.filestat.frompath(
1686 self._opener.join(self._filename)
1686 self._opener.join(self._filename)
1687 )
1687 )
1688
1688
1689 try:
1689 try:
1690 fp = self._opendirstatefile()
1690 fp = self._opendirstatefile()
1691 try:
1691 try:
1692 st = fp.read()
1692 st = fp.read()
1693 finally:
1693 finally:
1694 fp.close()
1694 fp.close()
1695 except IOError as err:
1695 except IOError as err:
1696 if err.errno != errno.ENOENT:
1696 if err.errno != errno.ENOENT:
1697 raise
1697 raise
1698 return
1698 return
1699 if not st:
1699 if not st:
1700 return
1700 return
1701
1701
1702 if util.safehasattr(parsers, b'dict_new_presized'):
1702 if util.safehasattr(parsers, b'dict_new_presized'):
1703 # Make an estimate of the number of files in the dirstate based on
1704 # its size. This trades wasting some memory for avoiding costly
1705 # resizes. Each entry has a prefix of 17 bytes followed by one or
1706 # two path names. Studies of various large-scale real-world repositories
1707 # found 54 bytes to be a reasonable upper bound for the average path name.
1708 # Copy entries are ignored for the sake of this estimate.
1709 self._map = parsers.dict_new_presized(len(st) // 71)
1709 self._map = parsers.dict_new_presized(len(st) // 71)
1710
1710
1711 # Python's garbage collector triggers a GC each time a certain number
1711 # Python's garbage collector triggers a GC each time a certain number
1712 # of container objects (the number being defined by
1712 # of container objects (the number being defined by
1713 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1713 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1714 # for each file in the dirstate. The C version then immediately marks
1714 # for each file in the dirstate. The C version then immediately marks
1715 # them as not to be tracked by the collector. However, this has no
1715 # them as not to be tracked by the collector. However, this has no
1716 # effect on when GCs are triggered, only on what objects the GC looks
1716 # effect on when GCs are triggered, only on what objects the GC looks
1717 # into. This means that O(number of files) GCs are unavoidable.
1717 # into. This means that O(number of files) GCs are unavoidable.
1718 # Depending on when in the process's lifetime the dirstate is parsed,
1718 # Depending on when in the process's lifetime the dirstate is parsed,
1719 # this can get very expensive. As a workaround, disable GC while
1719 # this can get very expensive. As a workaround, disable GC while
1720 # parsing the dirstate.
1720 # parsing the dirstate.
1721 #
1721 #
1722 # (we cannot decorate the function directly since it is in a C module)
1722 # (we cannot decorate the function directly since it is in a C module)
1723 parse_dirstate = util.nogc(parsers.parse_dirstate)
1723 parse_dirstate = util.nogc(parsers.parse_dirstate)
1724 p = parse_dirstate(self._map, self.copymap, st)
1724 p = parse_dirstate(self._map, self.copymap, st)
1725 if not self._dirtyparents:
1725 if not self._dirtyparents:
1726 self.setparents(*p)
1726 self.setparents(*p)
1727
1727
1728 # Avoid excess attribute lookups by fast pathing certain checks
1728 # Avoid excess attribute lookups by fast pathing certain checks
1729 self.__contains__ = self._map.__contains__
1729 self.__contains__ = self._map.__contains__
1730 self.__getitem__ = self._map.__getitem__
1730 self.__getitem__ = self._map.__getitem__
1731 self.get = self._map.get
1731 self.get = self._map.get
1732
1732
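# A minimal standalone sketch of the "disable GC around a hot call" technique
# that read() relies on via util.nogc (an illustration of the idea, not the
# actual util.nogc implementation):
#
#     import functools, gc
#
#     def nogc(func):
#         @functools.wraps(func)
#         def wrapper(*args, **kwargs):
#             enabled = gc.isenabled()
#             gc.disable()
#             try:
#                 return func(*args, **kwargs)
#             finally:
#                 if enabled:
#                     gc.enable()
#         return wrapper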
1733 def write(self, st, now):
1733 def write(self, st, now):
1734 st.write(
1734 st.write(
1735 parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
1735 parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
1736 )
1736 )
1737 st.close()
1737 st.close()
1738 self._dirtyparents = False
1738 self._dirtyparents = False
1739 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1739 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1740
1740
1741 @propertycache
1741 @propertycache
1742 def nonnormalset(self):
1742 def nonnormalset(self):
1743 nonnorm, otherparents = self.nonnormalentries()
1743 nonnorm, otherparents = self.nonnormalentries()
1744 self.otherparentset = otherparents
1744 self.otherparentset = otherparents
1745 return nonnorm
1745 return nonnorm
1746
1746
1747 @propertycache
1747 @propertycache
1748 def otherparentset(self):
1748 def otherparentset(self):
1749 nonnorm, otherparents = self.nonnormalentries()
1749 nonnorm, otherparents = self.nonnormalentries()
1750 self.nonnormalset = nonnorm
1750 self.nonnormalset = nonnorm
1751 return otherparents
1751 return otherparents
1752
1752
1753 def non_normal_or_other_parent_paths(self):
1753 def non_normal_or_other_parent_paths(self):
1754 return self.nonnormalset.union(self.otherparentset)
1754 return self.nonnormalset.union(self.otherparentset)
1755
1755
1756 @propertycache
1756 @propertycache
1757 def identity(self):
1757 def identity(self):
1758 self._map
1758 self._map
1759 return self.identity
1759 return self.identity
1760
1760
1761 @propertycache
1761 @propertycache
1762 def dirfoldmap(self):
1762 def dirfoldmap(self):
1763 f = {}
1763 f = {}
1764 normcase = util.normcase
1764 normcase = util.normcase
1765 for name in self._dirs:
1765 for name in self._dirs:
1766 f[normcase(name)] = name
1766 f[normcase(name)] = name
1767 return f
1767 return f
1768
1768
1769
1769
1770 if rustmod is not None:
1770 if rustmod is not None:
1771
1771
1772 class dirstatemap(object):
1772 class dirstatemap(object):
1773 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
1773 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
1774 self._use_dirstate_v2 = use_dirstate_v2
1774 self._use_dirstate_v2 = use_dirstate_v2
1775 self._nodeconstants = nodeconstants
1775 self._nodeconstants = nodeconstants
1776 self._ui = ui
1776 self._ui = ui
1777 self._opener = opener
1777 self._opener = opener
1778 self._root = root
1778 self._root = root
1779 self._filename = b'dirstate'
1779 self._filename = b'dirstate'
1780 self._nodelen = 20 # Also update Rust code when changing this!
1780 self._nodelen = 20 # Also update Rust code when changing this!
1781 self._parents = None
1781 self._parents = None
1782 self._dirtyparents = False
1782 self._dirtyparents = False
1783
1783
1784 # for consistent view between _pl() and _read() invocations
1784 # for consistent view between _pl() and _read() invocations
1785 self._pendingmode = None
1785 self._pendingmode = None
1786
1786
1787 self._use_dirstate_tree = self._ui.configbool(
1787 self._use_dirstate_tree = self._ui.configbool(
1788 b"experimental",
1788 b"experimental",
1789 b"dirstate-tree.in-memory",
1789 b"dirstate-tree.in-memory",
1790 False,
1790 False,
1791 )
1791 )
1792
1792
1793 def addfile(self, *args, **kwargs):
1793 def addfile(self, *args, **kwargs):
1794 return self._rustmap.addfile(*args, **kwargs)
1794 return self._rustmap.addfile(*args, **kwargs)
1795
1795
1796 def removefile(self, *args, **kwargs):
1796 def removefile(self, *args, **kwargs):
1797 return self._rustmap.removefile(*args, **kwargs)
1797 return self._rustmap.removefile(*args, **kwargs)
1798
1798
1799 def dropfile(self, *args, **kwargs):
1799 def dropfile(self, *args, **kwargs):
1800 return self._rustmap.dropfile(*args, **kwargs)
1800 return self._rustmap.dropfile(*args, **kwargs)
1801
1801
1802 def clearambiguoustimes(self, *args, **kwargs):
1802 def clearambiguoustimes(self, *args, **kwargs):
1803 return self._rustmap.clearambiguoustimes(*args, **kwargs)
1803 return self._rustmap.clearambiguoustimes(*args, **kwargs)
1804
1804
1805 def nonnormalentries(self):
1805 def nonnormalentries(self):
1806 return self._rustmap.nonnormalentries()
1806 return self._rustmap.nonnormalentries()
1807
1807
1808 def get(self, *args, **kwargs):
1808 def get(self, *args, **kwargs):
1809 return self._rustmap.get(*args, **kwargs)
1809 return self._rustmap.get(*args, **kwargs)
1810
1810
1811 @property
1811 @property
1812 def copymap(self):
1812 def copymap(self):
1813 return self._rustmap.copymap()
1813 return self._rustmap.copymap()
1814
1814
1815 def directories(self):
1815 def directories(self):
1816 return self._rustmap.directories()
1816 return self._rustmap.directories()
1817
1817
1818 def preload(self):
1818 def preload(self):
1819 self._rustmap
1819 self._rustmap
1820
1820
1821 def clear(self):
1821 def clear(self):
1822 self._rustmap.clear()
1822 self._rustmap.clear()
1823 self.setparents(
1823 self.setparents(
1824 self._nodeconstants.nullid, self._nodeconstants.nullid
1824 self._nodeconstants.nullid, self._nodeconstants.nullid
1825 )
1825 )
1826 util.clearcachedproperty(self, b"_dirs")
1826 util.clearcachedproperty(self, b"_dirs")
1827 util.clearcachedproperty(self, b"_alldirs")
1827 util.clearcachedproperty(self, b"_alldirs")
1828 util.clearcachedproperty(self, b"dirfoldmap")
1828 util.clearcachedproperty(self, b"dirfoldmap")
1829
1829
1830 def items(self):
1830 def items(self):
1831 return self._rustmap.items()
1831 return self._rustmap.items()
1832
1832
1833 def keys(self):
1833 def keys(self):
1834 return iter(self._rustmap)
1834 return iter(self._rustmap)
1835
1835
1836 def __contains__(self, key):
1836 def __contains__(self, key):
1837 return key in self._rustmap
1837 return key in self._rustmap
1838
1838
1839 def __getitem__(self, item):
1839 def __getitem__(self, item):
1840 return self._rustmap[item]
1840 return self._rustmap[item]
1841
1841
1842 def __len__(self):
1842 def __len__(self):
1843 return len(self._rustmap)
1843 return len(self._rustmap)
1844
1844
1845 def __iter__(self):
1845 def __iter__(self):
1846 return iter(self._rustmap)
1846 return iter(self._rustmap)
1847
1847
1848 # forward for python2,3 compat
1848 # forward for python2,3 compat
1849 iteritems = items
1849 iteritems = items
1850
1850
1851 def _opendirstatefile(self):
1851 def _opendirstatefile(self):
1852 fp, mode = txnutil.trypending(
1852 fp, mode = txnutil.trypending(
1853 self._root, self._opener, self._filename
1853 self._root, self._opener, self._filename
1854 )
1854 )
1855 if self._pendingmode is not None and self._pendingmode != mode:
1855 if self._pendingmode is not None and self._pendingmode != mode:
1856 fp.close()
1856 fp.close()
1857 raise error.Abort(
1857 raise error.Abort(
1858 _(b'working directory state may be changed in parallel')
1859 )
1859 )
1860 self._pendingmode = mode
1860 self._pendingmode = mode
1861 return fp
1861 return fp
1862
1862
1863 def setparents(self, p1, p2):
1863 def setparents(self, p1, p2):
1864 self._parents = (p1, p2)
1864 self._parents = (p1, p2)
1865 self._dirtyparents = True
1865 self._dirtyparents = True
1866
1866
1867 def parents(self):
1867 def parents(self):
1868 if not self._parents:
1868 if not self._parents:
1869 if self._use_dirstate_v2:
1869 if self._use_dirstate_v2:
1870 offset = len(rustmod.V2_FORMAT_MARKER)
1870 offset = len(rustmod.V2_FORMAT_MARKER)
1871 else:
1871 else:
1872 offset = 0
1872 offset = 0
1873 read_len = offset + self._nodelen * 2
1873 read_len = offset + self._nodelen * 2
1874 try:
1874 try:
1875 fp = self._opendirstatefile()
1875 fp = self._opendirstatefile()
1876 st = fp.read(read_len)
1876 st = fp.read(read_len)
1877 fp.close()
1877 fp.close()
1878 except IOError as err:
1878 except IOError as err:
1879 if err.errno != errno.ENOENT:
1879 if err.errno != errno.ENOENT:
1880 raise
1880 raise
1881 # File doesn't exist, so the current state is empty
1881 # File doesn't exist, so the current state is empty
1882 st = b''
1882 st = b''
1883
1883
1884 l = len(st)
1884 l = len(st)
1885 if l == read_len:
1885 if l == read_len:
1886 st = st[offset:]
1886 st = st[offset:]
1887 self._parents = (
1887 self._parents = (
1888 st[: self._nodelen],
1888 st[: self._nodelen],
1889 st[self._nodelen : 2 * self._nodelen],
1889 st[self._nodelen : 2 * self._nodelen],
1890 )
1890 )
1891 elif l == 0:
1891 elif l == 0:
1892 self._parents = (
1892 self._parents = (
1893 self._nodeconstants.nullid,
1893 self._nodeconstants.nullid,
1894 self._nodeconstants.nullid,
1894 self._nodeconstants.nullid,
1895 )
1895 )
1896 else:
1896 else:
1897 raise error.Abort(
1897 raise error.Abort(
1898 _(b'working directory state appears damaged!')
1898 _(b'working directory state appears damaged!')
1899 )
1899 )
1900
1900
1901 return self._parents
1901 return self._parents
1902
1902
1903 @propertycache
1903 @propertycache
1904 def _rustmap(self):
1904 def _rustmap(self):
1905 """
1905 """
1906 Fills the Dirstatemap when called.
1906 Fills the Dirstatemap when called.
1907 """
1907 """
1908 # ignore HG_PENDING because identity is used only for writing
1908 # ignore HG_PENDING because identity is used only for writing
1909 self.identity = util.filestat.frompath(
1909 self.identity = util.filestat.frompath(
1910 self._opener.join(self._filename)
1910 self._opener.join(self._filename)
1911 )
1911 )
1912
1912
1913 try:
1913 try:
1914 fp = self._opendirstatefile()
1914 fp = self._opendirstatefile()
1915 try:
1915 try:
1916 st = fp.read()
1916 st = fp.read()
1917 finally:
1917 finally:
1918 fp.close()
1918 fp.close()
1919 except IOError as err:
1919 except IOError as err:
1920 if err.errno != errno.ENOENT:
1920 if err.errno != errno.ENOENT:
1921 raise
1921 raise
1922 st = b''
1922 st = b''
1923
1923
1924 self._rustmap, parents = rustmod.DirstateMap.new(
1924 self._rustmap, parents = rustmod.DirstateMap.new(
1925 self._use_dirstate_tree, self._use_dirstate_v2, st
1925 self._use_dirstate_tree, self._use_dirstate_v2, st
1926 )
1926 )
1927
1927
1928 if parents and not self._dirtyparents:
1928 if parents and not self._dirtyparents:
1929 self.setparents(*parents)
1929 self.setparents(*parents)
1930
1930
1931 self.__contains__ = self._rustmap.__contains__
1931 self.__contains__ = self._rustmap.__contains__
1932 self.__getitem__ = self._rustmap.__getitem__
1932 self.__getitem__ = self._rustmap.__getitem__
1933 self.get = self._rustmap.get
1933 self.get = self._rustmap.get
1934 return self._rustmap
1934 return self._rustmap
1935
1935
1936 def write(self, st, now):
1936 def write(self, st, now):
1937 parents = self.parents()
1937 parents = self.parents()
1938 packed = self._rustmap.write(
1938 packed = self._rustmap.write(
1939 self._use_dirstate_v2, parents[0], parents[1], now
1939 self._use_dirstate_v2, parents[0], parents[1], now
1940 )
1940 )
1941 st.write(packed)
1941 st.write(packed)
1942 st.close()
1942 st.close()
1943 self._dirtyparents = False
1943 self._dirtyparents = False
1944
1944
1945 @propertycache
1945 @propertycache
1946 def filefoldmap(self):
1946 def filefoldmap(self):
1947 """Returns a dictionary mapping normalized case paths to their
1947 """Returns a dictionary mapping normalized case paths to their
1948 non-normalized versions.
1948 non-normalized versions.
1949 """
1949 """
1950 return self._rustmap.filefoldmapasdict()
1950 return self._rustmap.filefoldmapasdict()
1951
1951
1952 def hastrackeddir(self, d):
1952 def hastrackeddir(self, d):
1953 return self._rustmap.hastrackeddir(d)
1953 return self._rustmap.hastrackeddir(d)
1954
1954
1955 def hasdir(self, d):
1955 def hasdir(self, d):
1956 return self._rustmap.hasdir(d)
1956 return self._rustmap.hasdir(d)
1957
1957
1958 @propertycache
1958 @propertycache
1959 def identity(self):
1959 def identity(self):
1960 self._rustmap
1960 self._rustmap
1961 return self.identity
1961 return self.identity
1962
1962
1963 @property
1963 @property
1964 def nonnormalset(self):
1964 def nonnormalset(self):
1965 nonnorm = self._rustmap.non_normal_entries()
1965 nonnorm = self._rustmap.non_normal_entries()
1966 return nonnorm
1966 return nonnorm
1967
1967
1968 @propertycache
1968 @propertycache
1969 def otherparentset(self):
1969 def otherparentset(self):
1970 otherparents = self._rustmap.other_parent_entries()
1970 otherparents = self._rustmap.other_parent_entries()
1971 return otherparents
1971 return otherparents
1972
1972
1973 def non_normal_or_other_parent_paths(self):
1973 def non_normal_or_other_parent_paths(self):
1974 return self._rustmap.non_normal_or_other_parent_paths()
1974 return self._rustmap.non_normal_or_other_parent_paths()
1975
1975
1976 @propertycache
1976 @propertycache
1977 def dirfoldmap(self):
1977 def dirfoldmap(self):
1978 f = {}
1978 f = {}
1979 normcase = util.normcase
1979 normcase = util.normcase
1980 for name, _pseudo_entry in self.directories():
1980 for name, _pseudo_entry in self.directories():
1981 f[normcase(name)] = name
1981 f[normcase(name)] = name
1982 return f
1982 return f