dirstate: introduce a symbolic constant for the NONNORMAL marker...

Author: marmoute
Changeset: r48277:4ac418b4 (branch: default)
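The change gives the magic `size` value -1 used in dirstate entries a symbolic name, alongside the existing FROM_P2 = -2 marker, and switches the call sites over to it. A minimal sketch of the idea follows; the constants mirror the ones defined in dirstate.py, while the `describe_size` helper is hypothetical and used purely for illustration.

```python
# Sketch only: the two constants mirror dirstate.py; describe_size() is a
# hypothetical helper, not part of Mercurial.

# a special value used internally for `size` if the file comes from the other parent
FROM_P2 = -2

# a special value used internally for `size` if the file is modified/merged/added
NONNORMAL = -1


def describe_size(size):
    """Translate the special dirstate `size` markers into readable labels."""
    if size == NONNORMAL:  # previously written as a bare -1 at each call site
        return 'modified/merged/added'
    if size == FROM_P2:
        return 'coming from the other parent'
    return 'regular file size: %d' % size


print(describe_size(NONNORMAL))  # -> modified/merged/added
```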
@@ -1,1985 +1,1988 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .pycompat import delattr
17 from .pycompat import delattr
18
18
19 from hgdemandimport import tracing
19 from hgdemandimport import tracing
20
20
21 from . import (
21 from . import (
22 encoding,
22 encoding,
23 error,
23 error,
24 match as matchmod,
24 match as matchmod,
25 pathutil,
25 pathutil,
26 policy,
26 policy,
27 pycompat,
27 pycompat,
28 scmutil,
28 scmutil,
29 sparse,
29 sparse,
30 txnutil,
30 txnutil,
31 util,
31 util,
32 )
32 )
33
33
34 from .interfaces import (
34 from .interfaces import (
35 dirstate as intdirstate,
35 dirstate as intdirstate,
36 util as interfaceutil,
36 util as interfaceutil,
37 )
37 )
38
38
39 parsers = policy.importmod('parsers')
39 parsers = policy.importmod('parsers')
40 rustmod = policy.importrust('dirstate')
40 rustmod = policy.importrust('dirstate')
41
41
42 SUPPORTS_DIRSTATE_V2 = rustmod is not None
42 SUPPORTS_DIRSTATE_V2 = rustmod is not None
43
43
44 propertycache = util.propertycache
44 propertycache = util.propertycache
45 filecache = scmutil.filecache
45 filecache = scmutil.filecache
46 _rangemask = 0x7FFFFFFF
46 _rangemask = 0x7FFFFFFF
47
47
48 dirstatetuple = parsers.dirstatetuple
48 dirstatetuple = parsers.dirstatetuple
49
49
50
50
51 # a special value used internally for `size` if the file come from the other parent
51 # a special value used internally for `size` if the file come from the other parent
52 FROM_P2 = -2
52 FROM_P2 = -2
53
53
54 + # a special value used internally for `size` if the file is modified/merged/added
55 + NONNORMAL = -1
56 +
54
57
55 class repocache(filecache):
58 class repocache(filecache):
56 """filecache for files in .hg/"""
59 """filecache for files in .hg/"""
57
60
58 def join(self, obj, fname):
61 def join(self, obj, fname):
59 return obj._opener.join(fname)
62 return obj._opener.join(fname)
60
63
61
64
62 class rootcache(filecache):
65 class rootcache(filecache):
63 """filecache for files in the repository root"""
66 """filecache for files in the repository root"""
64
67
65 def join(self, obj, fname):
68 def join(self, obj, fname):
66 return obj._join(fname)
69 return obj._join(fname)
67
70
68
71
69 def _getfsnow(vfs):
72 def _getfsnow(vfs):
70 '''Get "now" timestamp on filesystem'''
73 '''Get "now" timestamp on filesystem'''
71 tmpfd, tmpname = vfs.mkstemp()
74 tmpfd, tmpname = vfs.mkstemp()
72 try:
75 try:
73 return os.fstat(tmpfd)[stat.ST_MTIME]
76 return os.fstat(tmpfd)[stat.ST_MTIME]
74 finally:
77 finally:
75 os.close(tmpfd)
78 os.close(tmpfd)
76 vfs.unlink(tmpname)
79 vfs.unlink(tmpname)
77
80
78
81
79 @interfaceutil.implementer(intdirstate.idirstate)
82 @interfaceutil.implementer(intdirstate.idirstate)
80 class dirstate(object):
83 class dirstate(object):
81 def __init__(
84 def __init__(
82 self,
85 self,
83 opener,
86 opener,
84 ui,
87 ui,
85 root,
88 root,
86 validate,
89 validate,
87 sparsematchfn,
90 sparsematchfn,
88 nodeconstants,
91 nodeconstants,
89 use_dirstate_v2,
92 use_dirstate_v2,
90 ):
93 ):
91 """Create a new dirstate object.
94 """Create a new dirstate object.
92
95
93 opener is an open()-like callable that can be used to open the
96 opener is an open()-like callable that can be used to open the
94 dirstate file; root is the root of the directory tracked by
97 dirstate file; root is the root of the directory tracked by
95 the dirstate.
98 the dirstate.
96 """
99 """
97 self._use_dirstate_v2 = use_dirstate_v2
100 self._use_dirstate_v2 = use_dirstate_v2
98 self._nodeconstants = nodeconstants
101 self._nodeconstants = nodeconstants
99 self._opener = opener
102 self._opener = opener
100 self._validate = validate
103 self._validate = validate
101 self._root = root
104 self._root = root
102 self._sparsematchfn = sparsematchfn
105 self._sparsematchfn = sparsematchfn
103 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
106 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
104 # UNC path pointing to root share (issue4557)
107 # UNC path pointing to root share (issue4557)
105 self._rootdir = pathutil.normasprefix(root)
108 self._rootdir = pathutil.normasprefix(root)
106 self._dirty = False
109 self._dirty = False
107 self._lastnormaltime = 0
110 self._lastnormaltime = 0
108 self._ui = ui
111 self._ui = ui
109 self._filecache = {}
112 self._filecache = {}
110 self._parentwriters = 0
113 self._parentwriters = 0
111 self._filename = b'dirstate'
114 self._filename = b'dirstate'
112 self._pendingfilename = b'%s.pending' % self._filename
115 self._pendingfilename = b'%s.pending' % self._filename
113 self._plchangecallbacks = {}
116 self._plchangecallbacks = {}
114 self._origpl = None
117 self._origpl = None
115 self._updatedfiles = set()
118 self._updatedfiles = set()
116 self._mapcls = dirstatemap
119 self._mapcls = dirstatemap
117 # Access and cache cwd early, so we don't access it for the first time
120 # Access and cache cwd early, so we don't access it for the first time
118 # after a working-copy update caused it to not exist (accessing it then
121 # after a working-copy update caused it to not exist (accessing it then
119 # raises an exception).
122 # raises an exception).
120 self._cwd
123 self._cwd
121
124
122 def prefetch_parents(self):
125 def prefetch_parents(self):
123 """make sure the parents are loaded
126 """make sure the parents are loaded
124
127
125 Used to avoid a race condition.
128 Used to avoid a race condition.
126 """
129 """
127 self._pl
130 self._pl
128
131
129 @contextlib.contextmanager
132 @contextlib.contextmanager
130 def parentchange(self):
133 def parentchange(self):
131 """Context manager for handling dirstate parents.
134 """Context manager for handling dirstate parents.
132
135
133 If an exception occurs in the scope of the context manager,
136 If an exception occurs in the scope of the context manager,
134 the incoherent dirstate won't be written when wlock is
137 the incoherent dirstate won't be written when wlock is
135 released.
138 released.
136 """
139 """
137 self._parentwriters += 1
140 self._parentwriters += 1
138 yield
141 yield
139 # Typically we want the "undo" step of a context manager in a
142 # Typically we want the "undo" step of a context manager in a
140 # finally block so it happens even when an exception
143 # finally block so it happens even when an exception
141 # occurs. In this case, however, we only want to decrement
144 # occurs. In this case, however, we only want to decrement
142 # parentwriters if the code in the with statement exits
145 # parentwriters if the code in the with statement exits
143 # normally, so we don't have a try/finally here on purpose.
146 # normally, so we don't have a try/finally here on purpose.
144 self._parentwriters -= 1
147 self._parentwriters -= 1
145
148
146 def pendingparentchange(self):
149 def pendingparentchange(self):
147 """Returns true if the dirstate is in the middle of a set of changes
150 """Returns true if the dirstate is in the middle of a set of changes
148 that modify the dirstate parent.
151 that modify the dirstate parent.
149 """
152 """
150 return self._parentwriters > 0
153 return self._parentwriters > 0
151
154
152 @propertycache
155 @propertycache
153 def _map(self):
156 def _map(self):
154 """Return the dirstate contents (see documentation for dirstatemap)."""
157 """Return the dirstate contents (see documentation for dirstatemap)."""
155 self._map = self._mapcls(
158 self._map = self._mapcls(
156 self._ui,
159 self._ui,
157 self._opener,
160 self._opener,
158 self._root,
161 self._root,
159 self._nodeconstants,
162 self._nodeconstants,
160 self._use_dirstate_v2,
163 self._use_dirstate_v2,
161 )
164 )
162 return self._map
165 return self._map
163
166
164 @property
167 @property
165 def _sparsematcher(self):
168 def _sparsematcher(self):
166 """The matcher for the sparse checkout.
169 """The matcher for the sparse checkout.
167
170
168 The working directory may not include every file from a manifest. The
171 The working directory may not include every file from a manifest. The
169 matcher obtained by this property will match a path if it is to be
172 matcher obtained by this property will match a path if it is to be
170 included in the working directory.
173 included in the working directory.
171 """
174 """
172 # TODO there is potential to cache this property. For now, the matcher
175 # TODO there is potential to cache this property. For now, the matcher
173 # is resolved on every access. (But the called function does use a
176 # is resolved on every access. (But the called function does use a
174 # cache to keep the lookup fast.)
177 # cache to keep the lookup fast.)
175 return self._sparsematchfn()
178 return self._sparsematchfn()
176
179
177 @repocache(b'branch')
180 @repocache(b'branch')
178 def _branch(self):
181 def _branch(self):
179 try:
182 try:
180 return self._opener.read(b"branch").strip() or b"default"
183 return self._opener.read(b"branch").strip() or b"default"
181 except IOError as inst:
184 except IOError as inst:
182 if inst.errno != errno.ENOENT:
185 if inst.errno != errno.ENOENT:
183 raise
186 raise
184 return b"default"
187 return b"default"
185
188
186 @property
189 @property
187 def _pl(self):
190 def _pl(self):
188 return self._map.parents()
191 return self._map.parents()
189
192
190 def hasdir(self, d):
193 def hasdir(self, d):
191 return self._map.hastrackeddir(d)
194 return self._map.hastrackeddir(d)
192
195
193 @rootcache(b'.hgignore')
196 @rootcache(b'.hgignore')
194 def _ignore(self):
197 def _ignore(self):
195 files = self._ignorefiles()
198 files = self._ignorefiles()
196 if not files:
199 if not files:
197 return matchmod.never()
200 return matchmod.never()
198
201
199 pats = [b'include:%s' % f for f in files]
202 pats = [b'include:%s' % f for f in files]
200 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
203 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
201
204
202 @propertycache
205 @propertycache
203 def _slash(self):
206 def _slash(self):
204 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
207 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
205
208
206 @propertycache
209 @propertycache
207 def _checklink(self):
210 def _checklink(self):
208 return util.checklink(self._root)
211 return util.checklink(self._root)
209
212
210 @propertycache
213 @propertycache
211 def _checkexec(self):
214 def _checkexec(self):
212 return bool(util.checkexec(self._root))
215 return bool(util.checkexec(self._root))
213
216
214 @propertycache
217 @propertycache
215 def _checkcase(self):
218 def _checkcase(self):
216 return not util.fscasesensitive(self._join(b'.hg'))
219 return not util.fscasesensitive(self._join(b'.hg'))
217
220
218 def _join(self, f):
221 def _join(self, f):
219 # much faster than os.path.join()
222 # much faster than os.path.join()
220 # it's safe because f is always a relative path
223 # it's safe because f is always a relative path
221 return self._rootdir + f
224 return self._rootdir + f
222
225
223 def flagfunc(self, buildfallback):
226 def flagfunc(self, buildfallback):
224 if self._checklink and self._checkexec:
227 if self._checklink and self._checkexec:
225
228
226 def f(x):
229 def f(x):
227 try:
230 try:
228 st = os.lstat(self._join(x))
231 st = os.lstat(self._join(x))
229 if util.statislink(st):
232 if util.statislink(st):
230 return b'l'
233 return b'l'
231 if util.statisexec(st):
234 if util.statisexec(st):
232 return b'x'
235 return b'x'
233 except OSError:
236 except OSError:
234 pass
237 pass
235 return b''
238 return b''
236
239
237 return f
240 return f
238
241
239 fallback = buildfallback()
242 fallback = buildfallback()
240 if self._checklink:
243 if self._checklink:
241
244
242 def f(x):
245 def f(x):
243 if os.path.islink(self._join(x)):
246 if os.path.islink(self._join(x)):
244 return b'l'
247 return b'l'
245 if b'x' in fallback(x):
248 if b'x' in fallback(x):
246 return b'x'
249 return b'x'
247 return b''
250 return b''
248
251
249 return f
252 return f
250 if self._checkexec:
253 if self._checkexec:
251
254
252 def f(x):
255 def f(x):
253 if b'l' in fallback(x):
256 if b'l' in fallback(x):
254 return b'l'
257 return b'l'
255 if util.isexec(self._join(x)):
258 if util.isexec(self._join(x)):
256 return b'x'
259 return b'x'
257 return b''
260 return b''
258
261
259 return f
262 return f
260 else:
263 else:
261 return fallback
264 return fallback
262
265
263 @propertycache
266 @propertycache
264 def _cwd(self):
267 def _cwd(self):
265 # internal config: ui.forcecwd
268 # internal config: ui.forcecwd
266 forcecwd = self._ui.config(b'ui', b'forcecwd')
269 forcecwd = self._ui.config(b'ui', b'forcecwd')
267 if forcecwd:
270 if forcecwd:
268 return forcecwd
271 return forcecwd
269 return encoding.getcwd()
272 return encoding.getcwd()
270
273
271 def getcwd(self):
274 def getcwd(self):
272 """Return the path from which a canonical path is calculated.
275 """Return the path from which a canonical path is calculated.
273
276
274 This path should be used to resolve file patterns or to convert
277 This path should be used to resolve file patterns or to convert
275 canonical paths back to file paths for display. It shouldn't be
278 canonical paths back to file paths for display. It shouldn't be
276 used to get real file paths. Use vfs functions instead.
279 used to get real file paths. Use vfs functions instead.
277 """
280 """
278 cwd = self._cwd
281 cwd = self._cwd
279 if cwd == self._root:
282 if cwd == self._root:
280 return b''
283 return b''
281 # self._root ends with a path separator if self._root is '/' or 'C:\'
284 # self._root ends with a path separator if self._root is '/' or 'C:\'
282 rootsep = self._root
285 rootsep = self._root
283 if not util.endswithsep(rootsep):
286 if not util.endswithsep(rootsep):
284 rootsep += pycompat.ossep
287 rootsep += pycompat.ossep
285 if cwd.startswith(rootsep):
288 if cwd.startswith(rootsep):
286 return cwd[len(rootsep) :]
289 return cwd[len(rootsep) :]
287 else:
290 else:
288 # we're outside the repo. return an absolute path.
291 # we're outside the repo. return an absolute path.
289 return cwd
292 return cwd
290
293
291 def pathto(self, f, cwd=None):
294 def pathto(self, f, cwd=None):
292 if cwd is None:
295 if cwd is None:
293 cwd = self.getcwd()
296 cwd = self.getcwd()
294 path = util.pathto(self._root, cwd, f)
297 path = util.pathto(self._root, cwd, f)
295 if self._slash:
298 if self._slash:
296 return util.pconvert(path)
299 return util.pconvert(path)
297 return path
300 return path
298
301
299 def __getitem__(self, key):
302 def __getitem__(self, key):
300 """Return the current state of key (a filename) in the dirstate.
303 """Return the current state of key (a filename) in the dirstate.
301
304
302 States are:
305 States are:
303 n normal
306 n normal
304 m needs merging
307 m needs merging
305 r marked for removal
308 r marked for removal
306 a marked for addition
309 a marked for addition
307 ? not tracked
310 ? not tracked
308 """
311 """
309 return self._map.get(key, (b"?",))[0]
312 return self._map.get(key, (b"?",))[0]
310
313
311 def __contains__(self, key):
314 def __contains__(self, key):
312 return key in self._map
315 return key in self._map
313
316
314 def __iter__(self):
317 def __iter__(self):
315 return iter(sorted(self._map))
318 return iter(sorted(self._map))
316
319
317 def items(self):
320 def items(self):
318 return pycompat.iteritems(self._map)
321 return pycompat.iteritems(self._map)
319
322
320 iteritems = items
323 iteritems = items
321
324
322 def directories(self):
325 def directories(self):
323 return self._map.directories()
326 return self._map.directories()
324
327
325 def parents(self):
328 def parents(self):
326 return [self._validate(p) for p in self._pl]
329 return [self._validate(p) for p in self._pl]
327
330
328 def p1(self):
331 def p1(self):
329 return self._validate(self._pl[0])
332 return self._validate(self._pl[0])
330
333
331 def p2(self):
334 def p2(self):
332 return self._validate(self._pl[1])
335 return self._validate(self._pl[1])
333
336
334 def branch(self):
337 def branch(self):
335 return encoding.tolocal(self._branch)
338 return encoding.tolocal(self._branch)
336
339
337 def setparents(self, p1, p2=None):
340 def setparents(self, p1, p2=None):
338 """Set dirstate parents to p1 and p2.
341 """Set dirstate parents to p1 and p2.
339
342
340 When moving from two parents to one, 'm' merged entries a
343 When moving from two parents to one, 'm' merged entries a
341 adjusted to normal and previous copy records discarded and
344 adjusted to normal and previous copy records discarded and
342 returned by the call.
345 returned by the call.
343
346
344 See localrepo.setparents()
347 See localrepo.setparents()
345 """
348 """
346 if p2 is None:
349 if p2 is None:
347 p2 = self._nodeconstants.nullid
350 p2 = self._nodeconstants.nullid
348 if self._parentwriters == 0:
351 if self._parentwriters == 0:
349 raise ValueError(
352 raise ValueError(
350 b"cannot set dirstate parent outside of "
353 b"cannot set dirstate parent outside of "
351 b"dirstate.parentchange context manager"
354 b"dirstate.parentchange context manager"
352 )
355 )
353
356
354 self._dirty = True
357 self._dirty = True
355 oldp2 = self._pl[1]
358 oldp2 = self._pl[1]
356 if self._origpl is None:
359 if self._origpl is None:
357 self._origpl = self._pl
360 self._origpl = self._pl
358 self._map.setparents(p1, p2)
361 self._map.setparents(p1, p2)
359 copies = {}
362 copies = {}
360 if (
363 if (
361 oldp2 != self._nodeconstants.nullid
364 oldp2 != self._nodeconstants.nullid
362 and p2 == self._nodeconstants.nullid
365 and p2 == self._nodeconstants.nullid
363 ):
366 ):
364 candidatefiles = self._map.non_normal_or_other_parent_paths()
367 candidatefiles = self._map.non_normal_or_other_parent_paths()
365
368
366 for f in candidatefiles:
369 for f in candidatefiles:
367 s = self._map.get(f)
370 s = self._map.get(f)
368 if s is None:
371 if s is None:
369 continue
372 continue
370
373
371 # Discard 'm' markers when moving away from a merge state
374 # Discard 'm' markers when moving away from a merge state
372 if s[0] == b'm':
375 if s[0] == b'm':
373 source = self._map.copymap.get(f)
376 source = self._map.copymap.get(f)
374 if source:
377 if source:
375 copies[f] = source
378 copies[f] = source
376 self.normallookup(f)
379 self.normallookup(f)
377 # Also fix up otherparent markers
380 # Also fix up otherparent markers
378 elif s[0] == b'n' and s[2] == FROM_P2:
381 elif s[0] == b'n' and s[2] == FROM_P2:
379 source = self._map.copymap.get(f)
382 source = self._map.copymap.get(f)
380 if source:
383 if source:
381 copies[f] = source
384 copies[f] = source
382 self.add(f)
385 self.add(f)
383 return copies
386 return copies
384
387
385 def setbranch(self, branch):
388 def setbranch(self, branch):
386 self.__class__._branch.set(self, encoding.fromlocal(branch))
389 self.__class__._branch.set(self, encoding.fromlocal(branch))
387 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
390 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
388 try:
391 try:
389 f.write(self._branch + b'\n')
392 f.write(self._branch + b'\n')
390 f.close()
393 f.close()
391
394
392 # make sure filecache has the correct stat info for _branch after
395 # make sure filecache has the correct stat info for _branch after
393 # replacing the underlying file
396 # replacing the underlying file
394 ce = self._filecache[b'_branch']
397 ce = self._filecache[b'_branch']
395 if ce:
398 if ce:
396 ce.refresh()
399 ce.refresh()
397 except: # re-raises
400 except: # re-raises
398 f.discard()
401 f.discard()
399 raise
402 raise
400
403
401 def invalidate(self):
404 def invalidate(self):
402 """Causes the next access to reread the dirstate.
405 """Causes the next access to reread the dirstate.
403
406
404 This is different from localrepo.invalidatedirstate() because it always
407 This is different from localrepo.invalidatedirstate() because it always
405 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
408 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
406 check whether the dirstate has changed before rereading it."""
409 check whether the dirstate has changed before rereading it."""
407
410
408 for a in ("_map", "_branch", "_ignore"):
411 for a in ("_map", "_branch", "_ignore"):
409 if a in self.__dict__:
412 if a in self.__dict__:
410 delattr(self, a)
413 delattr(self, a)
411 self._lastnormaltime = 0
414 self._lastnormaltime = 0
412 self._dirty = False
415 self._dirty = False
413 self._updatedfiles.clear()
416 self._updatedfiles.clear()
414 self._parentwriters = 0
417 self._parentwriters = 0
415 self._origpl = None
418 self._origpl = None
416
419
417 def copy(self, source, dest):
420 def copy(self, source, dest):
418 """Mark dest as a copy of source. Unmark dest if source is None."""
421 """Mark dest as a copy of source. Unmark dest if source is None."""
419 if source == dest:
422 if source == dest:
420 return
423 return
421 self._dirty = True
424 self._dirty = True
422 if source is not None:
425 if source is not None:
423 self._map.copymap[dest] = source
426 self._map.copymap[dest] = source
424 self._updatedfiles.add(source)
427 self._updatedfiles.add(source)
425 self._updatedfiles.add(dest)
428 self._updatedfiles.add(dest)
426 elif self._map.copymap.pop(dest, None):
429 elif self._map.copymap.pop(dest, None):
427 self._updatedfiles.add(dest)
430 self._updatedfiles.add(dest)
428
431
429 def copied(self, file):
432 def copied(self, file):
430 return self._map.copymap.get(file, None)
433 return self._map.copymap.get(file, None)
431
434
432 def copies(self):
435 def copies(self):
433 return self._map.copymap
436 return self._map.copymap
434
437
435 def _addpath(self, f, state, mode, size, mtime):
438 def _addpath(self, f, state, mode, size, mtime):
436 oldstate = self[f]
439 oldstate = self[f]
437 if state == b'a' or oldstate == b'r':
440 if state == b'a' or oldstate == b'r':
438 scmutil.checkfilename(f)
441 scmutil.checkfilename(f)
439 if self._map.hastrackeddir(f):
442 if self._map.hastrackeddir(f):
440 msg = _(b'directory %r already in dirstate')
443 msg = _(b'directory %r already in dirstate')
441 msg %= pycompat.bytestr(f)
444 msg %= pycompat.bytestr(f)
442 raise error.Abort(msg)
445 raise error.Abort(msg)
443 # shadows
446 # shadows
444 for d in pathutil.finddirs(f):
447 for d in pathutil.finddirs(f):
445 if self._map.hastrackeddir(d):
448 if self._map.hastrackeddir(d):
446 break
449 break
447 entry = self._map.get(d)
450 entry = self._map.get(d)
448 if entry is not None and entry[0] != b'r':
451 if entry is not None and entry[0] != b'r':
449 msg = _(b'file %r in dirstate clashes with %r')
452 msg = _(b'file %r in dirstate clashes with %r')
450 msg %= (pycompat.bytestr(d), pycompat.bytestr(f))
453 msg %= (pycompat.bytestr(d), pycompat.bytestr(f))
451 raise error.Abort(msg)
454 raise error.Abort(msg)
452 self._dirty = True
455 self._dirty = True
453 self._updatedfiles.add(f)
456 self._updatedfiles.add(f)
454 self._map.addfile(f, oldstate, state, mode, size, mtime)
457 self._map.addfile(f, oldstate, state, mode, size, mtime)
455
458
456 def normal(self, f, parentfiledata=None):
459 def normal(self, f, parentfiledata=None):
457 """Mark a file normal and clean.
460 """Mark a file normal and clean.
458
461
459 parentfiledata: (mode, size, mtime) of the clean file
462 parentfiledata: (mode, size, mtime) of the clean file
460
463
461 parentfiledata should be computed from memory (for mode,
464 parentfiledata should be computed from memory (for mode,
462 size), as or close as possible from the point where we
465 size), as or close as possible from the point where we
463 determined the file was clean, to limit the risk of the
466 determined the file was clean, to limit the risk of the
464 file having been changed by an external process between the
467 file having been changed by an external process between the
465 moment where the file was determined to be clean and now."""
468 moment where the file was determined to be clean and now."""
466 if parentfiledata:
469 if parentfiledata:
467 (mode, size, mtime) = parentfiledata
470 (mode, size, mtime) = parentfiledata
468 else:
471 else:
469 s = os.lstat(self._join(f))
472 s = os.lstat(self._join(f))
470 mode = s.st_mode
473 mode = s.st_mode
471 size = s.st_size
474 size = s.st_size
472 mtime = s[stat.ST_MTIME]
475 mtime = s[stat.ST_MTIME]
473 self._addpath(f, b'n', mode, size & _rangemask, mtime & _rangemask)
476 self._addpath(f, b'n', mode, size & _rangemask, mtime & _rangemask)
474 self._map.copymap.pop(f, None)
477 self._map.copymap.pop(f, None)
475 if f in self._map.nonnormalset:
478 if f in self._map.nonnormalset:
476 self._map.nonnormalset.remove(f)
479 self._map.nonnormalset.remove(f)
477 if mtime > self._lastnormaltime:
480 if mtime > self._lastnormaltime:
478 # Remember the most recent modification timeslot for status(),
481 # Remember the most recent modification timeslot for status(),
479 # to make sure we won't miss future size-preserving file content
482 # to make sure we won't miss future size-preserving file content
480 # modifications that happen within the same timeslot.
483 # modifications that happen within the same timeslot.
481 self._lastnormaltime = mtime
484 self._lastnormaltime = mtime
482
485
483 def normallookup(self, f):
486 def normallookup(self, f):
484 '''Mark a file normal, but possibly dirty.'''
487 '''Mark a file normal, but possibly dirty.'''
485 if self._pl[1] != self._nodeconstants.nullid:
488 if self._pl[1] != self._nodeconstants.nullid:
486 # if there is a merge going on and the file was either
489 # if there is a merge going on and the file was either
487 # in state 'm' (-1) or coming from other parent (-2) before
490 # in state 'm' (-1) or coming from other parent (-2) before
488 # being removed, restore that state.
491 # being removed, restore that state.
489 entry = self._map.get(f)
492 entry = self._map.get(f)
490 if entry is not None:
493 if entry is not None:
491 - if entry[0] == b'r' and entry[2] in (-1, FROM_P2):
494 + if entry[0] == b'r' and entry[2] in (NONNORMAL, FROM_P2):
492 source = self._map.copymap.get(f)
495 source = self._map.copymap.get(f)
493 - if entry[2] == -1:
496 + if entry[2] == NONNORMAL:
494 self.merge(f)
497 self.merge(f)
495 elif entry[2] == FROM_P2:
498 elif entry[2] == FROM_P2:
496 self.otherparent(f)
499 self.otherparent(f)
497 if source:
500 if source:
498 self.copy(source, f)
501 self.copy(source, f)
499 return
502 return
500 if entry[0] == b'm' or entry[0] == b'n' and entry[2] == FROM_P2:
503 if entry[0] == b'm' or entry[0] == b'n' and entry[2] == FROM_P2:
501 return
504 return
502 - self._addpath(f, b'n', 0, -1, -1)
505 + self._addpath(f, b'n', 0, NONNORMAL, -1)
503 self._map.copymap.pop(f, None)
506 self._map.copymap.pop(f, None)
504
507
505 def otherparent(self, f):
508 def otherparent(self, f):
506 '''Mark as coming from the other parent, always dirty.'''
509 '''Mark as coming from the other parent, always dirty.'''
507 if self._pl[1] == self._nodeconstants.nullid:
510 if self._pl[1] == self._nodeconstants.nullid:
508 msg = _(b"setting %r to other parent only allowed in merges") % f
511 msg = _(b"setting %r to other parent only allowed in merges") % f
509 raise error.Abort(msg)
512 raise error.Abort(msg)
510 if f in self and self[f] == b'n':
513 if f in self and self[f] == b'n':
511 # merge-like
514 # merge-like
512 self._addpath(f, b'm', 0, FROM_P2, -1)
515 self._addpath(f, b'm', 0, FROM_P2, -1)
513 else:
516 else:
514 # add-like
517 # add-like
515 self._addpath(f, b'n', 0, FROM_P2, -1)
518 self._addpath(f, b'n', 0, FROM_P2, -1)
516 self._map.copymap.pop(f, None)
519 self._map.copymap.pop(f, None)
517
520
518 def add(self, f):
521 def add(self, f):
519 '''Mark a file added.'''
522 '''Mark a file added.'''
520 - self._addpath(f, b'a', 0, -1, -1)
523 + self._addpath(f, b'a', 0, NONNORMAL, -1)
521 self._map.copymap.pop(f, None)
524 self._map.copymap.pop(f, None)
522
525
523 def remove(self, f):
526 def remove(self, f):
524 '''Mark a file removed.'''
527 '''Mark a file removed.'''
525 self._dirty = True
528 self._dirty = True
526 oldstate = self[f]
529 oldstate = self[f]
527 size = 0
530 size = 0
528 if self._pl[1] != self._nodeconstants.nullid:
531 if self._pl[1] != self._nodeconstants.nullid:
529 entry = self._map.get(f)
532 entry = self._map.get(f)
530 if entry is not None:
533 if entry is not None:
531 # backup the previous state
534 # backup the previous state
532 if entry[0] == b'm': # merge
535 if entry[0] == b'm': # merge
533 - size = -1
536 + size = NONNORMAL
534 elif entry[0] == b'n' and entry[2] == FROM_P2: # other parent
537 elif entry[0] == b'n' and entry[2] == FROM_P2: # other parent
535 size = FROM_P2
538 size = FROM_P2
536 self._map.otherparentset.add(f)
539 self._map.otherparentset.add(f)
537 self._updatedfiles.add(f)
540 self._updatedfiles.add(f)
538 self._map.removefile(f, oldstate, size)
541 self._map.removefile(f, oldstate, size)
539 if size == 0:
542 if size == 0:
540 self._map.copymap.pop(f, None)
543 self._map.copymap.pop(f, None)
541
544
542 def merge(self, f):
545 def merge(self, f):
543 '''Mark a file merged.'''
546 '''Mark a file merged.'''
544 if self._pl[1] == self._nodeconstants.nullid:
547 if self._pl[1] == self._nodeconstants.nullid:
545 return self.normallookup(f)
548 return self.normallookup(f)
546 return self.otherparent(f)
549 return self.otherparent(f)
547
550
548 def drop(self, f):
551 def drop(self, f):
549 '''Drop a file from the dirstate'''
552 '''Drop a file from the dirstate'''
550 oldstate = self[f]
553 oldstate = self[f]
551 if self._map.dropfile(f, oldstate):
554 if self._map.dropfile(f, oldstate):
552 self._dirty = True
555 self._dirty = True
553 self._updatedfiles.add(f)
556 self._updatedfiles.add(f)
554 self._map.copymap.pop(f, None)
557 self._map.copymap.pop(f, None)
555
558
556 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
559 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
557 if exists is None:
560 if exists is None:
558 exists = os.path.lexists(os.path.join(self._root, path))
561 exists = os.path.lexists(os.path.join(self._root, path))
559 if not exists:
562 if not exists:
560 # Maybe a path component exists
563 # Maybe a path component exists
561 if not ignoremissing and b'/' in path:
564 if not ignoremissing and b'/' in path:
562 d, f = path.rsplit(b'/', 1)
565 d, f = path.rsplit(b'/', 1)
563 d = self._normalize(d, False, ignoremissing, None)
566 d = self._normalize(d, False, ignoremissing, None)
564 folded = d + b"/" + f
567 folded = d + b"/" + f
565 else:
568 else:
566 # No path components, preserve original case
569 # No path components, preserve original case
567 folded = path
570 folded = path
568 else:
571 else:
569 # recursively normalize leading directory components
572 # recursively normalize leading directory components
570 # against dirstate
573 # against dirstate
571 if b'/' in normed:
574 if b'/' in normed:
572 d, f = normed.rsplit(b'/', 1)
575 d, f = normed.rsplit(b'/', 1)
573 d = self._normalize(d, False, ignoremissing, True)
576 d = self._normalize(d, False, ignoremissing, True)
574 r = self._root + b"/" + d
577 r = self._root + b"/" + d
575 folded = d + b"/" + util.fspath(f, r)
578 folded = d + b"/" + util.fspath(f, r)
576 else:
579 else:
577 folded = util.fspath(normed, self._root)
580 folded = util.fspath(normed, self._root)
578 storemap[normed] = folded
581 storemap[normed] = folded
579
582
580 return folded
583 return folded
581
584
582 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
585 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
583 normed = util.normcase(path)
586 normed = util.normcase(path)
584 folded = self._map.filefoldmap.get(normed, None)
587 folded = self._map.filefoldmap.get(normed, None)
585 if folded is None:
588 if folded is None:
586 if isknown:
589 if isknown:
587 folded = path
590 folded = path
588 else:
591 else:
589 folded = self._discoverpath(
592 folded = self._discoverpath(
590 path, normed, ignoremissing, exists, self._map.filefoldmap
593 path, normed, ignoremissing, exists, self._map.filefoldmap
591 )
594 )
592 return folded
595 return folded
593
596
594 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
597 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
595 normed = util.normcase(path)
598 normed = util.normcase(path)
596 folded = self._map.filefoldmap.get(normed, None)
599 folded = self._map.filefoldmap.get(normed, None)
597 if folded is None:
600 if folded is None:
598 folded = self._map.dirfoldmap.get(normed, None)
601 folded = self._map.dirfoldmap.get(normed, None)
599 if folded is None:
602 if folded is None:
600 if isknown:
603 if isknown:
601 folded = path
604 folded = path
602 else:
605 else:
603 # store discovered result in dirfoldmap so that future
606 # store discovered result in dirfoldmap so that future
604 # normalizefile calls don't start matching directories
607 # normalizefile calls don't start matching directories
605 folded = self._discoverpath(
608 folded = self._discoverpath(
606 path, normed, ignoremissing, exists, self._map.dirfoldmap
609 path, normed, ignoremissing, exists, self._map.dirfoldmap
607 )
610 )
608 return folded
611 return folded
609
612
610 def normalize(self, path, isknown=False, ignoremissing=False):
613 def normalize(self, path, isknown=False, ignoremissing=False):
611 """
614 """
612 normalize the case of a pathname when on a casefolding filesystem
615 normalize the case of a pathname when on a casefolding filesystem
613
616
614 isknown specifies whether the filename came from walking the
617 isknown specifies whether the filename came from walking the
615 disk, to avoid extra filesystem access.
618 disk, to avoid extra filesystem access.
616
619
617 If ignoremissing is True, missing path are returned
620 If ignoremissing is True, missing path are returned
618 unchanged. Otherwise, we try harder to normalize possibly
621 unchanged. Otherwise, we try harder to normalize possibly
619 existing path components.
622 existing path components.
620
623
621 The normalized case is determined based on the following precedence:
624 The normalized case is determined based on the following precedence:
622
625
623 - version of name already stored in the dirstate
626 - version of name already stored in the dirstate
624 - version of name stored on disk
627 - version of name stored on disk
625 - version provided via command arguments
628 - version provided via command arguments
626 """
629 """
627
630
628 if self._checkcase:
631 if self._checkcase:
629 return self._normalize(path, isknown, ignoremissing)
632 return self._normalize(path, isknown, ignoremissing)
630 return path
633 return path
631
634
632 def clear(self):
635 def clear(self):
633 self._map.clear()
636 self._map.clear()
634 self._lastnormaltime = 0
637 self._lastnormaltime = 0
635 self._updatedfiles.clear()
638 self._updatedfiles.clear()
636 self._dirty = True
639 self._dirty = True
637
640
638 def rebuild(self, parent, allfiles, changedfiles=None):
641 def rebuild(self, parent, allfiles, changedfiles=None):
639 if changedfiles is None:
642 if changedfiles is None:
640 # Rebuild entire dirstate
643 # Rebuild entire dirstate
641 to_lookup = allfiles
644 to_lookup = allfiles
642 to_drop = []
645 to_drop = []
643 lastnormaltime = self._lastnormaltime
646 lastnormaltime = self._lastnormaltime
644 self.clear()
647 self.clear()
645 self._lastnormaltime = lastnormaltime
648 self._lastnormaltime = lastnormaltime
646 elif len(changedfiles) < 10:
649 elif len(changedfiles) < 10:
647 # Avoid turning allfiles into a set, which can be expensive if it's
650 # Avoid turning allfiles into a set, which can be expensive if it's
648 # large.
651 # large.
649 to_lookup = []
652 to_lookup = []
650 to_drop = []
653 to_drop = []
651 for f in changedfiles:
654 for f in changedfiles:
652 if f in allfiles:
655 if f in allfiles:
653 to_lookup.append(f)
656 to_lookup.append(f)
654 else:
657 else:
655 to_drop.append(f)
658 to_drop.append(f)
656 else:
659 else:
657 changedfilesset = set(changedfiles)
660 changedfilesset = set(changedfiles)
658 to_lookup = changedfilesset & set(allfiles)
661 to_lookup = changedfilesset & set(allfiles)
659 to_drop = changedfilesset - to_lookup
662 to_drop = changedfilesset - to_lookup
660
663
661 if self._origpl is None:
664 if self._origpl is None:
662 self._origpl = self._pl
665 self._origpl = self._pl
663 self._map.setparents(parent, self._nodeconstants.nullid)
666 self._map.setparents(parent, self._nodeconstants.nullid)
664
667
665 for f in to_lookup:
668 for f in to_lookup:
666 self.normallookup(f)
669 self.normallookup(f)
667 for f in to_drop:
670 for f in to_drop:
668 self.drop(f)
671 self.drop(f)
669
672
670 self._dirty = True
673 self._dirty = True
671
674
672 def identity(self):
675 def identity(self):
673 """Return identity of dirstate itself to detect changing in storage
676 """Return identity of dirstate itself to detect changing in storage
674
677
675 If identity of previous dirstate is equal to this, writing
678 If identity of previous dirstate is equal to this, writing
676 changes based on the former dirstate out can keep consistency.
679 changes based on the former dirstate out can keep consistency.
677 """
680 """
678 return self._map.identity
681 return self._map.identity
679
682
680 def write(self, tr):
683 def write(self, tr):
681 if not self._dirty:
684 if not self._dirty:
682 return
685 return
683
686
684 filename = self._filename
687 filename = self._filename
685 if tr:
688 if tr:
686 # 'dirstate.write()' is not only for writing in-memory
689 # 'dirstate.write()' is not only for writing in-memory
687 # changes out, but also for dropping ambiguous timestamp.
690 # changes out, but also for dropping ambiguous timestamp.
688 # delayed writing re-raise "ambiguous timestamp issue".
691 # delayed writing re-raise "ambiguous timestamp issue".
689 # See also the wiki page below for detail:
692 # See also the wiki page below for detail:
690 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
693 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
691
694
692 # emulate dropping timestamp in 'parsers.pack_dirstate'
695 # emulate dropping timestamp in 'parsers.pack_dirstate'
693 now = _getfsnow(self._opener)
696 now = _getfsnow(self._opener)
694 self._map.clearambiguoustimes(self._updatedfiles, now)
697 self._map.clearambiguoustimes(self._updatedfiles, now)
695
698
696 # emulate that all 'dirstate.normal' results are written out
699 # emulate that all 'dirstate.normal' results are written out
697 self._lastnormaltime = 0
700 self._lastnormaltime = 0
698 self._updatedfiles.clear()
701 self._updatedfiles.clear()
699
702
700 # delay writing in-memory changes out
703 # delay writing in-memory changes out
701 tr.addfilegenerator(
704 tr.addfilegenerator(
702 b'dirstate',
705 b'dirstate',
703 (self._filename,),
706 (self._filename,),
704 self._writedirstate,
707 self._writedirstate,
705 location=b'plain',
708 location=b'plain',
706 )
709 )
707 return
710 return
708
711
709 st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
712 st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
710 self._writedirstate(st)
713 self._writedirstate(st)
711
714
712 def addparentchangecallback(self, category, callback):
715 def addparentchangecallback(self, category, callback):
713 """add a callback to be called when the wd parents are changed
716 """add a callback to be called when the wd parents are changed
714
717
715 Callback will be called with the following arguments:
718 Callback will be called with the following arguments:
716 dirstate, (oldp1, oldp2), (newp1, newp2)
719 dirstate, (oldp1, oldp2), (newp1, newp2)
717
720
718 Category is a unique identifier to allow overwriting an old callback
721 Category is a unique identifier to allow overwriting an old callback
719 with a newer callback.
722 with a newer callback.
720 """
723 """
721 self._plchangecallbacks[category] = callback
724 self._plchangecallbacks[category] = callback
722
725
723 def _writedirstate(self, st):
726 def _writedirstate(self, st):
724 # notify callbacks about parents change
727 # notify callbacks about parents change
725 if self._origpl is not None and self._origpl != self._pl:
728 if self._origpl is not None and self._origpl != self._pl:
726 for c, callback in sorted(
729 for c, callback in sorted(
727 pycompat.iteritems(self._plchangecallbacks)
730 pycompat.iteritems(self._plchangecallbacks)
728 ):
731 ):
729 callback(self, self._origpl, self._pl)
732 callback(self, self._origpl, self._pl)
730 self._origpl = None
733 self._origpl = None
731 # use the modification time of the newly created temporary file as the
734 # use the modification time of the newly created temporary file as the
732 # filesystem's notion of 'now'
735 # filesystem's notion of 'now'
733 now = util.fstat(st)[stat.ST_MTIME] & _rangemask
736 now = util.fstat(st)[stat.ST_MTIME] & _rangemask
734
737
735 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
738 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
736 # timestamp of each entries in dirstate, because of 'now > mtime'
739 # timestamp of each entries in dirstate, because of 'now > mtime'
737 delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
740 delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
738 if delaywrite > 0:
741 if delaywrite > 0:
739 # do we have any files to delay for?
742 # do we have any files to delay for?
740 for f, e in pycompat.iteritems(self._map):
743 for f, e in pycompat.iteritems(self._map):
741 if e[0] == b'n' and e[3] == now:
744 if e[0] == b'n' and e[3] == now:
742 import time # to avoid useless import
745 import time # to avoid useless import
743
746
744 # rather than sleep n seconds, sleep until the next
747 # rather than sleep n seconds, sleep until the next
745 # multiple of n seconds
748 # multiple of n seconds
746 clock = time.time()
749 clock = time.time()
747 start = int(clock) - (int(clock) % delaywrite)
750 start = int(clock) - (int(clock) % delaywrite)
748 end = start + delaywrite
751 end = start + delaywrite
749 time.sleep(end - clock)
752 time.sleep(end - clock)
750 now = end # trust our estimate that the end is near now
753 now = end # trust our estimate that the end is near now
751 break
754 break
752
755
753 self._map.write(st, now)
756 self._map.write(st, now)
754 self._lastnormaltime = 0
757 self._lastnormaltime = 0
755 self._dirty = False
758 self._dirty = False
756
759
757 def _dirignore(self, f):
760 def _dirignore(self, f):
758 if self._ignore(f):
761 if self._ignore(f):
759 return True
762 return True
760 for p in pathutil.finddirs(f):
763 for p in pathutil.finddirs(f):
761 if self._ignore(p):
764 if self._ignore(p):
762 return True
765 return True
763 return False
766 return False
764
767
765 def _ignorefiles(self):
768 def _ignorefiles(self):
766 files = []
769 files = []
767 if os.path.exists(self._join(b'.hgignore')):
770 if os.path.exists(self._join(b'.hgignore')):
768 files.append(self._join(b'.hgignore'))
771 files.append(self._join(b'.hgignore'))
769 for name, path in self._ui.configitems(b"ui"):
772 for name, path in self._ui.configitems(b"ui"):
770 if name == b'ignore' or name.startswith(b'ignore.'):
773 if name == b'ignore' or name.startswith(b'ignore.'):
771 # we need to use os.path.join here rather than self._join
774 # we need to use os.path.join here rather than self._join
772 # because path is arbitrary and user-specified
775 # because path is arbitrary and user-specified
773 files.append(os.path.join(self._rootdir, util.expandpath(path)))
776 files.append(os.path.join(self._rootdir, util.expandpath(path)))
774 return files
777 return files
775
778
776 def _ignorefileandline(self, f):
779 def _ignorefileandline(self, f):
777 files = collections.deque(self._ignorefiles())
780 files = collections.deque(self._ignorefiles())
778 visited = set()
781 visited = set()
779 while files:
782 while files:
780 i = files.popleft()
783 i = files.popleft()
781 patterns = matchmod.readpatternfile(
784 patterns = matchmod.readpatternfile(
782 i, self._ui.warn, sourceinfo=True
785 i, self._ui.warn, sourceinfo=True
783 )
786 )
784 for pattern, lineno, line in patterns:
787 for pattern, lineno, line in patterns:
785 kind, p = matchmod._patsplit(pattern, b'glob')
788 kind, p = matchmod._patsplit(pattern, b'glob')
786 if kind == b"subinclude":
789 if kind == b"subinclude":
787 if p not in visited:
790 if p not in visited:
788 files.append(p)
791 files.append(p)
789 continue
792 continue
790 m = matchmod.match(
793 m = matchmod.match(
791 self._root, b'', [], [pattern], warn=self._ui.warn
794 self._root, b'', [], [pattern], warn=self._ui.warn
792 )
795 )
793 if m(f):
796 if m(f):
794 return (i, lineno, line)
797 return (i, lineno, line)
795 visited.add(i)
798 visited.add(i)
796 return (None, -1, b"")
799 return (None, -1, b"")
797
800
798 def _walkexplicit(self, match, subrepos):
801 def _walkexplicit(self, match, subrepos):
799 """Get stat data about the files explicitly specified by match.
802 """Get stat data about the files explicitly specified by match.
800
803
801 Return a triple (results, dirsfound, dirsnotfound).
804 Return a triple (results, dirsfound, dirsnotfound).
802 - results is a mapping from filename to stat result. It also contains
805 - results is a mapping from filename to stat result. It also contains
803 listings mapping subrepos and .hg to None.
806 listings mapping subrepos and .hg to None.
804 - dirsfound is a list of files found to be directories.
807 - dirsfound is a list of files found to be directories.
805 - dirsnotfound is a list of files that the dirstate thinks are
808 - dirsnotfound is a list of files that the dirstate thinks are
806 directories and that were not found."""
809 directories and that were not found."""
807
810
808 def badtype(mode):
811 def badtype(mode):
809 kind = _(b'unknown')
812 kind = _(b'unknown')
810 if stat.S_ISCHR(mode):
813 if stat.S_ISCHR(mode):
811 kind = _(b'character device')
814 kind = _(b'character device')
812 elif stat.S_ISBLK(mode):
815 elif stat.S_ISBLK(mode):
813 kind = _(b'block device')
816 kind = _(b'block device')
814 elif stat.S_ISFIFO(mode):
817 elif stat.S_ISFIFO(mode):
815 kind = _(b'fifo')
818 kind = _(b'fifo')
816 elif stat.S_ISSOCK(mode):
819 elif stat.S_ISSOCK(mode):
817 kind = _(b'socket')
820 kind = _(b'socket')
818 elif stat.S_ISDIR(mode):
821 elif stat.S_ISDIR(mode):
819 kind = _(b'directory')
822 kind = _(b'directory')
820 return _(b'unsupported file type (type is %s)') % kind
823 return _(b'unsupported file type (type is %s)') % kind
821
824
822 badfn = match.bad
825 badfn = match.bad
823 dmap = self._map
826 dmap = self._map
824 lstat = os.lstat
827 lstat = os.lstat
825 getkind = stat.S_IFMT
828 getkind = stat.S_IFMT
826 dirkind = stat.S_IFDIR
829 dirkind = stat.S_IFDIR
827 regkind = stat.S_IFREG
830 regkind = stat.S_IFREG
828 lnkkind = stat.S_IFLNK
831 lnkkind = stat.S_IFLNK
829 join = self._join
832 join = self._join
830 dirsfound = []
833 dirsfound = []
831 foundadd = dirsfound.append
834 foundadd = dirsfound.append
832 dirsnotfound = []
835 dirsnotfound = []
833 notfoundadd = dirsnotfound.append
836 notfoundadd = dirsnotfound.append
834
837
835 if not match.isexact() and self._checkcase:
838 if not match.isexact() and self._checkcase:
836 normalize = self._normalize
839 normalize = self._normalize
837 else:
840 else:
838 normalize = None
841 normalize = None
839
842
840 files = sorted(match.files())
843 files = sorted(match.files())
841 subrepos.sort()
844 subrepos.sort()
842 i, j = 0, 0
845 i, j = 0, 0
843 while i < len(files) and j < len(subrepos):
846 while i < len(files) and j < len(subrepos):
844 subpath = subrepos[j] + b"/"
847 subpath = subrepos[j] + b"/"
845 if files[i] < subpath:
848 if files[i] < subpath:
846 i += 1
849 i += 1
847 continue
850 continue
848 while i < len(files) and files[i].startswith(subpath):
851 while i < len(files) and files[i].startswith(subpath):
849 del files[i]
852 del files[i]
850 j += 1
853 j += 1
851
854
852 if not files or b'' in files:
855 if not files or b'' in files:
853 files = [b'']
856 files = [b'']
854 # constructing the foldmap is expensive, so don't do it for the
857 # constructing the foldmap is expensive, so don't do it for the
855 # common case where files is ['']
858 # common case where files is ['']
856 normalize = None
859 normalize = None
857 results = dict.fromkeys(subrepos)
860 results = dict.fromkeys(subrepos)
858 results[b'.hg'] = None
861 results[b'.hg'] = None
859
862
860 for ff in files:
863 for ff in files:
861 if normalize:
864 if normalize:
862 nf = normalize(ff, False, True)
865 nf = normalize(ff, False, True)
863 else:
866 else:
864 nf = ff
867 nf = ff
865 if nf in results:
868 if nf in results:
866 continue
869 continue
867
870
868 try:
871 try:
869 st = lstat(join(nf))
872 st = lstat(join(nf))
870 kind = getkind(st.st_mode)
873 kind = getkind(st.st_mode)
871 if kind == dirkind:
874 if kind == dirkind:
872 if nf in dmap:
875 if nf in dmap:
873 # file replaced by dir on disk but still in dirstate
876 # file replaced by dir on disk but still in dirstate
874 results[nf] = None
877 results[nf] = None
875 foundadd((nf, ff))
878 foundadd((nf, ff))
876 elif kind == regkind or kind == lnkkind:
879 elif kind == regkind or kind == lnkkind:
877 results[nf] = st
880 results[nf] = st
878 else:
881 else:
879 badfn(ff, badtype(kind))
882 badfn(ff, badtype(kind))
880 if nf in dmap:
883 if nf in dmap:
881 results[nf] = None
884 results[nf] = None
882 except OSError as inst: # nf not found on disk - it is dirstate only
885 except OSError as inst: # nf not found on disk - it is dirstate only
883 if nf in dmap: # does it exactly match a missing file?
886 if nf in dmap: # does it exactly match a missing file?
884 results[nf] = None
887 results[nf] = None
885 else: # does it match a missing directory?
888 else: # does it match a missing directory?
886 if self._map.hasdir(nf):
889 if self._map.hasdir(nf):
887 notfoundadd(nf)
890 notfoundadd(nf)
888 else:
891 else:
889 badfn(ff, encoding.strtolocal(inst.strerror))
892 badfn(ff, encoding.strtolocal(inst.strerror))
890
893
891 # match.files() may contain explicitly-specified paths that shouldn't
894 # match.files() may contain explicitly-specified paths that shouldn't
892 # be taken; drop them from the list of files found. dirsfound/notfound
895 # be taken; drop them from the list of files found. dirsfound/notfound
893 # aren't filtered here because they will be tested later.
896 # aren't filtered here because they will be tested later.
894 if match.anypats():
897 if match.anypats():
895 for f in list(results):
898 for f in list(results):
896 if f == b'.hg' or f in subrepos:
899 if f == b'.hg' or f in subrepos:
897 # keep sentinel to disable further out-of-repo walks
900 # keep sentinel to disable further out-of-repo walks
898 continue
901 continue
899 if not match(f):
902 if not match(f):
900 del results[f]
903 del results[f]
901
904
902 # Case insensitive filesystems cannot rely on lstat() failing to detect
905 # Case insensitive filesystems cannot rely on lstat() failing to detect
903 # a case-only rename. Prune the stat object for any file that does not
906 # a case-only rename. Prune the stat object for any file that does not
904 # match the case in the filesystem, if there are multiple files that
907 # match the case in the filesystem, if there are multiple files that
905 # normalize to the same path.
908 # normalize to the same path.
906 if match.isexact() and self._checkcase:
909 if match.isexact() and self._checkcase:
907 normed = {}
910 normed = {}
908
911
909 for f, st in pycompat.iteritems(results):
912 for f, st in pycompat.iteritems(results):
910 if st is None:
913 if st is None:
911 continue
914 continue
912
915
913 nc = util.normcase(f)
916 nc = util.normcase(f)
914 paths = normed.get(nc)
917 paths = normed.get(nc)
915
918
916 if paths is None:
919 if paths is None:
917 paths = set()
920 paths = set()
918 normed[nc] = paths
921 normed[nc] = paths
919
922
920 paths.add(f)
923 paths.add(f)
921
924
922 for norm, paths in pycompat.iteritems(normed):
925 for norm, paths in pycompat.iteritems(normed):
923 if len(paths) > 1:
926 if len(paths) > 1:
924 for path in paths:
927 for path in paths:
925 folded = self._discoverpath(
928 folded = self._discoverpath(
926 path, norm, True, None, self._map.dirfoldmap
929 path, norm, True, None, self._map.dirfoldmap
927 )
930 )
928 if path != folded:
931 if path != folded:
929 results[path] = None
932 results[path] = None
930
933
931 return results, dirsfound, dirsnotfound
934 return results, dirsfound, dirsnotfound
932
935
    def walk(self, match, subrepos, unknown, ignored, full=True):
        """
        Walk recursively through the directory tree, finding all files
        matched by match.

        If full is False, maybe skip some known-clean files.

        Return a dict mapping filename to stat-like object (either
        mercurial.osutil.stat instance or return value of os.stat()).

        """
        # full is a flag that extensions that hook into walk can use -- this
        # implementation doesn't use it at all. This satisfies the contract
        # because we only guarantee a "maybe".

        if ignored:
            ignore = util.never
            dirignore = util.never
        elif unknown:
            ignore = self._ignore
            dirignore = self._dirignore
        else:
            # if not unknown and not ignored, drop dir recursion and step 2
            ignore = util.always
            dirignore = util.always

        matchfn = match.matchfn
        matchalways = match.always()
        matchtdir = match.traversedir
        dmap = self._map
        listdir = util.listdir
        lstat = os.lstat
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join

        exact = skipstep3 = False
        if match.isexact():  # match.exact
            exact = True
            dirignore = util.always  # skip step 2
        elif match.prefix():  # match.match, no patterns
            skipstep3 = True

        if not exact and self._checkcase:
            normalize = self._normalize
            normalizefile = self._normalizefile
            skipstep3 = False
        else:
            normalize = self._normalize
            normalizefile = None

        # step 1: find all explicit files
        results, work, dirsnotfound = self._walkexplicit(match, subrepos)
        if matchtdir:
            for d in work:
                matchtdir(d[0])
            for d in dirsnotfound:
                matchtdir(d)

        skipstep3 = skipstep3 and not (work or dirsnotfound)
        work = [d for d in work if not dirignore(d[0])]

        # step 2: visit subdirectories
        def traverse(work, alreadynormed):
            wadd = work.append
            while work:
                tracing.counter('dirstate.walk work', len(work))
                nd = work.pop()
                visitentries = match.visitchildrenset(nd)
                if not visitentries:
                    continue
                if visitentries == b'this' or visitentries == b'all':
                    visitentries = None
                skip = None
                if nd != b'':
                    skip = b'.hg'
                try:
                    with tracing.log('dirstate.walk.traverse listdir %s', nd):
                        entries = listdir(join(nd), stat=True, skip=skip)
                except OSError as inst:
                    if inst.errno in (errno.EACCES, errno.ENOENT):
                        match.bad(
                            self.pathto(nd), encoding.strtolocal(inst.strerror)
                        )
                        continue
                    raise
                for f, kind, st in entries:
                    # Some matchers may return files in the visitentries set,
                    # instead of 'this', if the matcher explicitly mentions them
                    # and is not an exactmatcher. This is acceptable; we do not
                    # make any hard assumptions about file-or-directory below
                    # based on the presence of `f` in visitentries. If
                    # visitchildrenset returned a set, we can always skip the
                    # entries *not* in the set it provided regardless of whether
                    # they're actually a file or a directory.
                    if visitentries and f not in visitentries:
                        continue
                    if normalizefile:
                        # even though f might be a directory, we're only
                        # interested in comparing it to files currently in the
                        # dmap -- therefore normalizefile is enough
                        nf = normalizefile(
                            nd and (nd + b"/" + f) or f, True, True
                        )
                    else:
                        nf = nd and (nd + b"/" + f) or f
                    if nf not in results:
                        if kind == dirkind:
                            if not ignore(nf):
                                if matchtdir:
                                    matchtdir(nf)
                                wadd(nf)
                            if nf in dmap and (matchalways or matchfn(nf)):
                                results[nf] = None
                        elif kind == regkind or kind == lnkkind:
                            if nf in dmap:
                                if matchalways or matchfn(nf):
                                    results[nf] = st
                            elif (matchalways or matchfn(nf)) and not ignore(
                                nf
                            ):
                                # unknown file -- normalize if necessary
                                if not alreadynormed:
                                    nf = normalize(nf, False, True)
                                results[nf] = st
                        elif nf in dmap and (matchalways or matchfn(nf)):
                            results[nf] = None

        for nd, d in work:
            # alreadynormed means that processwork doesn't have to do any
            # expensive directory normalization
            alreadynormed = not normalize or nd == d
            traverse([d], alreadynormed)

        for s in subrepos:
            del results[s]
        del results[b'.hg']

        # step 3: visit remaining files from dmap
        if not skipstep3 and not exact:
            # If a dmap file is not in results yet, it was either
            # a) not matching matchfn, b) ignored, c) missing, or d) under a
            # symlink directory.
            if not results and matchalways:
                visit = [f for f in dmap]
            else:
                visit = [f for f in dmap if f not in results and matchfn(f)]
            visit.sort()

            if unknown:
                # unknown == True means we walked all dirs under the roots
                # that weren't ignored, and everything that matched was stat'ed
                # and is already in results.
                # The rest must thus be ignored or under a symlink.
                audit_path = pathutil.pathauditor(self._root, cached=True)

                for nf in iter(visit):
                    # If a stat for the same file was already added with a
                    # different case, don't add one for this, since that would
                    # make it appear as if the file exists under both names
                    # on disk.
                    if (
                        normalizefile
                        and normalizefile(nf, True, True) in results
                    ):
                        results[nf] = None
                    # Report ignored items in the dmap as long as they are not
                    # under a symlink directory.
                    elif audit_path.check(nf):
                        try:
                            results[nf] = lstat(join(nf))
                            # file was just ignored, no links, and exists
                        except OSError:
                            # file doesn't exist
                            results[nf] = None
                    else:
                        # It's either missing or under a symlink directory
                        # which we in this case report as missing
                        results[nf] = None
            else:
                # We may not have walked the full directory tree above,
                # so stat and check everything we missed.
                iv = iter(visit)
                for st in util.statfiles([join(i) for i in visit]):
                    results[next(iv)] = st
        return results

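A toy, standalone analogue of the return contract documented above (a dict mapping workdir-relative names to stat-like objects); it is not Mercurial's three-step algorithm and skips only the `.hg` store:

import os

def toy_walk(root):
    """Map workdir-relative filenames to os.lstat() results, skipping .hg."""
    results = {}
    for dirpath, dirnames, filenames in os.walk(root):
        dirnames[:] = [d for d in dirnames if d != '.hg']
        for name in filenames:
            path = os.path.join(dirpath, name)
            results[os.path.relpath(path, root)] = os.lstat(path)
    return results

Unlike this sketch, the real walk() may also map a name to None, meaning the file is known to the dirstate but was not (or could not be) stat'ed.
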
    def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
        # Force Rayon (Rust parallelism library) to respect the number of
        # workers. This is a temporary workaround until Rust code knows
        # how to read the config file.
        numcpus = self._ui.configint(b"worker", b"numcpus")
        if numcpus is not None:
            encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)

        workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
        if not workers_enabled:
            encoding.environ[b"RAYON_NUM_THREADS"] = b"1"

        (
            lookup,
            modified,
            added,
            removed,
            deleted,
            clean,
            ignored,
            unknown,
            warnings,
            bad,
            traversed,
            dirty,
        ) = rustmod.status(
            self._map._rustmap,
            matcher,
            self._rootdir,
            self._ignorefiles(),
            self._checkexec,
            self._lastnormaltime,
            bool(list_clean),
            bool(list_ignored),
            bool(list_unknown),
            bool(matcher.traversedir),
        )

        self._dirty |= dirty

        if matcher.traversedir:
            for dir in traversed:
                matcher.traversedir(dir)

        if self._ui.warn:
            for item in warnings:
                if isinstance(item, tuple):
                    file_path, syntax = item
                    msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
                        file_path,
                        syntax,
                    )
                    self._ui.warn(msg)
                else:
                    msg = _(b"skipping unreadable pattern file '%s': %s\n")
                    self._ui.warn(
                        msg
                        % (
                            pathutil.canonpath(
                                self._rootdir, self._rootdir, item
                            ),
                            b"No such file or directory",
                        )
                    )

        for (fn, message) in bad:
            matcher.bad(fn, encoding.strtolocal(message))

        status = scmutil.status(
            modified=modified,
            added=added,
            removed=removed,
            deleted=deleted,
            unknown=unknown,
            ignored=ignored,
            clean=clean,
        )
        return (lookup, status)

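The Rayon workaround above just translates the `worker` configuration into the environment variable Rayon reads, without overriding a value the user already set. A standalone sketch of that mapping (plain `os.environ`, not Mercurial's `encoding.environ`):

import os

def configure_rayon(numcpus=None, workers_enabled=True):
    if numcpus is not None:
        os.environ.setdefault('RAYON_NUM_THREADS', '%d' % numcpus)
    if not workers_enabled:
        # forcing a single thread effectively disables parallelism
        os.environ['RAYON_NUM_THREADS'] = '1'

configure_rayon(numcpus=4)
print(os.environ.get('RAYON_NUM_THREADS'))  # '4' unless it was already set
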
    def status(self, match, subrepos, ignored, clean, unknown):
        """Determine the status of the working copy relative to the
        dirstate and return a pair of (unsure, status), where status is of type
        scmutil.status and:

          unsure:
            files that might have been modified since the dirstate was
            written, but need to be read to be sure (size is the same
            but mtime differs)
          status.modified:
            files that have definitely been modified since the dirstate
            was written (different size or mode)
          status.clean:
            files that have definitely not been modified since the
            dirstate was written
        """
        listignored, listclean, listunknown = ignored, clean, unknown
        lookup, modified, added, unknown, ignored = [], [], [], [], []
        removed, deleted, clean = [], [], []

        dmap = self._map
        dmap.preload()

        use_rust = True

        allowed_matchers = (
            matchmod.alwaysmatcher,
            matchmod.exactmatcher,
            matchmod.includematcher,
        )

        if rustmod is None:
            use_rust = False
        elif self._checkcase:
            # Case-insensitive filesystems are not handled yet
            use_rust = False
        elif subrepos:
            use_rust = False
        elif sparse.enabled:
            use_rust = False
        elif not isinstance(match, allowed_matchers):
            # Some matchers have yet to be implemented
            use_rust = False

        if use_rust:
            try:
                return self._rust_status(
                    match, listclean, listignored, listunknown
                )
            except rustmod.FallbackError:
                pass

        def noop(f):
            pass

        dcontains = dmap.__contains__
        dget = dmap.__getitem__
        ladd = lookup.append  # aka "unsure"
        madd = modified.append
        aadd = added.append
        uadd = unknown.append if listunknown else noop
        iadd = ignored.append if listignored else noop
        radd = removed.append
        dadd = deleted.append
        cadd = clean.append if listclean else noop
        mexact = match.exact
        dirignore = self._dirignore
        checkexec = self._checkexec
        copymap = self._map.copymap
        lastnormaltime = self._lastnormaltime

        # We need to do full walks when either
        # - we're listing all clean files, or
        # - match.traversedir does something, because match.traversedir should
        #   be called for every dir in the working dir
        full = listclean or match.traversedir is not None
        for fn, st in pycompat.iteritems(
            self.walk(match, subrepos, listunknown, listignored, full=full)
        ):
            if not dcontains(fn):
                if (listignored or mexact(fn)) and dirignore(fn):
                    if listignored:
                        iadd(fn)
                else:
                    uadd(fn)
                continue

            # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
            # written like that for performance reasons. dmap[fn] is not a
            # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
            # opcode has fast paths when the value to be unpacked is a tuple or
            # a list, but falls back to creating a full-fledged iterator in
            # general. That is much slower than simply accessing and storing the
            # tuple members one by one.
            t = dget(fn)
            state = t[0]
            mode = t[1]
            size = t[2]
            time = t[3]

            if not st and state in b"nma":
                dadd(fn)
            elif state == b'n':
                if (
                    size >= 0
                    and (
                        (size != st.st_size and size != st.st_size & _rangemask)
                        or ((mode ^ st.st_mode) & 0o100 and checkexec)
                    )
                    or size == FROM_P2  # other parent
                    or fn in copymap
                ):
                    if stat.S_ISLNK(st.st_mode) and size != st.st_size:
                        # issue6456: Size returned may be longer due to
                        # encryption on EXT-4 fscrypt, undecided.
                        ladd(fn)
                    else:
                        madd(fn)
                elif (
                    time != st[stat.ST_MTIME]
                    and time != st[stat.ST_MTIME] & _rangemask
                ):
                    ladd(fn)
                elif st[stat.ST_MTIME] == lastnormaltime:
                    # fn may have just been marked as normal and it may have
                    # changed in the same second without changing its size.
                    # This can happen if we quickly do multiple commits.
                    # Force lookup, so we don't miss such a racy file change.
                    ladd(fn)
                elif listclean:
                    cadd(fn)
            elif state == b'm':
                madd(fn)
            elif state == b'a':
                aadd(fn)
            elif state == b'r':
                radd(fn)
        status = scmutil.status(
            modified, added, removed, deleted, unknown, ignored, clean
        )
        return (lookup, status)

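The size and mtime comparisons above accept a match either directly or after masking with `_rangemask`, because values written into the dirstate are kept within 31 bits. A standalone illustration of that check (an assumption-labeled sketch, not the method itself):

_rangemask = 0x7FFFFFFF

def size_looks_changed(stored_size, on_disk_size):
    # changed only if the on-disk value differs both raw and modulo 2**31
    return (
        stored_size != on_disk_size
        and stored_size != on_disk_size & _rangemask
    )

print(size_looks_changed(5, 5))           # False: identical
print(size_looks_changed(1, 0x80000001))  # False: equal after masking
print(size_looks_changed(5, 6))           # True: really different
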
    def matches(self, match):
        """
        return files in the dirstate (in whatever state) filtered by match
        """
        dmap = self._map
        if rustmod is not None:
            dmap = self._map._rustmap

        if match.always():
            return dmap.keys()
        files = match.files()
        if match.isexact():
            # fast path -- filter the other way around, since typically files is
            # much smaller than dmap
            return [f for f in files if f in dmap]
        if match.prefix() and all(fn in dmap for fn in files):
            # fast path -- all the values are known to be files, so just return
            # that
            return list(files)
        return [f for f in dmap if match(f)]

    def _actualfilename(self, tr):
        if tr:
            return self._pendingfilename
        else:
            return self._filename

    def savebackup(self, tr, backupname):
        '''Save current dirstate into backup file'''
        filename = self._actualfilename(tr)
        assert backupname != filename

        # use '_writedirstate' instead of 'write' to make sure changes are
        # written out, because the latter skips writing while a transaction is
        # running. The output file will be used to create a backup of the
        # dirstate at this point.
        if self._dirty or not self._opener.exists(filename):
            self._writedirstate(
                self._opener(filename, b"w", atomictemp=True, checkambig=True)
            )

        if tr:
            # ensure that subsequent tr.writepending returns True for
            # changes written out above, even if dirstate is never
            # changed after this
            tr.addfilegenerator(
                b'dirstate',
                (self._filename,),
                self._writedirstate,
                location=b'plain',
            )

            # ensure that pending file written above is unlinked at
            # failure, even if tr.writepending isn't invoked until the
            # end of this transaction
            tr.registertmp(filename, location=b'plain')

        self._opener.tryunlink(backupname)
        # hardlink backup is okay because _writedirstate is always called
        # with an "atomictemp=True" file.
        util.copyfile(
            self._opener.join(filename),
            self._opener.join(backupname),
            hardlink=True,
        )

    def restorebackup(self, tr, backupname):
        '''Restore dirstate by backup file'''
        # this "invalidate()" prevents "wlock.release()" from writing
        # changes of dirstate out after restoring from backup file
        self.invalidate()
        filename = self._actualfilename(tr)
        o = self._opener
        if util.samefile(o.join(backupname), o.join(filename)):
            o.unlink(backupname)
        else:
            o.rename(backupname, filename, checkambig=True)

    def clearbackup(self, tr, backupname):
        '''Clear backup file'''
        self._opener.unlink(backupname)


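A hypothetical usage sketch of the backup API above, wrapping a risky working-copy operation; `repo` and `risky_update` are assumptions, not names from this module:

backup = b'dirstate.backup.example'  # hypothetical backup file name
tr = repo.currenttransaction()
repo.dirstate.savebackup(tr, backup)
try:
    risky_update(repo)  # placeholder for an operation that may fail midway
except Exception:
    repo.dirstate.restorebackup(tr, backup)
    raise
else:
    repo.dirstate.clearbackup(tr, backup)
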
class dirstatemap(object):
    """Map encapsulating the dirstate's contents.

    The dirstate contains the following state:

    - `identity` is the identity of the dirstate file, which can be used to
      detect when changes have occurred to the dirstate file.

    - `parents` is a pair containing the parents of the working copy. The
      parents are updated by calling `setparents`.

    - the state map maps filenames to tuples of (state, mode, size, mtime),
      where state is a single character representing 'normal', 'added',
      'removed', or 'merged'. It is read by treating the dirstate as a
      dict. File state is updated by calling the `addfile`, `removefile` and
      `dropfile` methods.

    - `copymap` maps destination filenames to their source filename.

    The dirstate also provides the following views onto the state:

    - `nonnormalset` is a set of the filenames that have state other
      than 'normal', or are normal but have an mtime of -1 ('normallookup').

    - `otherparentset` is a set of the filenames that are marked as coming
      from the second parent when the dirstate is currently being merged.

    - `filefoldmap` is a dict mapping normalized filenames to the denormalized
      form that they appear as in the dirstate.

    - `dirfoldmap` is a dict mapping normalized directory names to the
      denormalized form that they appear as in the dirstate.
    """

    def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
        self._ui = ui
        self._opener = opener
        self._root = root
        self._filename = b'dirstate'
        self._nodelen = 20
        self._nodeconstants = nodeconstants
        assert (
            not use_dirstate_v2
        ), "should have detected unsupported requirement"

        self._parents = None
        self._dirtyparents = False

        # for consistent view between _pl() and _read() invocations
        self._pendingmode = None

    @propertycache
    def _map(self):
        self._map = {}
        self.read()
        return self._map

    @propertycache
    def copymap(self):
        self.copymap = {}
        self._map
        return self.copymap

    def directories(self):
        # Rust / dirstate-v2 only
        return []

    def clear(self):
        self._map.clear()
        self.copymap.clear()
        self.setparents(self._nodeconstants.nullid, self._nodeconstants.nullid)
        util.clearcachedproperty(self, b"_dirs")
        util.clearcachedproperty(self, b"_alldirs")
        util.clearcachedproperty(self, b"filefoldmap")
        util.clearcachedproperty(self, b"dirfoldmap")
        util.clearcachedproperty(self, b"nonnormalset")
        util.clearcachedproperty(self, b"otherparentset")

    def items(self):
        return pycompat.iteritems(self._map)

    # forward for python2,3 compat
    iteritems = items

    def __len__(self):
        return len(self._map)

    def __iter__(self):
        return iter(self._map)

    def get(self, key, default=None):
        return self._map.get(key, default)

    def __contains__(self, key):
        return key in self._map

    def __getitem__(self, key):
        return self._map[key]

    def keys(self):
        return self._map.keys()

    def preload(self):
        """Loads the underlying data, if it's not already loaded"""
        self._map

    def addfile(self, f, oldstate, state, mode, size, mtime):
        """Add a tracked file to the dirstate."""
        if oldstate in b"?r" and "_dirs" in self.__dict__:
            self._dirs.addpath(f)
        if oldstate == b"?" and "_alldirs" in self.__dict__:
            self._alldirs.addpath(f)
        self._map[f] = dirstatetuple(state, mode, size, mtime)
        if state != b'n' or mtime == -1:
            self.nonnormalset.add(f)
        if size == FROM_P2:
            self.otherparentset.add(f)

    def removefile(self, f, oldstate, size):
        """
        Mark a file as removed in the dirstate.

        The `size` parameter is used to store sentinel values that indicate
        the file's previous state. In the future, we should refactor this
        to be more explicit about what that state is.
        """
        if oldstate not in b"?r" and "_dirs" in self.__dict__:
            self._dirs.delpath(f)
        if oldstate == b"?" and "_alldirs" in self.__dict__:
            self._alldirs.addpath(f)
        if "filefoldmap" in self.__dict__:
            normed = util.normcase(f)
            self.filefoldmap.pop(normed, None)
        self._map[f] = dirstatetuple(b'r', 0, size, 0)
        self.nonnormalset.add(f)

    def dropfile(self, f, oldstate):
        """
        Remove a file from the dirstate. Returns True if the file was
        previously recorded.
        """
        exists = self._map.pop(f, None) is not None
        if exists:
            if oldstate != b"r" and "_dirs" in self.__dict__:
                self._dirs.delpath(f)
            if "_alldirs" in self.__dict__:
                self._alldirs.delpath(f)
        if "filefoldmap" in self.__dict__:
            normed = util.normcase(f)
            self.filefoldmap.pop(normed, None)
        self.nonnormalset.discard(f)
        return exists

    def clearambiguoustimes(self, files, now):
        for f in files:
            e = self.get(f)
            if e is not None and e[0] == b'n' and e[3] == now:
                self._map[f] = dirstatetuple(e[0], e[1], e[2], -1)
                self.nonnormalset.add(f)

    def nonnormalentries(self):
        '''Compute the nonnormal dirstate entries from the dmap'''
        try:
            return parsers.nonnormalotherparententries(self._map)
        except AttributeError:
            nonnorm = set()
            otherparent = set()
            for fname, e in pycompat.iteritems(self._map):
                if e[0] != b'n' or e[3] == -1:
                    nonnorm.add(fname)
                if e[0] == b'n' and e[2] == FROM_P2:
                    otherparent.add(fname)
            return nonnorm, otherparent

    @propertycache
    def filefoldmap(self):
        """Returns a dictionary mapping normalized case paths to their
        non-normalized versions.
        """
        try:
            makefilefoldmap = parsers.make_file_foldmap
        except AttributeError:
            pass
        else:
            return makefilefoldmap(
                self._map, util.normcasespec, util.normcasefallback
            )

        f = {}
        normcase = util.normcase
        for name, s in pycompat.iteritems(self._map):
            if s[0] != b'r':
                f[normcase(name)] = name
        f[b'.'] = b'.'  # prevents useless util.fspath() invocation
        return f

    def hastrackeddir(self, d):
        """
        Returns True if the dirstate contains a tracked (not removed) file
        in this directory.
        """
        return d in self._dirs

    def hasdir(self, d):
        """
        Returns True if the dirstate contains a file (tracked or removed)
        in this directory.
        """
        return d in self._alldirs

    @propertycache
    def _dirs(self):
        return pathutil.dirs(self._map, b'r')

    @propertycache
    def _alldirs(self):
        return pathutil.dirs(self._map)

    def _opendirstatefile(self):
        fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
        if self._pendingmode is not None and self._pendingmode != mode:
            fp.close()
            raise error.Abort(
                _(b'working directory state may be changed parallelly')
            )
        self._pendingmode = mode
        return fp

    def parents(self):
        if not self._parents:
            try:
                fp = self._opendirstatefile()
                st = fp.read(2 * self._nodelen)
                fp.close()
            except IOError as err:
                if err.errno != errno.ENOENT:
                    raise
                # File doesn't exist, so the current state is empty
                st = b''

            l = len(st)
            if l == self._nodelen * 2:
                self._parents = (
                    st[: self._nodelen],
                    st[self._nodelen : 2 * self._nodelen],
                )
            elif l == 0:
                self._parents = (
                    self._nodeconstants.nullid,
                    self._nodeconstants.nullid,
                )
            else:
                raise error.Abort(
                    _(b'working directory state appears damaged!')
                )

        return self._parents

    def setparents(self, p1, p2):
        self._parents = (p1, p2)
        self._dirtyparents = True

    def read(self):
        # ignore HG_PENDING because identity is used only for writing
        self.identity = util.filestat.frompath(
            self._opener.join(self._filename)
        )

        try:
            fp = self._opendirstatefile()
            try:
                st = fp.read()
            finally:
                fp.close()
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            return
        if not st:
            return

        if util.safehasattr(parsers, b'dict_new_presized'):
            # Make an estimate of the number of files in the dirstate based on
            # its size. This trades wasting some memory for avoiding costly
            # resizes. Each entry has a prefix of 17 bytes followed by one or
            # two path names. Studies on various large-scale real-world
            # repositories found 54 bytes to be a reasonable upper limit for
            # the average path name. Copy entries are ignored for the sake of
            # this estimate.
            self._map = parsers.dict_new_presized(len(st) // 71)

        # Python's garbage collector triggers a GC each time a certain number
        # of container objects (the number being defined by
        # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
        # for each file in the dirstate. The C version then immediately marks
        # them as not to be tracked by the collector. However, this has no
        # effect on when GCs are triggered, only on what objects the GC looks
        # into. This means that O(number of files) GCs are unavoidable.
        # Depending on when in the process's lifetime the dirstate is parsed,
        # this can get very expensive. As a workaround, disable GC while
        # parsing the dirstate.
        #
        # (we cannot decorate the function directly since it is in a C module)
        parse_dirstate = util.nogc(parsers.parse_dirstate)
        p = parse_dirstate(self._map, self.copymap, st)
        if not self._dirtyparents:
            self.setparents(*p)

        # Avoid excess attribute lookups by fast pathing certain checks
        self.__contains__ = self._map.__contains__
        self.__getitem__ = self._map.__getitem__
        self.get = self._map.get

    def write(self, st, now):
        st.write(
            parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
        )
        st.close()
        self._dirtyparents = False
        self.nonnormalset, self.otherparentset = self.nonnormalentries()

    @propertycache
    def nonnormalset(self):
        nonnorm, otherparents = self.nonnormalentries()
        self.otherparentset = otherparents
        return nonnorm

    @propertycache
    def otherparentset(self):
        nonnorm, otherparents = self.nonnormalentries()
        self.nonnormalset = nonnorm
        return otherparents

    def non_normal_or_other_parent_paths(self):
        return self.nonnormalset.union(self.otherparentset)

    @propertycache
    def identity(self):
        self._map
        return self.identity

    @propertycache
    def dirfoldmap(self):
        f = {}
        normcase = util.normcase
        for name in self._dirs:
            f[normcase(name)] = name
        return f


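A standalone sketch of the pure-Python fallback in `nonnormalentries()` above: given a plain dict of (state, mode, size, mtime) tuples, it derives the nonnormal and other-parent sets (the sample entries are illustrative, not real dirstate data):

FROM_P2 = -2  # same sentinel as the module-level constant

def nonnormal_entries(dmap):
    nonnorm, otherparent = set(), set()
    for fname, (state, mode, size, mtime) in dmap.items():
        if state != b'n' or mtime == -1:
            nonnorm.add(fname)
        if state == b'n' and size == FROM_P2:
            otherparent.add(fname)
    return nonnorm, otherparent

dmap = {
    b'clean.txt': (b'n', 0o644, 12, 1630000000),
    b'added.txt': (b'a', 0o644, -1, -1),
    b'merged.txt': (b'n', 0o644, FROM_P2, 1630000000),
}
print(nonnormal_entries(dmap))
# -> ({b'added.txt'}, {b'merged.txt'})
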
1773 if rustmod is not None:
1776 if rustmod is not None:
1774
1777
1775 class dirstatemap(object):
1778 class dirstatemap(object):
1776 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
1779 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
1777 self._use_dirstate_v2 = use_dirstate_v2
1780 self._use_dirstate_v2 = use_dirstate_v2
1778 self._nodeconstants = nodeconstants
1781 self._nodeconstants = nodeconstants
1779 self._ui = ui
1782 self._ui = ui
1780 self._opener = opener
1783 self._opener = opener
1781 self._root = root
1784 self._root = root
1782 self._filename = b'dirstate'
1785 self._filename = b'dirstate'
1783 self._nodelen = 20 # Also update Rust code when changing this!
1786 self._nodelen = 20 # Also update Rust code when changing this!
1784 self._parents = None
1787 self._parents = None
1785 self._dirtyparents = False
1788 self._dirtyparents = False
1786
1789
1787 # for consistent view between _pl() and _read() invocations
1790 # for consistent view between _pl() and _read() invocations
1788 self._pendingmode = None
1791 self._pendingmode = None
1789
1792
1790 self._use_dirstate_tree = self._ui.configbool(
1793 self._use_dirstate_tree = self._ui.configbool(
1791 b"experimental",
1794 b"experimental",
1792 b"dirstate-tree.in-memory",
1795 b"dirstate-tree.in-memory",
1793 False,
1796 False,
1794 )
1797 )
1795
1798
1796 def addfile(self, *args, **kwargs):
1799 def addfile(self, *args, **kwargs):
1797 return self._rustmap.addfile(*args, **kwargs)
1800 return self._rustmap.addfile(*args, **kwargs)
1798
1801
1799 def removefile(self, *args, **kwargs):
1802 def removefile(self, *args, **kwargs):
1800 return self._rustmap.removefile(*args, **kwargs)
1803 return self._rustmap.removefile(*args, **kwargs)
1801
1804
1802 def dropfile(self, *args, **kwargs):
1805 def dropfile(self, *args, **kwargs):
1803 return self._rustmap.dropfile(*args, **kwargs)
1806 return self._rustmap.dropfile(*args, **kwargs)
1804
1807
1805 def clearambiguoustimes(self, *args, **kwargs):
1808 def clearambiguoustimes(self, *args, **kwargs):
1806 return self._rustmap.clearambiguoustimes(*args, **kwargs)
1809 return self._rustmap.clearambiguoustimes(*args, **kwargs)
1807
1810
1808 def nonnormalentries(self):
1811 def nonnormalentries(self):
1809 return self._rustmap.nonnormalentries()
1812 return self._rustmap.nonnormalentries()
1810
1813
1811 def get(self, *args, **kwargs):
1814 def get(self, *args, **kwargs):
1812 return self._rustmap.get(*args, **kwargs)
1815 return self._rustmap.get(*args, **kwargs)
1813
1816
1814 @property
1817 @property
1815 def copymap(self):
1818 def copymap(self):
1816 return self._rustmap.copymap()
1819 return self._rustmap.copymap()
1817
1820
1818 def directories(self):
1821 def directories(self):
1819 return self._rustmap.directories()
1822 return self._rustmap.directories()
1820
1823
1821 def preload(self):
1824 def preload(self):
1822 self._rustmap
1825 self._rustmap
1823
1826
1824 def clear(self):
1827 def clear(self):
1825 self._rustmap.clear()
1828 self._rustmap.clear()
1826 self.setparents(
1829 self.setparents(
1827 self._nodeconstants.nullid, self._nodeconstants.nullid
1830 self._nodeconstants.nullid, self._nodeconstants.nullid
1828 )
1831 )
1829 util.clearcachedproperty(self, b"_dirs")
1832 util.clearcachedproperty(self, b"_dirs")
1830 util.clearcachedproperty(self, b"_alldirs")
1833 util.clearcachedproperty(self, b"_alldirs")
1831 util.clearcachedproperty(self, b"dirfoldmap")
1834 util.clearcachedproperty(self, b"dirfoldmap")
1832
1835
1833 def items(self):
1836 def items(self):
1834 return self._rustmap.items()
1837 return self._rustmap.items()
1835
1838
1836 def keys(self):
1839 def keys(self):
1837 return iter(self._rustmap)
1840 return iter(self._rustmap)
1838
1841
1839 def __contains__(self, key):
1842 def __contains__(self, key):
1840 return key in self._rustmap
1843 return key in self._rustmap
1841
1844
1842 def __getitem__(self, item):
1845 def __getitem__(self, item):
1843 return self._rustmap[item]
1846 return self._rustmap[item]
1844
1847
1845 def __len__(self):
1848 def __len__(self):
1846 return len(self._rustmap)
1849 return len(self._rustmap)
1847
1850
1848 def __iter__(self):
1851 def __iter__(self):
1849 return iter(self._rustmap)
1852 return iter(self._rustmap)
1850
1853
1851 # forward for python2,3 compat
1854 # forward for python2,3 compat
1852 iteritems = items
1855 iteritems = items
1853
1856
1854 def _opendirstatefile(self):
1857 def _opendirstatefile(self):
1855 fp, mode = txnutil.trypending(
1858 fp, mode = txnutil.trypending(
1856 self._root, self._opener, self._filename
1859 self._root, self._opener, self._filename
1857 )
1860 )
1858 if self._pendingmode is not None and self._pendingmode != mode:
1861 if self._pendingmode is not None and self._pendingmode != mode:
1859 fp.close()
1862 fp.close()
1860 raise error.Abort(
1863 raise error.Abort(
1861 _(b'working directory state may be changed parallelly')
1864 _(b'working directory state may be changed parallelly')
1862 )
1865 )
1863 self._pendingmode = mode
1866 self._pendingmode = mode
1864 return fp
1867 return fp
1865
1868
1866 def setparents(self, p1, p2):
1869 def setparents(self, p1, p2):
1867 self._parents = (p1, p2)
1870 self._parents = (p1, p2)
1868 self._dirtyparents = True
1871 self._dirtyparents = True
1869
1872
1870 def parents(self):
1873 def parents(self):
1871 if not self._parents:
1874 if not self._parents:
1872 if self._use_dirstate_v2:
1875 if self._use_dirstate_v2:
1873 offset = len(rustmod.V2_FORMAT_MARKER)
1876 offset = len(rustmod.V2_FORMAT_MARKER)
1874 else:
1877 else:
1875 offset = 0
1878 offset = 0
1876 read_len = offset + self._nodelen * 2
1879 read_len = offset + self._nodelen * 2
1877 try:
1880 try:
1878 fp = self._opendirstatefile()
1881 fp = self._opendirstatefile()
1879 st = fp.read(read_len)
1882 st = fp.read(read_len)
1880 fp.close()
1883 fp.close()
1881 except IOError as err:
1884 except IOError as err:
1882 if err.errno != errno.ENOENT:
1885 if err.errno != errno.ENOENT:
1883 raise
1886 raise
1884 # File doesn't exist, so the current state is empty
1887 # File doesn't exist, so the current state is empty
1885 st = b''
1888 st = b''
1886
1889
1887 l = len(st)
1890 l = len(st)
1888 if l == read_len:
1891 if l == read_len:
1889 st = st[offset:]
1892 st = st[offset:]
1890 self._parents = (
1893 self._parents = (
1891 st[: self._nodelen],
1894 st[: self._nodelen],
1892 st[self._nodelen : 2 * self._nodelen],
1895 st[self._nodelen : 2 * self._nodelen],
1893 )
1896 )
1894 elif l == 0:
1897 elif l == 0:
1895 self._parents = (
1898 self._parents = (
1896 self._nodeconstants.nullid,
1899 self._nodeconstants.nullid,
1897 self._nodeconstants.nullid,
1900 self._nodeconstants.nullid,
1898 )
1901 )
1899 else:
1902 else:
1900 raise error.Abort(
1903 raise error.Abort(
1901 _(b'working directory state appears damaged!')
1904 _(b'working directory state appears damaged!')
1902 )
1905 )
1903
1906
1904 return self._parents
1907 return self._parents
1905
1908
    @propertycache
    def _rustmap(self):
        """
        Fills the Dirstatemap when called.
        """
        # ignore HG_PENDING because identity is used only for writing
        self.identity = util.filestat.frompath(
            self._opener.join(self._filename)
        )

        try:
            fp = self._opendirstatefile()
            try:
                st = fp.read()
            finally:
                fp.close()
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            st = b''

        self._rustmap, parents = rustmod.DirstateMap.new(
            self._use_dirstate_tree, self._use_dirstate_v2, st
        )

        if parents and not self._dirtyparents:
            self.setparents(*parents)

        self.__contains__ = self._rustmap.__contains__
        self.__getitem__ = self._rustmap.__getitem__
        self.get = self._rustmap.get
        return self._rustmap

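    # A rough usage sketch (assuming `dmap` is an instance of this
    # Rust-backed map; the path below is hypothetical):
    #
    #   dmap.parents()          # cheap: only the parent header is read
    #   dmap._rustmap           # first access parses the whole file in Rust
    #   dmap.get(b'some/file')  # now resolves directly on the Rust object
    #                           # via the attribute forwarding set up above
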
    def write(self, st, now):
        parents = self.parents()
        packed = self._rustmap.write(
            self._use_dirstate_v2, parents[0], parents[1], now
        )
        st.write(packed)
        st.close()
        self._dirtyparents = False

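    # Note on the `now` argument: it is a filesystem timestamp taken just
    # before the write; the dirstate format treats mtimes that recent as
    # ambiguous (the file could still change within the same second), so
    # such entries are expected to be written in a form that forces a
    # re-check on the next status run.
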
    @propertycache
    def filefoldmap(self):
        """Returns a dictionary mapping normalized case paths to their
        non-normalized versions.
        """
        return self._rustmap.filefoldmapasdict()

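    # A rough usage sketch for filefoldmap, with hypothetical paths and
    # assuming a case-folding platform where util.normcase() lowercases:
    #
    #   foldmap = dmap.filefoldmap
    #   foldmap.get(util.normcase(b'README.TXT'))
    #   # -> b'ReadMe.txt' if that spelling is the one actually tracked
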
    def hastrackeddir(self, d):
        return self._rustmap.hastrackeddir(d)

    def hasdir(self, d):
        return self._rustmap.hasdir(d)

    @propertycache
    def identity(self):
        # evaluating _rustmap sets self.identity as a side effect (see the
        # _rustmap propertycache above), so return that freshly cached value
        self._rustmap
        return self.identity

    @property
    def nonnormalset(self):
        nonnorm = self._rustmap.non_normal_entries()
        return nonnorm

    @propertycache
    def otherparentset(self):
        otherparents = self._rustmap.other_parent_entries()
        return otherparents

    def non_normal_or_other_parent_paths(self):
        return self._rustmap.non_normal_or_other_parent_paths()

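    # Roughly speaking, the Rust side computes these sets as follows:
    # "non-normal" entries are those whose cached stat data cannot be
    # trusted (not in normal state, or with an unknown mtime) and must be
    # re-examined by status; "other parent" entries are those recorded as
    # coming from the second parent of a merge.
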
    @propertycache
    def dirfoldmap(self):
        f = {}
        normcase = util.normcase
        for name, _pseudo_entry in self.directories():
            f[normcase(name)] = name
        return f
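
    # dirfoldmap is the directory-level counterpart of filefoldmap: it maps
    # util.normcase()-folded directory names to the spelling stored in the
    # dirstate. A rough sketch with hypothetical names, again assuming a
    # lowercasing normcase():
    #
    #   dmap.dirfoldmap.get(b'src/docs')
    #   # -> b'src/Docs' if that directory contains tracked files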