##// END OF EJS Templates
dirstate: add default value to _addpath...
marmoute -
r48280:523c0038 default
parent child Browse files
Show More
@@ -1,1995 +1,1995 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .pycompat import delattr
17 from .pycompat import delattr
18
18
19 from hgdemandimport import tracing
19 from hgdemandimport import tracing
20
20
21 from . import (
21 from . import (
22 encoding,
22 encoding,
23 error,
23 error,
24 match as matchmod,
24 match as matchmod,
25 pathutil,
25 pathutil,
26 policy,
26 policy,
27 pycompat,
27 pycompat,
28 scmutil,
28 scmutil,
29 sparse,
29 sparse,
30 txnutil,
30 txnutil,
31 util,
31 util,
32 )
32 )
33
33
34 from .interfaces import (
34 from .interfaces import (
35 dirstate as intdirstate,
35 dirstate as intdirstate,
36 util as interfaceutil,
36 util as interfaceutil,
37 )
37 )
38
38
# Load the accelerated (C / Rust) implementations when they are available.
parsers = policy.importmod('parsers')
rustmod = policy.importrust('dirstate')

# dirstate-v2 support requires the Rust extensions.
SUPPORTS_DIRSTATE_V2 = rustmod is not None

propertycache = util.propertycache
filecache = scmutil.filecache
# mask keeping size/mtime values within a signed 31-bit range
_rangemask = 0x7FFFFFFF

dirstatetuple = parsers.dirstatetuple


# special `size` value used internally when the file comes from the other
# parent of a merge
FROM_P2 = -2

# special `size` value used internally when the file is modified/merged/added
NONNORMAL = -1

# special `time` value used internally when the mtime is ambiguous
AMBIGUOUS_TIME = -1
class repocache(filecache):
    """filecache variant for files that live under .hg/"""

    def join(self, obj, fname):
        # resolve relative to the repository's .hg/ opener
        return obj._opener.join(fname)
66
66
67
67
class rootcache(filecache):
    """filecache variant for files that live in the repository root"""

    def join(self, obj, fname):
        # resolve relative to the working-directory root
        return obj._join(fname)
73
73
74
74
75 def _getfsnow(vfs):
75 def _getfsnow(vfs):
76 '''Get "now" timestamp on filesystem'''
76 '''Get "now" timestamp on filesystem'''
77 tmpfd, tmpname = vfs.mkstemp()
77 tmpfd, tmpname = vfs.mkstemp()
78 try:
78 try:
79 return os.fstat(tmpfd)[stat.ST_MTIME]
79 return os.fstat(tmpfd)[stat.ST_MTIME]
80 finally:
80 finally:
81 os.close(tmpfd)
81 os.close(tmpfd)
82 vfs.unlink(tmpname)
82 vfs.unlink(tmpname)
83
83
84
84
85 @interfaceutil.implementer(intdirstate.idirstate)
85 @interfaceutil.implementer(intdirstate.idirstate)
86 class dirstate(object):
86 class dirstate(object):
87 def __init__(
87 def __init__(
88 self,
88 self,
89 opener,
89 opener,
90 ui,
90 ui,
91 root,
91 root,
92 validate,
92 validate,
93 sparsematchfn,
93 sparsematchfn,
94 nodeconstants,
94 nodeconstants,
95 use_dirstate_v2,
95 use_dirstate_v2,
96 ):
96 ):
97 """Create a new dirstate object.
97 """Create a new dirstate object.
98
98
99 opener is an open()-like callable that can be used to open the
99 opener is an open()-like callable that can be used to open the
100 dirstate file; root is the root of the directory tracked by
100 dirstate file; root is the root of the directory tracked by
101 the dirstate.
101 the dirstate.
102 """
102 """
103 self._use_dirstate_v2 = use_dirstate_v2
103 self._use_dirstate_v2 = use_dirstate_v2
104 self._nodeconstants = nodeconstants
104 self._nodeconstants = nodeconstants
105 self._opener = opener
105 self._opener = opener
106 self._validate = validate
106 self._validate = validate
107 self._root = root
107 self._root = root
108 self._sparsematchfn = sparsematchfn
108 self._sparsematchfn = sparsematchfn
109 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
109 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
110 # UNC path pointing to root share (issue4557)
110 # UNC path pointing to root share (issue4557)
111 self._rootdir = pathutil.normasprefix(root)
111 self._rootdir = pathutil.normasprefix(root)
112 self._dirty = False
112 self._dirty = False
113 self._lastnormaltime = 0
113 self._lastnormaltime = 0
114 self._ui = ui
114 self._ui = ui
115 self._filecache = {}
115 self._filecache = {}
116 self._parentwriters = 0
116 self._parentwriters = 0
117 self._filename = b'dirstate'
117 self._filename = b'dirstate'
118 self._pendingfilename = b'%s.pending' % self._filename
118 self._pendingfilename = b'%s.pending' % self._filename
119 self._plchangecallbacks = {}
119 self._plchangecallbacks = {}
120 self._origpl = None
120 self._origpl = None
121 self._updatedfiles = set()
121 self._updatedfiles = set()
122 self._mapcls = dirstatemap
122 self._mapcls = dirstatemap
123 # Access and cache cwd early, so we don't access it for the first time
123 # Access and cache cwd early, so we don't access it for the first time
124 # after a working-copy update caused it to not exist (accessing it then
124 # after a working-copy update caused it to not exist (accessing it then
125 # raises an exception).
125 # raises an exception).
126 self._cwd
126 self._cwd
127
127
128 def prefetch_parents(self):
128 def prefetch_parents(self):
129 """make sure the parents are loaded
129 """make sure the parents are loaded
130
130
131 Used to avoid a race condition.
131 Used to avoid a race condition.
132 """
132 """
133 self._pl
133 self._pl
134
134
135 @contextlib.contextmanager
135 @contextlib.contextmanager
136 def parentchange(self):
136 def parentchange(self):
137 """Context manager for handling dirstate parents.
137 """Context manager for handling dirstate parents.
138
138
139 If an exception occurs in the scope of the context manager,
139 If an exception occurs in the scope of the context manager,
140 the incoherent dirstate won't be written when wlock is
140 the incoherent dirstate won't be written when wlock is
141 released.
141 released.
142 """
142 """
143 self._parentwriters += 1
143 self._parentwriters += 1
144 yield
144 yield
145 # Typically we want the "undo" step of a context manager in a
145 # Typically we want the "undo" step of a context manager in a
146 # finally block so it happens even when an exception
146 # finally block so it happens even when an exception
147 # occurs. In this case, however, we only want to decrement
147 # occurs. In this case, however, we only want to decrement
148 # parentwriters if the code in the with statement exits
148 # parentwriters if the code in the with statement exits
149 # normally, so we don't have a try/finally here on purpose.
149 # normally, so we don't have a try/finally here on purpose.
150 self._parentwriters -= 1
150 self._parentwriters -= 1
151
151
152 def pendingparentchange(self):
152 def pendingparentchange(self):
153 """Returns true if the dirstate is in the middle of a set of changes
153 """Returns true if the dirstate is in the middle of a set of changes
154 that modify the dirstate parent.
154 that modify the dirstate parent.
155 """
155 """
156 return self._parentwriters > 0
156 return self._parentwriters > 0
157
157
158 @propertycache
158 @propertycache
159 def _map(self):
159 def _map(self):
160 """Return the dirstate contents (see documentation for dirstatemap)."""
160 """Return the dirstate contents (see documentation for dirstatemap)."""
161 self._map = self._mapcls(
161 self._map = self._mapcls(
162 self._ui,
162 self._ui,
163 self._opener,
163 self._opener,
164 self._root,
164 self._root,
165 self._nodeconstants,
165 self._nodeconstants,
166 self._use_dirstate_v2,
166 self._use_dirstate_v2,
167 )
167 )
168 return self._map
168 return self._map
169
169
170 @property
170 @property
171 def _sparsematcher(self):
171 def _sparsematcher(self):
172 """The matcher for the sparse checkout.
172 """The matcher for the sparse checkout.
173
173
174 The working directory may not include every file from a manifest. The
174 The working directory may not include every file from a manifest. The
175 matcher obtained by this property will match a path if it is to be
175 matcher obtained by this property will match a path if it is to be
176 included in the working directory.
176 included in the working directory.
177 """
177 """
178 # TODO there is potential to cache this property. For now, the matcher
178 # TODO there is potential to cache this property. For now, the matcher
179 # is resolved on every access. (But the called function does use a
179 # is resolved on every access. (But the called function does use a
180 # cache to keep the lookup fast.)
180 # cache to keep the lookup fast.)
181 return self._sparsematchfn()
181 return self._sparsematchfn()
182
182
183 @repocache(b'branch')
183 @repocache(b'branch')
184 def _branch(self):
184 def _branch(self):
185 try:
185 try:
186 return self._opener.read(b"branch").strip() or b"default"
186 return self._opener.read(b"branch").strip() or b"default"
187 except IOError as inst:
187 except IOError as inst:
188 if inst.errno != errno.ENOENT:
188 if inst.errno != errno.ENOENT:
189 raise
189 raise
190 return b"default"
190 return b"default"
191
191
192 @property
192 @property
193 def _pl(self):
193 def _pl(self):
194 return self._map.parents()
194 return self._map.parents()
195
195
196 def hasdir(self, d):
196 def hasdir(self, d):
197 return self._map.hastrackeddir(d)
197 return self._map.hastrackeddir(d)
198
198
199 @rootcache(b'.hgignore')
199 @rootcache(b'.hgignore')
200 def _ignore(self):
200 def _ignore(self):
201 files = self._ignorefiles()
201 files = self._ignorefiles()
202 if not files:
202 if not files:
203 return matchmod.never()
203 return matchmod.never()
204
204
205 pats = [b'include:%s' % f for f in files]
205 pats = [b'include:%s' % f for f in files]
206 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
206 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
207
207
208 @propertycache
208 @propertycache
209 def _slash(self):
209 def _slash(self):
210 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
210 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
211
211
212 @propertycache
212 @propertycache
213 def _checklink(self):
213 def _checklink(self):
214 return util.checklink(self._root)
214 return util.checklink(self._root)
215
215
216 @propertycache
216 @propertycache
217 def _checkexec(self):
217 def _checkexec(self):
218 return bool(util.checkexec(self._root))
218 return bool(util.checkexec(self._root))
219
219
220 @propertycache
220 @propertycache
221 def _checkcase(self):
221 def _checkcase(self):
222 return not util.fscasesensitive(self._join(b'.hg'))
222 return not util.fscasesensitive(self._join(b'.hg'))
223
223
224 def _join(self, f):
224 def _join(self, f):
225 # much faster than os.path.join()
225 # much faster than os.path.join()
226 # it's safe because f is always a relative path
226 # it's safe because f is always a relative path
227 return self._rootdir + f
227 return self._rootdir + f
228
228
229 def flagfunc(self, buildfallback):
229 def flagfunc(self, buildfallback):
230 if self._checklink and self._checkexec:
230 if self._checklink and self._checkexec:
231
231
232 def f(x):
232 def f(x):
233 try:
233 try:
234 st = os.lstat(self._join(x))
234 st = os.lstat(self._join(x))
235 if util.statislink(st):
235 if util.statislink(st):
236 return b'l'
236 return b'l'
237 if util.statisexec(st):
237 if util.statisexec(st):
238 return b'x'
238 return b'x'
239 except OSError:
239 except OSError:
240 pass
240 pass
241 return b''
241 return b''
242
242
243 return f
243 return f
244
244
245 fallback = buildfallback()
245 fallback = buildfallback()
246 if self._checklink:
246 if self._checklink:
247
247
248 def f(x):
248 def f(x):
249 if os.path.islink(self._join(x)):
249 if os.path.islink(self._join(x)):
250 return b'l'
250 return b'l'
251 if b'x' in fallback(x):
251 if b'x' in fallback(x):
252 return b'x'
252 return b'x'
253 return b''
253 return b''
254
254
255 return f
255 return f
256 if self._checkexec:
256 if self._checkexec:
257
257
258 def f(x):
258 def f(x):
259 if b'l' in fallback(x):
259 if b'l' in fallback(x):
260 return b'l'
260 return b'l'
261 if util.isexec(self._join(x)):
261 if util.isexec(self._join(x)):
262 return b'x'
262 return b'x'
263 return b''
263 return b''
264
264
265 return f
265 return f
266 else:
266 else:
267 return fallback
267 return fallback
268
268
269 @propertycache
269 @propertycache
270 def _cwd(self):
270 def _cwd(self):
271 # internal config: ui.forcecwd
271 # internal config: ui.forcecwd
272 forcecwd = self._ui.config(b'ui', b'forcecwd')
272 forcecwd = self._ui.config(b'ui', b'forcecwd')
273 if forcecwd:
273 if forcecwd:
274 return forcecwd
274 return forcecwd
275 return encoding.getcwd()
275 return encoding.getcwd()
276
276
277 def getcwd(self):
277 def getcwd(self):
278 """Return the path from which a canonical path is calculated.
278 """Return the path from which a canonical path is calculated.
279
279
280 This path should be used to resolve file patterns or to convert
280 This path should be used to resolve file patterns or to convert
281 canonical paths back to file paths for display. It shouldn't be
281 canonical paths back to file paths for display. It shouldn't be
282 used to get real file paths. Use vfs functions instead.
282 used to get real file paths. Use vfs functions instead.
283 """
283 """
284 cwd = self._cwd
284 cwd = self._cwd
285 if cwd == self._root:
285 if cwd == self._root:
286 return b''
286 return b''
287 # self._root ends with a path separator if self._root is '/' or 'C:\'
287 # self._root ends with a path separator if self._root is '/' or 'C:\'
288 rootsep = self._root
288 rootsep = self._root
289 if not util.endswithsep(rootsep):
289 if not util.endswithsep(rootsep):
290 rootsep += pycompat.ossep
290 rootsep += pycompat.ossep
291 if cwd.startswith(rootsep):
291 if cwd.startswith(rootsep):
292 return cwd[len(rootsep) :]
292 return cwd[len(rootsep) :]
293 else:
293 else:
294 # we're outside the repo. return an absolute path.
294 # we're outside the repo. return an absolute path.
295 return cwd
295 return cwd
296
296
297 def pathto(self, f, cwd=None):
297 def pathto(self, f, cwd=None):
298 if cwd is None:
298 if cwd is None:
299 cwd = self.getcwd()
299 cwd = self.getcwd()
300 path = util.pathto(self._root, cwd, f)
300 path = util.pathto(self._root, cwd, f)
301 if self._slash:
301 if self._slash:
302 return util.pconvert(path)
302 return util.pconvert(path)
303 return path
303 return path
304
304
305 def __getitem__(self, key):
305 def __getitem__(self, key):
306 """Return the current state of key (a filename) in the dirstate.
306 """Return the current state of key (a filename) in the dirstate.
307
307
308 States are:
308 States are:
309 n normal
309 n normal
310 m needs merging
310 m needs merging
311 r marked for removal
311 r marked for removal
312 a marked for addition
312 a marked for addition
313 ? not tracked
313 ? not tracked
314 """
314 """
315 return self._map.get(key, (b"?",))[0]
315 return self._map.get(key, (b"?",))[0]
316
316
317 def __contains__(self, key):
317 def __contains__(self, key):
318 return key in self._map
318 return key in self._map
319
319
320 def __iter__(self):
320 def __iter__(self):
321 return iter(sorted(self._map))
321 return iter(sorted(self._map))
322
322
323 def items(self):
323 def items(self):
324 return pycompat.iteritems(self._map)
324 return pycompat.iteritems(self._map)
325
325
326 iteritems = items
326 iteritems = items
327
327
328 def directories(self):
328 def directories(self):
329 return self._map.directories()
329 return self._map.directories()
330
330
331 def parents(self):
331 def parents(self):
332 return [self._validate(p) for p in self._pl]
332 return [self._validate(p) for p in self._pl]
333
333
334 def p1(self):
334 def p1(self):
335 return self._validate(self._pl[0])
335 return self._validate(self._pl[0])
336
336
337 def p2(self):
337 def p2(self):
338 return self._validate(self._pl[1])
338 return self._validate(self._pl[1])
339
339
340 def branch(self):
340 def branch(self):
341 return encoding.tolocal(self._branch)
341 return encoding.tolocal(self._branch)
342
342
343 def setparents(self, p1, p2=None):
343 def setparents(self, p1, p2=None):
344 """Set dirstate parents to p1 and p2.
344 """Set dirstate parents to p1 and p2.
345
345
346 When moving from two parents to one, 'm' merged entries a
346 When moving from two parents to one, 'm' merged entries a
347 adjusted to normal and previous copy records discarded and
347 adjusted to normal and previous copy records discarded and
348 returned by the call.
348 returned by the call.
349
349
350 See localrepo.setparents()
350 See localrepo.setparents()
351 """
351 """
352 if p2 is None:
352 if p2 is None:
353 p2 = self._nodeconstants.nullid
353 p2 = self._nodeconstants.nullid
354 if self._parentwriters == 0:
354 if self._parentwriters == 0:
355 raise ValueError(
355 raise ValueError(
356 b"cannot set dirstate parent outside of "
356 b"cannot set dirstate parent outside of "
357 b"dirstate.parentchange context manager"
357 b"dirstate.parentchange context manager"
358 )
358 )
359
359
360 self._dirty = True
360 self._dirty = True
361 oldp2 = self._pl[1]
361 oldp2 = self._pl[1]
362 if self._origpl is None:
362 if self._origpl is None:
363 self._origpl = self._pl
363 self._origpl = self._pl
364 self._map.setparents(p1, p2)
364 self._map.setparents(p1, p2)
365 copies = {}
365 copies = {}
366 if (
366 if (
367 oldp2 != self._nodeconstants.nullid
367 oldp2 != self._nodeconstants.nullid
368 and p2 == self._nodeconstants.nullid
368 and p2 == self._nodeconstants.nullid
369 ):
369 ):
370 candidatefiles = self._map.non_normal_or_other_parent_paths()
370 candidatefiles = self._map.non_normal_or_other_parent_paths()
371
371
372 for f in candidatefiles:
372 for f in candidatefiles:
373 s = self._map.get(f)
373 s = self._map.get(f)
374 if s is None:
374 if s is None:
375 continue
375 continue
376
376
377 # Discard 'm' markers when moving away from a merge state
377 # Discard 'm' markers when moving away from a merge state
378 if s[0] == b'm':
378 if s[0] == b'm':
379 source = self._map.copymap.get(f)
379 source = self._map.copymap.get(f)
380 if source:
380 if source:
381 copies[f] = source
381 copies[f] = source
382 self.normallookup(f)
382 self.normallookup(f)
383 # Also fix up otherparent markers
383 # Also fix up otherparent markers
384 elif s[0] == b'n' and s[2] == FROM_P2:
384 elif s[0] == b'n' and s[2] == FROM_P2:
385 source = self._map.copymap.get(f)
385 source = self._map.copymap.get(f)
386 if source:
386 if source:
387 copies[f] = source
387 copies[f] = source
388 self.add(f)
388 self.add(f)
389 return copies
389 return copies
390
390
391 def setbranch(self, branch):
391 def setbranch(self, branch):
392 self.__class__._branch.set(self, encoding.fromlocal(branch))
392 self.__class__._branch.set(self, encoding.fromlocal(branch))
393 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
393 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
394 try:
394 try:
395 f.write(self._branch + b'\n')
395 f.write(self._branch + b'\n')
396 f.close()
396 f.close()
397
397
398 # make sure filecache has the correct stat info for _branch after
398 # make sure filecache has the correct stat info for _branch after
399 # replacing the underlying file
399 # replacing the underlying file
400 ce = self._filecache[b'_branch']
400 ce = self._filecache[b'_branch']
401 if ce:
401 if ce:
402 ce.refresh()
402 ce.refresh()
403 except: # re-raises
403 except: # re-raises
404 f.discard()
404 f.discard()
405 raise
405 raise
406
406
407 def invalidate(self):
407 def invalidate(self):
408 """Causes the next access to reread the dirstate.
408 """Causes the next access to reread the dirstate.
409
409
410 This is different from localrepo.invalidatedirstate() because it always
410 This is different from localrepo.invalidatedirstate() because it always
411 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
411 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
412 check whether the dirstate has changed before rereading it."""
412 check whether the dirstate has changed before rereading it."""
413
413
414 for a in ("_map", "_branch", "_ignore"):
414 for a in ("_map", "_branch", "_ignore"):
415 if a in self.__dict__:
415 if a in self.__dict__:
416 delattr(self, a)
416 delattr(self, a)
417 self._lastnormaltime = 0
417 self._lastnormaltime = 0
418 self._dirty = False
418 self._dirty = False
419 self._updatedfiles.clear()
419 self._updatedfiles.clear()
420 self._parentwriters = 0
420 self._parentwriters = 0
421 self._origpl = None
421 self._origpl = None
422
422
423 def copy(self, source, dest):
423 def copy(self, source, dest):
424 """Mark dest as a copy of source. Unmark dest if source is None."""
424 """Mark dest as a copy of source. Unmark dest if source is None."""
425 if source == dest:
425 if source == dest:
426 return
426 return
427 self._dirty = True
427 self._dirty = True
428 if source is not None:
428 if source is not None:
429 self._map.copymap[dest] = source
429 self._map.copymap[dest] = source
430 self._updatedfiles.add(source)
430 self._updatedfiles.add(source)
431 self._updatedfiles.add(dest)
431 self._updatedfiles.add(dest)
432 elif self._map.copymap.pop(dest, None):
432 elif self._map.copymap.pop(dest, None):
433 self._updatedfiles.add(dest)
433 self._updatedfiles.add(dest)
434
434
435 def copied(self, file):
435 def copied(self, file):
436 return self._map.copymap.get(file, None)
436 return self._map.copymap.get(file, None)
437
437
438 def copies(self):
438 def copies(self):
439 return self._map.copymap
439 return self._map.copymap
440
440
441 def _addpath(self, f, state, mode, size, mtime):
441 def _addpath(self, f, state, mode, size=NONNORMAL, mtime=AMBIGUOUS_TIME):
442 oldstate = self[f]
442 oldstate = self[f]
443 if state == b'a' or oldstate == b'r':
443 if state == b'a' or oldstate == b'r':
444 scmutil.checkfilename(f)
444 scmutil.checkfilename(f)
445 if self._map.hastrackeddir(f):
445 if self._map.hastrackeddir(f):
446 msg = _(b'directory %r already in dirstate')
446 msg = _(b'directory %r already in dirstate')
447 msg %= pycompat.bytestr(f)
447 msg %= pycompat.bytestr(f)
448 raise error.Abort(msg)
448 raise error.Abort(msg)
449 # shadows
449 # shadows
450 for d in pathutil.finddirs(f):
450 for d in pathutil.finddirs(f):
451 if self._map.hastrackeddir(d):
451 if self._map.hastrackeddir(d):
452 break
452 break
453 entry = self._map.get(d)
453 entry = self._map.get(d)
454 if entry is not None and entry[0] != b'r':
454 if entry is not None and entry[0] != b'r':
455 msg = _(b'file %r in dirstate clashes with %r')
455 msg = _(b'file %r in dirstate clashes with %r')
456 msg %= (pycompat.bytestr(d), pycompat.bytestr(f))
456 msg %= (pycompat.bytestr(d), pycompat.bytestr(f))
457 raise error.Abort(msg)
457 raise error.Abort(msg)
458 if size != NONNORMAL and size != FROM_P2:
458 if size != NONNORMAL and size != FROM_P2:
459 size = size & _rangemask
459 size = size & _rangemask
460 if mtime != AMBIGUOUS_TIME:
460 if mtime != AMBIGUOUS_TIME:
461 mtime = mtime & _rangemask
461 mtime = mtime & _rangemask
462 self._dirty = True
462 self._dirty = True
463 self._updatedfiles.add(f)
463 self._updatedfiles.add(f)
464 self._map.addfile(f, oldstate, state, mode, size, mtime)
464 self._map.addfile(f, oldstate, state, mode, size, mtime)
465
465
466 def normal(self, f, parentfiledata=None):
466 def normal(self, f, parentfiledata=None):
467 """Mark a file normal and clean.
467 """Mark a file normal and clean.
468
468
469 parentfiledata: (mode, size, mtime) of the clean file
469 parentfiledata: (mode, size, mtime) of the clean file
470
470
471 parentfiledata should be computed from memory (for mode,
471 parentfiledata should be computed from memory (for mode,
472 size), as or close as possible from the point where we
472 size), as or close as possible from the point where we
473 determined the file was clean, to limit the risk of the
473 determined the file was clean, to limit the risk of the
474 file having been changed by an external process between the
474 file having been changed by an external process between the
475 moment where the file was determined to be clean and now."""
475 moment where the file was determined to be clean and now."""
476 if parentfiledata:
476 if parentfiledata:
477 (mode, size, mtime) = parentfiledata
477 (mode, size, mtime) = parentfiledata
478 else:
478 else:
479 s = os.lstat(self._join(f))
479 s = os.lstat(self._join(f))
480 mode = s.st_mode
480 mode = s.st_mode
481 size = s.st_size
481 size = s.st_size
482 mtime = s[stat.ST_MTIME]
482 mtime = s[stat.ST_MTIME]
483 self._addpath(f, b'n', mode, size, mtime)
483 self._addpath(f, b'n', mode, size, mtime)
484 self._map.copymap.pop(f, None)
484 self._map.copymap.pop(f, None)
485 if f in self._map.nonnormalset:
485 if f in self._map.nonnormalset:
486 self._map.nonnormalset.remove(f)
486 self._map.nonnormalset.remove(f)
487 if mtime > self._lastnormaltime:
487 if mtime > self._lastnormaltime:
488 # Remember the most recent modification timeslot for status(),
488 # Remember the most recent modification timeslot for status(),
489 # to make sure we won't miss future size-preserving file content
489 # to make sure we won't miss future size-preserving file content
490 # modifications that happen within the same timeslot.
490 # modifications that happen within the same timeslot.
491 self._lastnormaltime = mtime
491 self._lastnormaltime = mtime
492
492
493 def normallookup(self, f):
493 def normallookup(self, f):
494 '''Mark a file normal, but possibly dirty.'''
494 '''Mark a file normal, but possibly dirty.'''
495 if self._pl[1] != self._nodeconstants.nullid:
495 if self._pl[1] != self._nodeconstants.nullid:
496 # if there is a merge going on and the file was either
496 # if there is a merge going on and the file was either
497 # in state 'm' (-1) or coming from other parent (-2) before
497 # in state 'm' (-1) or coming from other parent (-2) before
498 # being removed, restore that state.
498 # being removed, restore that state.
499 entry = self._map.get(f)
499 entry = self._map.get(f)
500 if entry is not None:
500 if entry is not None:
501 if entry[0] == b'r' and entry[2] in (NONNORMAL, FROM_P2):
501 if entry[0] == b'r' and entry[2] in (NONNORMAL, FROM_P2):
502 source = self._map.copymap.get(f)
502 source = self._map.copymap.get(f)
503 if entry[2] == NONNORMAL:
503 if entry[2] == NONNORMAL:
504 self.merge(f)
504 self.merge(f)
505 elif entry[2] == FROM_P2:
505 elif entry[2] == FROM_P2:
506 self.otherparent(f)
506 self.otherparent(f)
507 if source:
507 if source:
508 self.copy(source, f)
508 self.copy(source, f)
509 return
509 return
510 if entry[0] == b'm' or entry[0] == b'n' and entry[2] == FROM_P2:
510 if entry[0] == b'm' or entry[0] == b'n' and entry[2] == FROM_P2:
511 return
511 return
512 self._addpath(f, b'n', 0, NONNORMAL, AMBIGUOUS_TIME)
512 self._addpath(f, b'n', 0)
513 self._map.copymap.pop(f, None)
513 self._map.copymap.pop(f, None)
514
514
515 def otherparent(self, f):
515 def otherparent(self, f):
516 '''Mark as coming from the other parent, always dirty.'''
516 '''Mark as coming from the other parent, always dirty.'''
517 if self._pl[1] == self._nodeconstants.nullid:
517 if self._pl[1] == self._nodeconstants.nullid:
518 msg = _(b"setting %r to other parent only allowed in merges") % f
518 msg = _(b"setting %r to other parent only allowed in merges") % f
519 raise error.Abort(msg)
519 raise error.Abort(msg)
520 if f in self and self[f] == b'n':
520 if f in self and self[f] == b'n':
521 # merge-like
521 # merge-like
522 self._addpath(f, b'm', 0, FROM_P2, AMBIGUOUS_TIME)
522 self._addpath(f, b'm', 0, FROM_P2)
523 else:
523 else:
524 # add-like
524 # add-like
525 self._addpath(f, b'n', 0, FROM_P2, AMBIGUOUS_TIME)
525 self._addpath(f, b'n', 0, FROM_P2)
526 self._map.copymap.pop(f, None)
526 self._map.copymap.pop(f, None)
527
527
528 def add(self, f):
528 def add(self, f):
529 '''Mark a file added.'''
529 '''Mark a file added.'''
530 self._addpath(f, b'a', 0, NONNORMAL, AMBIGUOUS_TIME)
530 self._addpath(f, b'a', 0)
531 self._map.copymap.pop(f, None)
531 self._map.copymap.pop(f, None)
532
532
533 def remove(self, f):
533 def remove(self, f):
534 '''Mark a file removed.'''
534 '''Mark a file removed.'''
535 self._dirty = True
535 self._dirty = True
536 oldstate = self[f]
536 oldstate = self[f]
537 size = 0
537 size = 0
538 if self._pl[1] != self._nodeconstants.nullid:
538 if self._pl[1] != self._nodeconstants.nullid:
539 entry = self._map.get(f)
539 entry = self._map.get(f)
540 if entry is not None:
540 if entry is not None:
541 # backup the previous state
541 # backup the previous state
542 if entry[0] == b'm': # merge
542 if entry[0] == b'm': # merge
543 size = NONNORMAL
543 size = NONNORMAL
544 elif entry[0] == b'n' and entry[2] == FROM_P2: # other parent
544 elif entry[0] == b'n' and entry[2] == FROM_P2: # other parent
545 size = FROM_P2
545 size = FROM_P2
546 self._map.otherparentset.add(f)
546 self._map.otherparentset.add(f)
547 self._updatedfiles.add(f)
547 self._updatedfiles.add(f)
548 self._map.removefile(f, oldstate, size)
548 self._map.removefile(f, oldstate, size)
549 if size == 0:
549 if size == 0:
550 self._map.copymap.pop(f, None)
550 self._map.copymap.pop(f, None)
551
551
552 def merge(self, f):
552 def merge(self, f):
553 '''Mark a file merged.'''
553 '''Mark a file merged.'''
554 if self._pl[1] == self._nodeconstants.nullid:
554 if self._pl[1] == self._nodeconstants.nullid:
555 return self.normallookup(f)
555 return self.normallookup(f)
556 return self.otherparent(f)
556 return self.otherparent(f)
557
557
558 def drop(self, f):
558 def drop(self, f):
559 '''Drop a file from the dirstate'''
559 '''Drop a file from the dirstate'''
560 oldstate = self[f]
560 oldstate = self[f]
561 if self._map.dropfile(f, oldstate):
561 if self._map.dropfile(f, oldstate):
562 self._dirty = True
562 self._dirty = True
563 self._updatedfiles.add(f)
563 self._updatedfiles.add(f)
564 self._map.copymap.pop(f, None)
564 self._map.copymap.pop(f, None)
565
565
566 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
566 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
567 if exists is None:
567 if exists is None:
568 exists = os.path.lexists(os.path.join(self._root, path))
568 exists = os.path.lexists(os.path.join(self._root, path))
569 if not exists:
569 if not exists:
570 # Maybe a path component exists
570 # Maybe a path component exists
571 if not ignoremissing and b'/' in path:
571 if not ignoremissing and b'/' in path:
572 d, f = path.rsplit(b'/', 1)
572 d, f = path.rsplit(b'/', 1)
573 d = self._normalize(d, False, ignoremissing, None)
573 d = self._normalize(d, False, ignoremissing, None)
574 folded = d + b"/" + f
574 folded = d + b"/" + f
575 else:
575 else:
576 # No path components, preserve original case
576 # No path components, preserve original case
577 folded = path
577 folded = path
578 else:
578 else:
579 # recursively normalize leading directory components
579 # recursively normalize leading directory components
580 # against dirstate
580 # against dirstate
581 if b'/' in normed:
581 if b'/' in normed:
582 d, f = normed.rsplit(b'/', 1)
582 d, f = normed.rsplit(b'/', 1)
583 d = self._normalize(d, False, ignoremissing, True)
583 d = self._normalize(d, False, ignoremissing, True)
584 r = self._root + b"/" + d
584 r = self._root + b"/" + d
585 folded = d + b"/" + util.fspath(f, r)
585 folded = d + b"/" + util.fspath(f, r)
586 else:
586 else:
587 folded = util.fspath(normed, self._root)
587 folded = util.fspath(normed, self._root)
588 storemap[normed] = folded
588 storemap[normed] = folded
589
589
590 return folded
590 return folded
591
591
592 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
592 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
593 normed = util.normcase(path)
593 normed = util.normcase(path)
594 folded = self._map.filefoldmap.get(normed, None)
594 folded = self._map.filefoldmap.get(normed, None)
595 if folded is None:
595 if folded is None:
596 if isknown:
596 if isknown:
597 folded = path
597 folded = path
598 else:
598 else:
599 folded = self._discoverpath(
599 folded = self._discoverpath(
600 path, normed, ignoremissing, exists, self._map.filefoldmap
600 path, normed, ignoremissing, exists, self._map.filefoldmap
601 )
601 )
602 return folded
602 return folded
603
603
604 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
604 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
605 normed = util.normcase(path)
605 normed = util.normcase(path)
606 folded = self._map.filefoldmap.get(normed, None)
606 folded = self._map.filefoldmap.get(normed, None)
607 if folded is None:
607 if folded is None:
608 folded = self._map.dirfoldmap.get(normed, None)
608 folded = self._map.dirfoldmap.get(normed, None)
609 if folded is None:
609 if folded is None:
610 if isknown:
610 if isknown:
611 folded = path
611 folded = path
612 else:
612 else:
613 # store discovered result in dirfoldmap so that future
613 # store discovered result in dirfoldmap so that future
614 # normalizefile calls don't start matching directories
614 # normalizefile calls don't start matching directories
615 folded = self._discoverpath(
615 folded = self._discoverpath(
616 path, normed, ignoremissing, exists, self._map.dirfoldmap
616 path, normed, ignoremissing, exists, self._map.dirfoldmap
617 )
617 )
618 return folded
618 return folded
619
619
620 def normalize(self, path, isknown=False, ignoremissing=False):
620 def normalize(self, path, isknown=False, ignoremissing=False):
621 """
621 """
622 normalize the case of a pathname when on a casefolding filesystem
622 normalize the case of a pathname when on a casefolding filesystem
623
623
624 isknown specifies whether the filename came from walking the
624 isknown specifies whether the filename came from walking the
625 disk, to avoid extra filesystem access.
625 disk, to avoid extra filesystem access.
626
626
627 If ignoremissing is True, missing path are returned
627 If ignoremissing is True, missing path are returned
628 unchanged. Otherwise, we try harder to normalize possibly
628 unchanged. Otherwise, we try harder to normalize possibly
629 existing path components.
629 existing path components.
630
630
631 The normalized case is determined based on the following precedence:
631 The normalized case is determined based on the following precedence:
632
632
633 - version of name already stored in the dirstate
633 - version of name already stored in the dirstate
634 - version of name stored on disk
634 - version of name stored on disk
635 - version provided via command arguments
635 - version provided via command arguments
636 """
636 """
637
637
638 if self._checkcase:
638 if self._checkcase:
639 return self._normalize(path, isknown, ignoremissing)
639 return self._normalize(path, isknown, ignoremissing)
640 return path
640 return path
641
641
642 def clear(self):
642 def clear(self):
643 self._map.clear()
643 self._map.clear()
644 self._lastnormaltime = 0
644 self._lastnormaltime = 0
645 self._updatedfiles.clear()
645 self._updatedfiles.clear()
646 self._dirty = True
646 self._dirty = True
647
647
648 def rebuild(self, parent, allfiles, changedfiles=None):
648 def rebuild(self, parent, allfiles, changedfiles=None):
649 if changedfiles is None:
649 if changedfiles is None:
650 # Rebuild entire dirstate
650 # Rebuild entire dirstate
651 to_lookup = allfiles
651 to_lookup = allfiles
652 to_drop = []
652 to_drop = []
653 lastnormaltime = self._lastnormaltime
653 lastnormaltime = self._lastnormaltime
654 self.clear()
654 self.clear()
655 self._lastnormaltime = lastnormaltime
655 self._lastnormaltime = lastnormaltime
656 elif len(changedfiles) < 10:
656 elif len(changedfiles) < 10:
657 # Avoid turning allfiles into a set, which can be expensive if it's
657 # Avoid turning allfiles into a set, which can be expensive if it's
658 # large.
658 # large.
659 to_lookup = []
659 to_lookup = []
660 to_drop = []
660 to_drop = []
661 for f in changedfiles:
661 for f in changedfiles:
662 if f in allfiles:
662 if f in allfiles:
663 to_lookup.append(f)
663 to_lookup.append(f)
664 else:
664 else:
665 to_drop.append(f)
665 to_drop.append(f)
666 else:
666 else:
667 changedfilesset = set(changedfiles)
667 changedfilesset = set(changedfiles)
668 to_lookup = changedfilesset & set(allfiles)
668 to_lookup = changedfilesset & set(allfiles)
669 to_drop = changedfilesset - to_lookup
669 to_drop = changedfilesset - to_lookup
670
670
671 if self._origpl is None:
671 if self._origpl is None:
672 self._origpl = self._pl
672 self._origpl = self._pl
673 self._map.setparents(parent, self._nodeconstants.nullid)
673 self._map.setparents(parent, self._nodeconstants.nullid)
674
674
675 for f in to_lookup:
675 for f in to_lookup:
676 self.normallookup(f)
676 self.normallookup(f)
677 for f in to_drop:
677 for f in to_drop:
678 self.drop(f)
678 self.drop(f)
679
679
680 self._dirty = True
680 self._dirty = True
681
681
    def identity(self):
        """Return identity of dirstate itself to detect changing in storage

        If identity of previous dirstate is equal to this, writing
        changes based on the former dirstate out can keep consistency.
        """
        # The identity object is captured by the map when it reads the
        # on-disk file; it is simply exposed here.
        return self._map.identity
689
689
    def write(self, tr):
        """Write the dirstate out, or schedule the write on transaction *tr*.

        No-op when there is nothing dirty to write.
        """
        if not self._dirty:
            return

        filename = self._filename
        if tr:
            # 'dirstate.write()' is not only for writing in-memory
            # changes out, but also for dropping ambiguous timestamp.
            # delayed writing re-raise "ambiguous timestamp issue".
            # See also the wiki page below for detail:
            # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan

            # emulate dropping timestamp in 'parsers.pack_dirstate'
            now = _getfsnow(self._opener)
            self._map.clearambiguoustimes(self._updatedfiles, now)

            # emulate that all 'dirstate.normal' results are written out
            self._lastnormaltime = 0
            self._updatedfiles.clear()

            # delay writing in-memory changes out
            tr.addfilegenerator(
                b'dirstate',
                (self._filename,),
                self._writedirstate,
                location=b'plain',
            )
            return

        # No transaction: write synchronously through an atomic temp file,
        # guarding against timestamp ambiguity (checkambig).
        st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
        self._writedirstate(st)
721
721
    def addparentchangecallback(self, category, callback):
        """add a callback to be called when the wd parents are changed

        Callback will be called with the following arguments:
        dirstate, (oldp1, oldp2), (newp1, newp2)

        Category is a unique identifier to allow overwriting an old callback
        with a newer callback.
        """
        # Later registrations under the same category replace earlier ones.
        self._plchangecallbacks[category] = callback
732
732
    def _writedirstate(self, st):
        """Serialize the dirstate map into the open file object *st*."""
        # notify callbacks about parents change
        if self._origpl is not None and self._origpl != self._pl:
            # sorted() keeps callback invocation order deterministic
            for c, callback in sorted(
                pycompat.iteritems(self._plchangecallbacks)
            ):
                callback(self, self._origpl, self._pl)
            self._origpl = None
        # use the modification time of the newly created temporary file as the
        # filesystem's notion of 'now'
        now = util.fstat(st)[stat.ST_MTIME] & _rangemask

        # enough 'delaywrite' prevents 'pack_dirstate' from dropping
        # timestamp of each entries in dirstate, because of 'now > mtime'
        delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
        if delaywrite > 0:
            # do we have any files to delay for?
            for f, e in pycompat.iteritems(self._map):
                if e[0] == b'n' and e[3] == now:
                    import time  # to avoid useless import

                    # rather than sleep n seconds, sleep until the next
                    # multiple of n seconds
                    clock = time.time()
                    start = int(clock) - (int(clock) % delaywrite)
                    end = start + delaywrite
                    time.sleep(end - clock)
                    now = end  # trust our estimate that the end is near now
                    break

        self._map.write(st, now)
        self._lastnormaltime = 0
        self._dirty = False
766
766
767 def _dirignore(self, f):
767 def _dirignore(self, f):
768 if self._ignore(f):
768 if self._ignore(f):
769 return True
769 return True
770 for p in pathutil.finddirs(f):
770 for p in pathutil.finddirs(f):
771 if self._ignore(p):
771 if self._ignore(p):
772 return True
772 return True
773 return False
773 return False
774
774
775 def _ignorefiles(self):
775 def _ignorefiles(self):
776 files = []
776 files = []
777 if os.path.exists(self._join(b'.hgignore')):
777 if os.path.exists(self._join(b'.hgignore')):
778 files.append(self._join(b'.hgignore'))
778 files.append(self._join(b'.hgignore'))
779 for name, path in self._ui.configitems(b"ui"):
779 for name, path in self._ui.configitems(b"ui"):
780 if name == b'ignore' or name.startswith(b'ignore.'):
780 if name == b'ignore' or name.startswith(b'ignore.'):
781 # we need to use os.path.join here rather than self._join
781 # we need to use os.path.join here rather than self._join
782 # because path is arbitrary and user-specified
782 # because path is arbitrary and user-specified
783 files.append(os.path.join(self._rootdir, util.expandpath(path)))
783 files.append(os.path.join(self._rootdir, util.expandpath(path)))
784 return files
784 return files
785
785
786 def _ignorefileandline(self, f):
786 def _ignorefileandline(self, f):
787 files = collections.deque(self._ignorefiles())
787 files = collections.deque(self._ignorefiles())
788 visited = set()
788 visited = set()
789 while files:
789 while files:
790 i = files.popleft()
790 i = files.popleft()
791 patterns = matchmod.readpatternfile(
791 patterns = matchmod.readpatternfile(
792 i, self._ui.warn, sourceinfo=True
792 i, self._ui.warn, sourceinfo=True
793 )
793 )
794 for pattern, lineno, line in patterns:
794 for pattern, lineno, line in patterns:
795 kind, p = matchmod._patsplit(pattern, b'glob')
795 kind, p = matchmod._patsplit(pattern, b'glob')
796 if kind == b"subinclude":
796 if kind == b"subinclude":
797 if p not in visited:
797 if p not in visited:
798 files.append(p)
798 files.append(p)
799 continue
799 continue
800 m = matchmod.match(
800 m = matchmod.match(
801 self._root, b'', [], [pattern], warn=self._ui.warn
801 self._root, b'', [], [pattern], warn=self._ui.warn
802 )
802 )
803 if m(f):
803 if m(f):
804 return (i, lineno, line)
804 return (i, lineno, line)
805 visited.add(i)
805 visited.add(i)
806 return (None, -1, b"")
806 return (None, -1, b"")
807
807
    def _walkexplicit(self, match, subrepos):
        """Get stat data about the files explicitly specified by match.

        Return a triple (results, dirsfound, dirsnotfound).
        - results is a mapping from filename to stat result. It also contains
          listings mapping subrepos and .hg to None.
        - dirsfound is a list of files found to be directories.
        - dirsnotfound is a list of files that the dirstate thinks are
          directories and that were not found."""

        def badtype(mode):
            # Build a human-readable description for an unsupported file type.
            kind = _(b'unknown')
            if stat.S_ISCHR(mode):
                kind = _(b'character device')
            elif stat.S_ISBLK(mode):
                kind = _(b'block device')
            elif stat.S_ISFIFO(mode):
                kind = _(b'fifo')
            elif stat.S_ISSOCK(mode):
                kind = _(b'socket')
            elif stat.S_ISDIR(mode):
                kind = _(b'directory')
            return _(b'unsupported file type (type is %s)') % kind

        # Hoist frequently-used lookups to locals for the hot loop below.
        badfn = match.bad
        dmap = self._map
        lstat = os.lstat
        getkind = stat.S_IFMT
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join
        dirsfound = []
        foundadd = dirsfound.append
        dirsnotfound = []
        notfoundadd = dirsnotfound.append

        if not match.isexact() and self._checkcase:
            normalize = self._normalize
        else:
            normalize = None

        # Walk both sorted lists in lockstep and drop any requested file
        # that lives inside a subrepo; the subrepo handles those itself.
        files = sorted(match.files())
        subrepos.sort()
        i, j = 0, 0
        while i < len(files) and j < len(subrepos):
            subpath = subrepos[j] + b"/"
            if files[i] < subpath:
                i += 1
                continue
            while i < len(files) and files[i].startswith(subpath):
                del files[i]
            j += 1

        if not files or b'' in files:
            files = [b'']
            # constructing the foldmap is expensive, so don't do it for the
            # common case where files is ['']
            normalize = None
        # subrepos and .hg get None sentinels so later walks skip them.
        results = dict.fromkeys(subrepos)
        results[b'.hg'] = None

        for ff in files:
            if normalize:
                nf = normalize(ff, False, True)
            else:
                nf = ff
            if nf in results:
                continue

            try:
                st = lstat(join(nf))
                kind = getkind(st.st_mode)
                if kind == dirkind:
                    if nf in dmap:
                        # file replaced by dir on disk but still in dirstate
                        results[nf] = None
                    foundadd((nf, ff))
                elif kind == regkind or kind == lnkkind:
                    results[nf] = st
                else:
                    badfn(ff, badtype(kind))
                    if nf in dmap:
                        results[nf] = None
            except OSError as inst:  # nf not found on disk - it is dirstate only
                if nf in dmap:  # does it exactly match a missing file?
                    results[nf] = None
                else:  # does it match a missing directory?
                    if self._map.hasdir(nf):
                        notfoundadd(nf)
                    else:
                        badfn(ff, encoding.strtolocal(inst.strerror))

        # match.files() may contain explicitly-specified paths that shouldn't
        # be taken; drop them from the list of files found. dirsfound/notfound
        # aren't filtered here because they will be tested later.
        if match.anypats():
            for f in list(results):
                if f == b'.hg' or f in subrepos:
                    # keep sentinel to disable further out-of-repo walks
                    continue
                if not match(f):
                    del results[f]

        # Case insensitive filesystems cannot rely on lstat() failing to detect
        # a case-only rename. Prune the stat object for any file that does not
        # match the case in the filesystem, if there are multiple files that
        # normalize to the same path.
        if match.isexact() and self._checkcase:
            normed = {}

            # Group the stat'ed results by their case-folded form.
            for f, st in pycompat.iteritems(results):
                if st is None:
                    continue

                nc = util.normcase(f)
                paths = normed.get(nc)

                if paths is None:
                    paths = set()
                    normed[nc] = paths

                paths.add(f)

            # Any group with more than one spelling keeps only the one that
            # matches the on-disk case; the others lose their stat object.
            for norm, paths in pycompat.iteritems(normed):
                if len(paths) > 1:
                    for path in paths:
                        folded = self._discoverpath(
                            path, norm, True, None, self._map.dirfoldmap
                        )
                        if path != folded:
                            results[path] = None

        return results, dirsfound, dirsnotfound
942
942
943 def walk(self, match, subrepos, unknown, ignored, full=True):
943 def walk(self, match, subrepos, unknown, ignored, full=True):
944 """
944 """
945 Walk recursively through the directory tree, finding all files
945 Walk recursively through the directory tree, finding all files
946 matched by match.
946 matched by match.
947
947
948 If full is False, maybe skip some known-clean files.
948 If full is False, maybe skip some known-clean files.
949
949
950 Return a dict mapping filename to stat-like object (either
950 Return a dict mapping filename to stat-like object (either
951 mercurial.osutil.stat instance or return value of os.stat()).
951 mercurial.osutil.stat instance or return value of os.stat()).
952
952
953 """
953 """
954 # full is a flag that extensions that hook into walk can use -- this
954 # full is a flag that extensions that hook into walk can use -- this
955 # implementation doesn't use it at all. This satisfies the contract
955 # implementation doesn't use it at all. This satisfies the contract
956 # because we only guarantee a "maybe".
956 # because we only guarantee a "maybe".
957
957
958 if ignored:
958 if ignored:
959 ignore = util.never
959 ignore = util.never
960 dirignore = util.never
960 dirignore = util.never
961 elif unknown:
961 elif unknown:
962 ignore = self._ignore
962 ignore = self._ignore
963 dirignore = self._dirignore
963 dirignore = self._dirignore
964 else:
964 else:
965 # if not unknown and not ignored, drop dir recursion and step 2
965 # if not unknown and not ignored, drop dir recursion and step 2
966 ignore = util.always
966 ignore = util.always
967 dirignore = util.always
967 dirignore = util.always
968
968
969 matchfn = match.matchfn
969 matchfn = match.matchfn
970 matchalways = match.always()
970 matchalways = match.always()
971 matchtdir = match.traversedir
971 matchtdir = match.traversedir
972 dmap = self._map
972 dmap = self._map
973 listdir = util.listdir
973 listdir = util.listdir
974 lstat = os.lstat
974 lstat = os.lstat
975 dirkind = stat.S_IFDIR
975 dirkind = stat.S_IFDIR
976 regkind = stat.S_IFREG
976 regkind = stat.S_IFREG
977 lnkkind = stat.S_IFLNK
977 lnkkind = stat.S_IFLNK
978 join = self._join
978 join = self._join
979
979
980 exact = skipstep3 = False
980 exact = skipstep3 = False
981 if match.isexact(): # match.exact
981 if match.isexact(): # match.exact
982 exact = True
982 exact = True
983 dirignore = util.always # skip step 2
983 dirignore = util.always # skip step 2
984 elif match.prefix(): # match.match, no patterns
984 elif match.prefix(): # match.match, no patterns
985 skipstep3 = True
985 skipstep3 = True
986
986
987 if not exact and self._checkcase:
987 if not exact and self._checkcase:
988 normalize = self._normalize
988 normalize = self._normalize
989 normalizefile = self._normalizefile
989 normalizefile = self._normalizefile
990 skipstep3 = False
990 skipstep3 = False
991 else:
991 else:
992 normalize = self._normalize
992 normalize = self._normalize
993 normalizefile = None
993 normalizefile = None
994
994
995 # step 1: find all explicit files
995 # step 1: find all explicit files
996 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
996 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
997 if matchtdir:
997 if matchtdir:
998 for d in work:
998 for d in work:
999 matchtdir(d[0])
999 matchtdir(d[0])
1000 for d in dirsnotfound:
1000 for d in dirsnotfound:
1001 matchtdir(d)
1001 matchtdir(d)
1002
1002
1003 skipstep3 = skipstep3 and not (work or dirsnotfound)
1003 skipstep3 = skipstep3 and not (work or dirsnotfound)
1004 work = [d for d in work if not dirignore(d[0])]
1004 work = [d for d in work if not dirignore(d[0])]
1005
1005
1006 # step 2: visit subdirectories
1006 # step 2: visit subdirectories
1007 def traverse(work, alreadynormed):
1007 def traverse(work, alreadynormed):
1008 wadd = work.append
1008 wadd = work.append
1009 while work:
1009 while work:
1010 tracing.counter('dirstate.walk work', len(work))
1010 tracing.counter('dirstate.walk work', len(work))
1011 nd = work.pop()
1011 nd = work.pop()
1012 visitentries = match.visitchildrenset(nd)
1012 visitentries = match.visitchildrenset(nd)
1013 if not visitentries:
1013 if not visitentries:
1014 continue
1014 continue
1015 if visitentries == b'this' or visitentries == b'all':
1015 if visitentries == b'this' or visitentries == b'all':
1016 visitentries = None
1016 visitentries = None
1017 skip = None
1017 skip = None
1018 if nd != b'':
1018 if nd != b'':
1019 skip = b'.hg'
1019 skip = b'.hg'
1020 try:
1020 try:
1021 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1021 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1022 entries = listdir(join(nd), stat=True, skip=skip)
1022 entries = listdir(join(nd), stat=True, skip=skip)
1023 except OSError as inst:
1023 except OSError as inst:
1024 if inst.errno in (errno.EACCES, errno.ENOENT):
1024 if inst.errno in (errno.EACCES, errno.ENOENT):
1025 match.bad(
1025 match.bad(
1026 self.pathto(nd), encoding.strtolocal(inst.strerror)
1026 self.pathto(nd), encoding.strtolocal(inst.strerror)
1027 )
1027 )
1028 continue
1028 continue
1029 raise
1029 raise
1030 for f, kind, st in entries:
1030 for f, kind, st in entries:
1031 # Some matchers may return files in the visitentries set,
1031 # Some matchers may return files in the visitentries set,
1032 # instead of 'this', if the matcher explicitly mentions them
1032 # instead of 'this', if the matcher explicitly mentions them
1033 # and is not an exactmatcher. This is acceptable; we do not
1033 # and is not an exactmatcher. This is acceptable; we do not
1034 # make any hard assumptions about file-or-directory below
1034 # make any hard assumptions about file-or-directory below
1035 # based on the presence of `f` in visitentries. If
1035 # based on the presence of `f` in visitentries. If
1036 # visitchildrenset returned a set, we can always skip the
1036 # visitchildrenset returned a set, we can always skip the
1037 # entries *not* in the set it provided regardless of whether
1037 # entries *not* in the set it provided regardless of whether
1038 # they're actually a file or a directory.
1038 # they're actually a file or a directory.
1039 if visitentries and f not in visitentries:
1039 if visitentries and f not in visitentries:
1040 continue
1040 continue
1041 if normalizefile:
1041 if normalizefile:
1042 # even though f might be a directory, we're only
1042 # even though f might be a directory, we're only
1043 # interested in comparing it to files currently in the
1043 # interested in comparing it to files currently in the
1044 # dmap -- therefore normalizefile is enough
1044 # dmap -- therefore normalizefile is enough
1045 nf = normalizefile(
1045 nf = normalizefile(
1046 nd and (nd + b"/" + f) or f, True, True
1046 nd and (nd + b"/" + f) or f, True, True
1047 )
1047 )
1048 else:
1048 else:
1049 nf = nd and (nd + b"/" + f) or f
1049 nf = nd and (nd + b"/" + f) or f
1050 if nf not in results:
1050 if nf not in results:
1051 if kind == dirkind:
1051 if kind == dirkind:
1052 if not ignore(nf):
1052 if not ignore(nf):
1053 if matchtdir:
1053 if matchtdir:
1054 matchtdir(nf)
1054 matchtdir(nf)
1055 wadd(nf)
1055 wadd(nf)
1056 if nf in dmap and (matchalways or matchfn(nf)):
1056 if nf in dmap and (matchalways or matchfn(nf)):
1057 results[nf] = None
1057 results[nf] = None
1058 elif kind == regkind or kind == lnkkind:
1058 elif kind == regkind or kind == lnkkind:
1059 if nf in dmap:
1059 if nf in dmap:
1060 if matchalways or matchfn(nf):
1060 if matchalways or matchfn(nf):
1061 results[nf] = st
1061 results[nf] = st
1062 elif (matchalways or matchfn(nf)) and not ignore(
1062 elif (matchalways or matchfn(nf)) and not ignore(
1063 nf
1063 nf
1064 ):
1064 ):
1065 # unknown file -- normalize if necessary
1065 # unknown file -- normalize if necessary
1066 if not alreadynormed:
1066 if not alreadynormed:
1067 nf = normalize(nf, False, True)
1067 nf = normalize(nf, False, True)
1068 results[nf] = st
1068 results[nf] = st
1069 elif nf in dmap and (matchalways or matchfn(nf)):
1069 elif nf in dmap and (matchalways or matchfn(nf)):
1070 results[nf] = None
1070 results[nf] = None
1071
1071
1072 for nd, d in work:
1072 for nd, d in work:
1073 # alreadynormed means that processwork doesn't have to do any
1073 # alreadynormed means that processwork doesn't have to do any
1074 # expensive directory normalization
1074 # expensive directory normalization
1075 alreadynormed = not normalize or nd == d
1075 alreadynormed = not normalize or nd == d
1076 traverse([d], alreadynormed)
1076 traverse([d], alreadynormed)
1077
1077
1078 for s in subrepos:
1078 for s in subrepos:
1079 del results[s]
1079 del results[s]
1080 del results[b'.hg']
1080 del results[b'.hg']
1081
1081
1082 # step 3: visit remaining files from dmap
1082 # step 3: visit remaining files from dmap
1083 if not skipstep3 and not exact:
1083 if not skipstep3 and not exact:
1084 # If a dmap file is not in results yet, it was either
1084 # If a dmap file is not in results yet, it was either
1085 # a) not matching matchfn b) ignored, c) missing, or d) under a
1085 # a) not matching matchfn b) ignored, c) missing, or d) under a
1086 # symlink directory.
1086 # symlink directory.
1087 if not results and matchalways:
1087 if not results and matchalways:
1088 visit = [f for f in dmap]
1088 visit = [f for f in dmap]
1089 else:
1089 else:
1090 visit = [f for f in dmap if f not in results and matchfn(f)]
1090 visit = [f for f in dmap if f not in results and matchfn(f)]
1091 visit.sort()
1091 visit.sort()
1092
1092
1093 if unknown:
1093 if unknown:
1094 # unknown == True means we walked all dirs under the roots
1094 # unknown == True means we walked all dirs under the roots
1095 # that wasn't ignored, and everything that matched was stat'ed
1095 # that wasn't ignored, and everything that matched was stat'ed
1096 # and is already in results.
1096 # and is already in results.
1097 # The rest must thus be ignored or under a symlink.
1097 # The rest must thus be ignored or under a symlink.
1098 audit_path = pathutil.pathauditor(self._root, cached=True)
1098 audit_path = pathutil.pathauditor(self._root, cached=True)
1099
1099
1100 for nf in iter(visit):
1100 for nf in iter(visit):
1101 # If a stat for the same file was already added with a
1101 # If a stat for the same file was already added with a
1102 # different case, don't add one for this, since that would
1102 # different case, don't add one for this, since that would
1103 # make it appear as if the file exists under both names
1103 # make it appear as if the file exists under both names
1104 # on disk.
1104 # on disk.
1105 if (
1105 if (
1106 normalizefile
1106 normalizefile
1107 and normalizefile(nf, True, True) in results
1107 and normalizefile(nf, True, True) in results
1108 ):
1108 ):
1109 results[nf] = None
1109 results[nf] = None
1110 # Report ignored items in the dmap as long as they are not
1110 # Report ignored items in the dmap as long as they are not
1111 # under a symlink directory.
1111 # under a symlink directory.
1112 elif audit_path.check(nf):
1112 elif audit_path.check(nf):
1113 try:
1113 try:
1114 results[nf] = lstat(join(nf))
1114 results[nf] = lstat(join(nf))
1115 # file was just ignored, no links, and exists
1115 # file was just ignored, no links, and exists
1116 except OSError:
1116 except OSError:
1117 # file doesn't exist
1117 # file doesn't exist
1118 results[nf] = None
1118 results[nf] = None
1119 else:
1119 else:
1120 # It's either missing or under a symlink directory
1120 # It's either missing or under a symlink directory
1121 # which we in this case report as missing
1121 # which we in this case report as missing
1122 results[nf] = None
1122 results[nf] = None
1123 else:
1123 else:
1124 # We may not have walked the full directory tree above,
1124 # We may not have walked the full directory tree above,
1125 # so stat and check everything we missed.
1125 # so stat and check everything we missed.
1126 iv = iter(visit)
1126 iv = iter(visit)
1127 for st in util.statfiles([join(i) for i in visit]):
1127 for st in util.statfiles([join(i) for i in visit]):
1128 results[next(iv)] = st
1128 results[next(iv)] = st
1129 return results
1129 return results
1130
1130
    def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
        """Compute working-copy status through the Rust implementation.

        Returns the same ``(lookup, status)`` pair as ``status()``.  The
        underlying ``rustmod.status`` call may raise
        ``rustmod.FallbackError``, which the caller (``status()``) catches
        to fall back to the pure Python path.
        """
        # Force Rayon (Rust parallelism library) to respect the number of
        # workers. This is a temporary workaround until Rust code knows
        # how to read the config file.
        numcpus = self._ui.configint(b"worker", b"numcpus")
        if numcpus is not None:
            encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)

        workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
        if not workers_enabled:
            # a single Rayon thread effectively disables parallelism
            encoding.environ[b"RAYON_NUM_THREADS"] = b"1"

        # NOTE: the unpacking order below must match the tuple produced by
        # rustmod.status exactly.
        (
            lookup,
            modified,
            added,
            removed,
            deleted,
            clean,
            ignored,
            unknown,
            warnings,
            bad,
            traversed,
            dirty,
        ) = rustmod.status(
            self._map._rustmap,
            matcher,
            self._rootdir,
            self._ignorefiles(),
            self._checkexec,
            self._lastnormaltime,
            bool(list_clean),
            bool(list_ignored),
            bool(list_unknown),
            bool(matcher.traversedir),
        )

        # the Rust side may have fixed up entries; remember we must write
        self._dirty |= dirty

        if matcher.traversedir:
            for dir in traversed:
                matcher.traversedir(dir)

        if self._ui.warn:
            for item in warnings:
                if isinstance(item, tuple):
                    # (file_path, syntax) pair: invalid syntax in an
                    # ignore/pattern file
                    file_path, syntax = item
                    msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
                        file_path,
                        syntax,
                    )
                    self._ui.warn(msg)
                else:
                    # bare path: the pattern file could not be read
                    msg = _(b"skipping unreadable pattern file '%s': %s\n")
                    self._ui.warn(
                        msg
                        % (
                            pathutil.canonpath(
                                self._rootdir, self._rootdir, item
                            ),
                            b"No such file or directory",
                        )
                    )

        for (fn, message) in bad:
            matcher.bad(fn, encoding.strtolocal(message))

        status = scmutil.status(
            modified=modified,
            added=added,
            removed=removed,
            deleted=deleted,
            unknown=unknown,
            ignored=ignored,
            clean=clean,
        )
        return (lookup, status)
1209
1209
    def status(self, match, subrepos, ignored, clean, unknown):
        """Determine the status of the working copy relative to the
        dirstate and return a pair of (unsure, status), where status is of type
        scmutil.status and:

          unsure:
            files that might have been modified since the dirstate was
            written, but need to be read to be sure (size is the same
            but mtime differs)
          status.modified:
            files that have definitely been modified since the dirstate
            was written (different size or mode)
          status.clean:
            files that have definitely not been modified since the
            dirstate was written
        """
        # the boolean request flags are saved under list* names because the
        # parameter names are immediately reused for the result lists below
        listignored, listclean, listunknown = ignored, clean, unknown
        lookup, modified, added, unknown, ignored = [], [], [], [], []
        removed, deleted, clean = [], [], []

        dmap = self._map
        dmap.preload()

        use_rust = True

        allowed_matchers = (
            matchmod.alwaysmatcher,
            matchmod.exactmatcher,
            matchmod.includematcher,
        )

        if rustmod is None:
            use_rust = False
        elif self._checkcase:
            # Case-insensitive filesystems are not handled yet
            use_rust = False
        elif subrepos:
            use_rust = False
        elif sparse.enabled:
            use_rust = False
        elif not isinstance(match, allowed_matchers):
            # Some matchers have yet to be implemented
            use_rust = False

        if use_rust:
            try:
                return self._rust_status(
                    match, listclean, listignored, listunknown
                )
            except rustmod.FallbackError:
                pass

        def noop(f):
            pass

        # bound methods are hoisted to locals so the hot loop below avoids
        # repeated attribute lookups
        dcontains = dmap.__contains__
        dget = dmap.__getitem__
        ladd = lookup.append  # aka "unsure"
        madd = modified.append
        aadd = added.append
        uadd = unknown.append if listunknown else noop
        iadd = ignored.append if listignored else noop
        radd = removed.append
        dadd = deleted.append
        cadd = clean.append if listclean else noop
        mexact = match.exact
        dirignore = self._dirignore
        checkexec = self._checkexec
        copymap = self._map.copymap
        lastnormaltime = self._lastnormaltime

        # We need to do full walks when either
        # - we're listing all clean files, or
        # - match.traversedir does something, because match.traversedir should
        #   be called for every dir in the working dir
        full = listclean or match.traversedir is not None
        for fn, st in pycompat.iteritems(
            self.walk(match, subrepos, listunknown, listignored, full=full)
        ):
            if not dcontains(fn):
                # file on disk but not in the dirstate: ignored or unknown
                if (listignored or mexact(fn)) and dirignore(fn):
                    if listignored:
                        iadd(fn)
                else:
                    uadd(fn)
                continue

            # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
            # written like that for performance reasons. dmap[fn] is not a
            # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
            # opcode has fast paths when the value to be unpacked is a tuple or
            # a list, but falls back to creating a full-fledged iterator in
            # general. That is much slower than simply accessing and storing the
            # tuple members one by one.
            t = dget(fn)
            state = t[0]
            mode = t[1]
            size = t[2]
            time = t[3]

            if not st and state in b"nma":
                # tracked file vanished from disk
                dadd(fn)
            elif state == b'n':
                if (
                    size >= 0
                    and (
                        (size != st.st_size and size != st.st_size & _rangemask)
                        or ((mode ^ st.st_mode) & 0o100 and checkexec)
                    )
                    or size == FROM_P2  # other parent
                    or fn in copymap
                ):
                    if stat.S_ISLNK(st.st_mode) and size != st.st_size:
                        # issue6456: Size returned may be longer due to
                        # encryption on EXT-4 fscrypt, undecided.
                        ladd(fn)
                    else:
                        madd(fn)
                elif (
                    time != st[stat.ST_MTIME]
                    and time != st[stat.ST_MTIME] & _rangemask
                ):
                    # same size but stored mtime differs: must read content
                    ladd(fn)
                elif st[stat.ST_MTIME] == lastnormaltime:
                    # fn may have just been marked as normal and it may have
                    # changed in the same second without changing its size.
                    # This can happen if we quickly do multiple commits.
                    # Force lookup, so we don't miss such a racy file change.
                    ladd(fn)
                elif listclean:
                    cadd(fn)
            elif state == b'm':
                madd(fn)
            elif state == b'a':
                aadd(fn)
            elif state == b'r':
                radd(fn)
        status = scmutil.status(
            modified, added, removed, deleted, unknown, ignored, clean
        )
        return (lookup, status)
1351
1351
1352 def matches(self, match):
1352 def matches(self, match):
1353 """
1353 """
1354 return files in the dirstate (in whatever state) filtered by match
1354 return files in the dirstate (in whatever state) filtered by match
1355 """
1355 """
1356 dmap = self._map
1356 dmap = self._map
1357 if rustmod is not None:
1357 if rustmod is not None:
1358 dmap = self._map._rustmap
1358 dmap = self._map._rustmap
1359
1359
1360 if match.always():
1360 if match.always():
1361 return dmap.keys()
1361 return dmap.keys()
1362 files = match.files()
1362 files = match.files()
1363 if match.isexact():
1363 if match.isexact():
1364 # fast path -- filter the other way around, since typically files is
1364 # fast path -- filter the other way around, since typically files is
1365 # much smaller than dmap
1365 # much smaller than dmap
1366 return [f for f in files if f in dmap]
1366 return [f for f in files if f in dmap]
1367 if match.prefix() and all(fn in dmap for fn in files):
1367 if match.prefix() and all(fn in dmap for fn in files):
1368 # fast path -- all the values are known to be files, so just return
1368 # fast path -- all the values are known to be files, so just return
1369 # that
1369 # that
1370 return list(files)
1370 return list(files)
1371 return [f for f in dmap if match(f)]
1371 return [f for f in dmap if match(f)]
1372
1372
1373 def _actualfilename(self, tr):
1373 def _actualfilename(self, tr):
1374 if tr:
1374 if tr:
1375 return self._pendingfilename
1375 return self._pendingfilename
1376 else:
1376 else:
1377 return self._filename
1377 return self._filename
1378
1378
    def savebackup(self, tr, backupname):
        '''Save current dirstate into backup file

        ``tr`` is the active transaction (or None); ``backupname`` is the
        name the backup is written under, which must differ from the
        actual dirstate file name.
        '''
        filename = self._actualfilename(tr)
        assert backupname != filename

        # use '_writedirstate' instead of 'write' to write changes certainly,
        # because the latter omits writing out if transaction is running.
        # output file will be used to create backup of dirstate at this point.
        if self._dirty or not self._opener.exists(filename):
            self._writedirstate(
                self._opener(filename, b"w", atomictemp=True, checkambig=True)
            )

        if tr:
            # ensure that subsequent tr.writepending returns True for
            # changes written out above, even if dirstate is never
            # changed after this
            tr.addfilegenerator(
                b'dirstate',
                (self._filename,),
                self._writedirstate,
                location=b'plain',
            )

            # ensure that pending file written above is unlinked at
            # failure, even if tr.writepending isn't invoked until the
            # end of this transaction
            tr.registertmp(filename, location=b'plain')

        # remove any stale backup before creating the new one
        self._opener.tryunlink(backupname)
        # hardlink backup is okay because _writedirstate is always called
        # with an "atomictemp=True" file.
        util.copyfile(
            self._opener.join(filename),
            self._opener.join(backupname),
            hardlink=True,
        )
1416
1416
1417 def restorebackup(self, tr, backupname):
1417 def restorebackup(self, tr, backupname):
1418 '''Restore dirstate by backup file'''
1418 '''Restore dirstate by backup file'''
1419 # this "invalidate()" prevents "wlock.release()" from writing
1419 # this "invalidate()" prevents "wlock.release()" from writing
1420 # changes of dirstate out after restoring from backup file
1420 # changes of dirstate out after restoring from backup file
1421 self.invalidate()
1421 self.invalidate()
1422 filename = self._actualfilename(tr)
1422 filename = self._actualfilename(tr)
1423 o = self._opener
1423 o = self._opener
1424 if util.samefile(o.join(backupname), o.join(filename)):
1424 if util.samefile(o.join(backupname), o.join(filename)):
1425 o.unlink(backupname)
1425 o.unlink(backupname)
1426 else:
1426 else:
1427 o.rename(backupname, filename, checkambig=True)
1427 o.rename(backupname, filename, checkambig=True)
1428
1428
1429 def clearbackup(self, tr, backupname):
1429 def clearbackup(self, tr, backupname):
1430 '''Clear backup file'''
1430 '''Clear backup file'''
1431 self._opener.unlink(backupname)
1431 self._opener.unlink(backupname)
1432
1432
1433
1433
1434 class dirstatemap(object):
1434 class dirstatemap(object):
1435 """Map encapsulating the dirstate's contents.
1435 """Map encapsulating the dirstate's contents.
1436
1436
1437 The dirstate contains the following state:
1437 The dirstate contains the following state:
1438
1438
1439 - `identity` is the identity of the dirstate file, which can be used to
1439 - `identity` is the identity of the dirstate file, which can be used to
1440 detect when changes have occurred to the dirstate file.
1440 detect when changes have occurred to the dirstate file.
1441
1441
1442 - `parents` is a pair containing the parents of the working copy. The
1442 - `parents` is a pair containing the parents of the working copy. The
1443 parents are updated by calling `setparents`.
1443 parents are updated by calling `setparents`.
1444
1444
1445 - the state map maps filenames to tuples of (state, mode, size, mtime),
1445 - the state map maps filenames to tuples of (state, mode, size, mtime),
1446 where state is a single character representing 'normal', 'added',
1446 where state is a single character representing 'normal', 'added',
1447 'removed', or 'merged'. It is read by treating the dirstate as a
1447 'removed', or 'merged'. It is read by treating the dirstate as a
1448 dict. File state is updated by calling the `addfile`, `removefile` and
1448 dict. File state is updated by calling the `addfile`, `removefile` and
1449 `dropfile` methods.
1449 `dropfile` methods.
1450
1450
1451 - `copymap` maps destination filenames to their source filename.
1451 - `copymap` maps destination filenames to their source filename.
1452
1452
1453 The dirstate also provides the following views onto the state:
1453 The dirstate also provides the following views onto the state:
1454
1454
1455 - `nonnormalset` is a set of the filenames that have state other
1455 - `nonnormalset` is a set of the filenames that have state other
1456 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1456 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1457
1457
1458 - `otherparentset` is a set of the filenames that are marked as coming
1458 - `otherparentset` is a set of the filenames that are marked as coming
1459 from the second parent when the dirstate is currently being merged.
1459 from the second parent when the dirstate is currently being merged.
1460
1460
1461 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1461 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1462 form that they appear as in the dirstate.
1462 form that they appear as in the dirstate.
1463
1463
1464 - `dirfoldmap` is a dict mapping normalized directory names to the
1464 - `dirfoldmap` is a dict mapping normalized directory names to the
1465 denormalized form that they appear as in the dirstate.
1465 denormalized form that they appear as in the dirstate.
1466 """
1466 """
1467
1467
    def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
        """Initialize an (empty, unread) dirstate map.

        The actual file contents are loaded lazily via the ``_map``
        property cache.  ``use_dirstate_v2`` must be False for this pure
        Python class; v2 support lives in the Rust implementation.
        """
        self._ui = ui
        self._opener = opener
        self._root = root
        # name of the on-disk dirstate file
        self._filename = b'dirstate'
        # length of a binary node id in the dirstate file
        self._nodelen = 20
        self._nodeconstants = nodeconstants
        assert (
            not use_dirstate_v2
        ), "should have detected unsupported requirement"

        # parents are read lazily; None means "not loaded yet"
        self._parents = None
        self._dirtyparents = False

        # for consistent view between _pl() and _read() invocations
        self._pendingmode = None
1484
1484
    @propertycache
    def _map(self):
        """Lazily load and return the filename -> dirstatetuple mapping.

        The attribute is assigned *before* read() runs so that read(),
        which populates ``self._map``, operates on the instance attribute
        rather than re-entering this property.
        """
        self._map = {}
        self.read()
        return self._map
1490
1490
    @propertycache
    def copymap(self):
        """Lazily build the copy-destination -> copy-source mapping."""
        self.copymap = {}
        # touching self._map triggers read(), which fills self.copymap
        # as a side effect
        self._map
        return self.copymap
1496
1496
    def directories(self):
        """Return explicit directory entries.

        Only the Rust / dirstate-v2 implementation records such entries;
        the pure Python map has none, so this is always empty.
        """
        # Rust / dirstate-v2 only
        return []
1500
1500
1501 def clear(self):
1501 def clear(self):
1502 self._map.clear()
1502 self._map.clear()
1503 self.copymap.clear()
1503 self.copymap.clear()
1504 self.setparents(self._nodeconstants.nullid, self._nodeconstants.nullid)
1504 self.setparents(self._nodeconstants.nullid, self._nodeconstants.nullid)
1505 util.clearcachedproperty(self, b"_dirs")
1505 util.clearcachedproperty(self, b"_dirs")
1506 util.clearcachedproperty(self, b"_alldirs")
1506 util.clearcachedproperty(self, b"_alldirs")
1507 util.clearcachedproperty(self, b"filefoldmap")
1507 util.clearcachedproperty(self, b"filefoldmap")
1508 util.clearcachedproperty(self, b"dirfoldmap")
1508 util.clearcachedproperty(self, b"dirfoldmap")
1509 util.clearcachedproperty(self, b"nonnormalset")
1509 util.clearcachedproperty(self, b"nonnormalset")
1510 util.clearcachedproperty(self, b"otherparentset")
1510 util.clearcachedproperty(self, b"otherparentset")
1511
1511
    def items(self):
        """Iterate over ``(filename, entry)`` pairs of the map."""
        return pycompat.iteritems(self._map)

    # forward for python2,3 compat
    iteritems = items
1517
1517
    def __len__(self):
        """Return the number of files recorded in the map."""
        return len(self._map)
1520
1520
    def __iter__(self):
        """Iterate over the recorded filenames."""
        return iter(self._map)
1523
1523
1524 def get(self, key, default=None):
1524 def get(self, key, default=None):
1525 return self._map.get(key, default)
1525 return self._map.get(key, default)
1526
1526
    def __contains__(self, key):
        """Return True if ``key`` is a recorded filename."""
        return key in self._map
1529
1529
    def __getitem__(self, key):
        """Return the entry for filename ``key``; raises KeyError if absent."""
        return self._map[key]
1532
1532
    def keys(self):
        """Return a view of the recorded filenames."""
        return self._map.keys()
1535
1535
    def preload(self):
        """Loads the underlying data, if it's not already loaded"""
        # attribute access alone triggers the lazy _map property
        self._map
1539
1539
    def addfile(self, f, oldstate, state, mode, size, mtime):
        """Add a tracked file to the dirstate.

        ``oldstate`` is the entry's previous state character (b'?' for
        "not in the map"); it drives the incremental maintenance of the
        cached directory sets.
        """
        # a file that was untracked or removed newly contributes to the
        # tracked-directory set
        if oldstate in b"?r" and "_dirs" in self.__dict__:
            self._dirs.addpath(f)
        # a previously unknown file also contributes to the all-entries set
        if oldstate == b"?" and "_alldirs" in self.__dict__:
            self._alldirs.addpath(f)
        self._map[f] = dirstatetuple(state, mode, size, mtime)
        # anything not cleanly 'normal' must be re-examined by status
        if state != b'n' or mtime == AMBIGUOUS_TIME:
            self.nonnormalset.add(f)
        # sentinel size marking the entry as coming from the second parent
        if size == FROM_P2:
            self.otherparentset.add(f)
1551
1551
    def removefile(self, f, oldstate, size):
        """
        Mark a file as removed in the dirstate.

        The `size` parameter is used to store sentinel values that indicate
        the file's previous state. In the future, we should refactor this
        to be more explicit about what that state is.
        """
        # the file no longer counts as tracked, so drop it from the
        # tracked-directory set (unless it never was tracked)
        if oldstate not in b"?r" and "_dirs" in self.__dict__:
            self._dirs.delpath(f)
        # a previously unknown file now gains an entry ('r'), so it joins
        # the all-entries directory set
        if oldstate == b"?" and "_alldirs" in self.__dict__:
            self._alldirs.addpath(f)
        if "filefoldmap" in self.__dict__:
            normed = util.normcase(f)
            self.filefoldmap.pop(normed, None)
        self._map[f] = dirstatetuple(b'r', 0, size, 0)
        # removed entries are by definition not 'normal'
        self.nonnormalset.add(f)
1569
1569
1570 def dropfile(self, f, oldstate):
1570 def dropfile(self, f, oldstate):
1571 """
1571 """
1572 Remove a file from the dirstate. Returns True if the file was
1572 Remove a file from the dirstate. Returns True if the file was
1573 previously recorded.
1573 previously recorded.
1574 """
1574 """
1575 exists = self._map.pop(f, None) is not None
1575 exists = self._map.pop(f, None) is not None
1576 if exists:
1576 if exists:
1577 if oldstate != b"r" and "_dirs" in self.__dict__:
1577 if oldstate != b"r" and "_dirs" in self.__dict__:
1578 self._dirs.delpath(f)
1578 self._dirs.delpath(f)
1579 if "_alldirs" in self.__dict__:
1579 if "_alldirs" in self.__dict__:
1580 self._alldirs.delpath(f)
1580 self._alldirs.delpath(f)
1581 if "filefoldmap" in self.__dict__:
1581 if "filefoldmap" in self.__dict__:
1582 normed = util.normcase(f)
1582 normed = util.normcase(f)
1583 self.filefoldmap.pop(normed, None)
1583 self.filefoldmap.pop(normed, None)
1584 self.nonnormalset.discard(f)
1584 self.nonnormalset.discard(f)
1585 return exists
1585 return exists
1586
1586
    def clearambiguoustimes(self, files, now):
        """Mark entries whose mtime equals ``now`` as needing a lookup.

        A 'normal' entry recorded with an mtime equal to the current write
        time could have been modified again within the same second, so its
        mtime is replaced by the AMBIGUOUS_TIME sentinel and the file is
        added to the nonnormal set.
        """
        for f in files:
            e = self.get(f)
            if e is not None and e[0] == b'n' and e[3] == now:
                self._map[f] = dirstatetuple(e[0], e[1], e[2], AMBIGUOUS_TIME)
                self.nonnormalset.add(f)
1593
1593
    def nonnormalentries(self):
        '''Compute the nonnormal dirstate entries from the dmap

        Returns a ``(nonnorm, otherparent)`` pair of sets.  The C parsers
        module provides a fast implementation; the Python loop below is
        the fallback when that helper is unavailable.
        '''
        try:
            return parsers.nonnormalotherparententries(self._map)
        except AttributeError:
            nonnorm = set()
            otherparent = set()
            for fname, e in pycompat.iteritems(self._map):
                # non-'n' state, or ambiguous mtime -> needs lookup
                if e[0] != b'n' or e[3] == AMBIGUOUS_TIME:
                    nonnorm.add(fname)
                # FROM_P2 size sentinel -> entry comes from the 2nd parent
                if e[0] == b'n' and e[2] == FROM_P2:
                    otherparent.add(fname)
            return nonnorm, otherparent
1607
1607
1608 @propertycache
1608 @propertycache
1609 def filefoldmap(self):
1609 def filefoldmap(self):
1610 """Returns a dictionary mapping normalized case paths to their
1610 """Returns a dictionary mapping normalized case paths to their
1611 non-normalized versions.
1611 non-normalized versions.
1612 """
1612 """
1613 try:
1613 try:
1614 makefilefoldmap = parsers.make_file_foldmap
1614 makefilefoldmap = parsers.make_file_foldmap
1615 except AttributeError:
1615 except AttributeError:
1616 pass
1616 pass
1617 else:
1617 else:
1618 return makefilefoldmap(
1618 return makefilefoldmap(
1619 self._map, util.normcasespec, util.normcasefallback
1619 self._map, util.normcasespec, util.normcasefallback
1620 )
1620 )
1621
1621
1622 f = {}
1622 f = {}
1623 normcase = util.normcase
1623 normcase = util.normcase
1624 for name, s in pycompat.iteritems(self._map):
1624 for name, s in pycompat.iteritems(self._map):
1625 if s[0] != b'r':
1625 if s[0] != b'r':
1626 f[normcase(name)] = name
1626 f[normcase(name)] = name
1627 f[b'.'] = b'.' # prevents useless util.fspath() invocation
1627 f[b'.'] = b'.' # prevents useless util.fspath() invocation
1628 return f
1628 return f
1629
1629
    def hastrackeddir(self, d):
        """
        Returns True if the dirstate contains a tracked (not removed) file
        in this directory.
        """
        # `_dirs` is built excluding entries in state 'r' (see `_dirs` below).
        return d in self._dirs
1636
1636
    def hasdir(self, d):
        """
        Returns True if the dirstate contains a file (tracked or removed)
        in this directory.
        """
        # `_alldirs` counts every entry regardless of state.
        return d in self._alldirs
1643
1643
    @propertycache
    def _dirs(self):
        # Directory multiset over tracked files only: the b'r' argument makes
        # pathutil.dirs skip entries in the removed state.
        return pathutil.dirs(self._map, b'r')
1647
1647
    @propertycache
    def _alldirs(self):
        # Directory multiset over every entry, tracked or removed.
        return pathutil.dirs(self._map)
1651
1651
1652 def _opendirstatefile(self):
1652 def _opendirstatefile(self):
1653 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1653 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1654 if self._pendingmode is not None and self._pendingmode != mode:
1654 if self._pendingmode is not None and self._pendingmode != mode:
1655 fp.close()
1655 fp.close()
1656 raise error.Abort(
1656 raise error.Abort(
1657 _(b'working directory state may be changed parallelly')
1657 _(b'working directory state may be changed parallelly')
1658 )
1658 )
1659 self._pendingmode = mode
1659 self._pendingmode = mode
1660 return fp
1660 return fp
1661
1661
    def parents(self):
        """Return the working directory parents as a pair of binary nodeids.

        The pair is read lazily from the first ``2 * _nodelen`` bytes of the
        dirstate file and cached.  A missing or empty file means both
        parents are the null id; any other length aborts as corruption.
        """
        if not self._parents:
            try:
                fp = self._opendirstatefile()
                st = fp.read(2 * self._nodelen)
                fp.close()
            except IOError as err:
                if err.errno != errno.ENOENT:
                    raise
                # File doesn't exist, so the current state is empty
                st = b''

            l = len(st)
            if l == self._nodelen * 2:
                self._parents = (
                    st[: self._nodelen],
                    st[self._nodelen : 2 * self._nodelen],
                )
            elif l == 0:
                self._parents = (
                    self._nodeconstants.nullid,
                    self._nodeconstants.nullid,
                )
            else:
                # A short/odd read means a truncated or corrupted file.
                raise error.Abort(
                    _(b'working directory state appears damaged!')
                )

        return self._parents
1691
1691
1692 def setparents(self, p1, p2):
1692 def setparents(self, p1, p2):
1693 self._parents = (p1, p2)
1693 self._parents = (p1, p2)
1694 self._dirtyparents = True
1694 self._dirtyparents = True
1695
1695
    def read(self):
        """Parse the on-disk dirstate file into ``self._map``.

        Records the file's identity first (used later to detect concurrent
        rewrites), then parses all entries.  Unless parents were already set
        in memory, the parents stored in the file are adopted.  A missing or
        empty file leaves the map untouched.
        """
        # ignore HG_PENDING because identity is used only for writing
        self.identity = util.filestat.frompath(
            self._opener.join(self._filename)
        )

        try:
            fp = self._opendirstatefile()
            try:
                st = fp.read()
            finally:
                fp.close()
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            return
        if not st:
            return

        if util.safehasattr(parsers, b'dict_new_presized'):
            # Make an estimate of the number of files in the dirstate based on
            # its size. This trades wasting some memory for avoiding costly
            # resizes. Each entry have a prefix of 17 bytes followed by one or
            # two path names. Studies on various large-scale real-world repositories
            # found 54 bytes a reasonable upper limit for the average path names.
            # Copy entries are ignored for the sake of this estimate.
            self._map = parsers.dict_new_presized(len(st) // 71)

        # Python's garbage collector triggers a GC each time a certain number
        # of container objects (the number being defined by
        # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
        # for each file in the dirstate. The C version then immediately marks
        # them as not to be tracked by the collector. However, this has no
        # effect on when GCs are triggered, only on what objects the GC looks
        # into. This means that O(number of files) GCs are unavoidable.
        # Depending on when in the process's lifetime the dirstate is parsed,
        # this can get very expensive. As a workaround, disable GC while
        # parsing the dirstate.
        #
        # (we cannot decorate the function directly since it is in a C module)
        parse_dirstate = util.nogc(parsers.parse_dirstate)
        p = parse_dirstate(self._map, self.copymap, st)
        if not self._dirtyparents:
            self.setparents(*p)

        # Avoid excess attribute lookups by fast pathing certain checks
        self.__contains__ = self._map.__contains__
        self.__getitem__ = self._map.__getitem__
        self.get = self._map.get
1745
1745
1746 def write(self, st, now):
1746 def write(self, st, now):
1747 st.write(
1747 st.write(
1748 parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
1748 parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
1749 )
1749 )
1750 st.close()
1750 st.close()
1751 self._dirtyparents = False
1751 self._dirtyparents = False
1752 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1752 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1753
1753
1754 @propertycache
1754 @propertycache
1755 def nonnormalset(self):
1755 def nonnormalset(self):
1756 nonnorm, otherparents = self.nonnormalentries()
1756 nonnorm, otherparents = self.nonnormalentries()
1757 self.otherparentset = otherparents
1757 self.otherparentset = otherparents
1758 return nonnorm
1758 return nonnorm
1759
1759
1760 @propertycache
1760 @propertycache
1761 def otherparentset(self):
1761 def otherparentset(self):
1762 nonnorm, otherparents = self.nonnormalentries()
1762 nonnorm, otherparents = self.nonnormalentries()
1763 self.nonnormalset = nonnorm
1763 self.nonnormalset = nonnorm
1764 return otherparents
1764 return otherparents
1765
1765
1766 def non_normal_or_other_parent_paths(self):
1766 def non_normal_or_other_parent_paths(self):
1767 return self.nonnormalset.union(self.otherparentset)
1767 return self.nonnormalset.union(self.otherparentset)
1768
1768
    @propertycache
    def identity(self):
        # Touching `_map` presumably triggers the lazy load path, whose
        # `read()` assigns `self.identity` as a side effect, shadowing this
        # property — TODO(review): confirm `_map` access routes through read().
        self._map
        return self.identity
1773
1773
1774 @propertycache
1774 @propertycache
1775 def dirfoldmap(self):
1775 def dirfoldmap(self):
1776 f = {}
1776 f = {}
1777 normcase = util.normcase
1777 normcase = util.normcase
1778 for name in self._dirs:
1778 for name in self._dirs:
1779 f[normcase(name)] = name
1779 f[normcase(name)] = name
1780 return f
1780 return f
1781
1781
1782
1782
if rustmod is not None:

    class dirstatemap(object):
        """Rust-backed implementation of the dirstate map.

        Most operations are thin wrappers delegating to the
        ``rustmod.DirstateMap`` instance cached in ``_rustmap``, which is
        loaded lazily from the on-disk dirstate file on first access.
        """

        def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
            self._use_dirstate_v2 = use_dirstate_v2
            self._nodeconstants = nodeconstants
            self._ui = ui
            self._opener = opener
            self._root = root
            self._filename = b'dirstate'
            self._nodelen = 20  # Also update Rust code when changing this!
            self._parents = None
            self._dirtyparents = False

            # for consistent view between _pl() and _read() invocations
            self._pendingmode = None

            self._use_dirstate_tree = self._ui.configbool(
                b"experimental",
                b"dirstate-tree.in-memory",
                False,
            )

        def addfile(self, *args, **kwargs):
            return self._rustmap.addfile(*args, **kwargs)

        def removefile(self, *args, **kwargs):
            return self._rustmap.removefile(*args, **kwargs)

        def dropfile(self, *args, **kwargs):
            return self._rustmap.dropfile(*args, **kwargs)

        def clearambiguoustimes(self, *args, **kwargs):
            return self._rustmap.clearambiguoustimes(*args, **kwargs)

        def nonnormalentries(self):
            return self._rustmap.nonnormalentries()

        def get(self, *args, **kwargs):
            return self._rustmap.get(*args, **kwargs)

        @property
        def copymap(self):
            return self._rustmap.copymap()

        def directories(self):
            return self._rustmap.directories()

        def preload(self):
            # Touching `_rustmap` forces the lazy load of the dirstate file.
            self._rustmap

        def clear(self):
            # Reset the map, the parents, and every cached derived structure.
            self._rustmap.clear()
            self.setparents(
                self._nodeconstants.nullid, self._nodeconstants.nullid
            )
            util.clearcachedproperty(self, b"_dirs")
            util.clearcachedproperty(self, b"_alldirs")
            util.clearcachedproperty(self, b"dirfoldmap")

        def items(self):
            return self._rustmap.items()

        def keys(self):
            return iter(self._rustmap)

        def __contains__(self, key):
            return key in self._rustmap

        def __getitem__(self, item):
            return self._rustmap[item]

        def __len__(self):
            return len(self._rustmap)

        def __iter__(self):
            return iter(self._rustmap)

        # forward for python2,3 compat
        iteritems = items

        def _opendirstatefile(self):
            # Same pending-transaction handling as the pure-Python map:
            # abort on a mix of pending and non-pending reads.
            fp, mode = txnutil.trypending(
                self._root, self._opener, self._filename
            )
            if self._pendingmode is not None and self._pendingmode != mode:
                fp.close()
                raise error.Abort(
                    _(b'working directory state may be changed parallelly')
                )
            self._pendingmode = mode
            return fp

        def setparents(self, p1, p2):
            # In-memory only; flushed by write().
            self._parents = (p1, p2)
            self._dirtyparents = True

        def parents(self):
            """Return the working-directory parents, reading them lazily
            from the head of the dirstate file (past the v2 marker when the
            v2 format is in use)."""
            if not self._parents:
                if self._use_dirstate_v2:
                    offset = len(rustmod.V2_FORMAT_MARKER)
                else:
                    offset = 0
                read_len = offset + self._nodelen * 2
                try:
                    fp = self._opendirstatefile()
                    st = fp.read(read_len)
                    fp.close()
                except IOError as err:
                    if err.errno != errno.ENOENT:
                        raise
                    # File doesn't exist, so the current state is empty
                    st = b''

                l = len(st)
                if l == read_len:
                    st = st[offset:]
                    self._parents = (
                        st[: self._nodelen],
                        st[self._nodelen : 2 * self._nodelen],
                    )
                elif l == 0:
                    self._parents = (
                        self._nodeconstants.nullid,
                        self._nodeconstants.nullid,
                    )
                else:
                    # Any other length means truncation/corruption.
                    raise error.Abort(
                        _(b'working directory state appears damaged!')
                    )

            return self._parents

        @propertycache
        def _rustmap(self):
            """
            Fills the Dirstatemap when called.
            """
            # ignore HG_PENDING because identity is used only for writing
            self.identity = util.filestat.frompath(
                self._opener.join(self._filename)
            )

            try:
                fp = self._opendirstatefile()
                try:
                    st = fp.read()
                finally:
                    fp.close()
            except IOError as err:
                if err.errno != errno.ENOENT:
                    raise
                st = b''

            # The assignment below replaces this propertycache entry, so
            # subsequent accesses hit the instance attribute directly.
            self._rustmap, parents = rustmod.DirstateMap.new(
                self._use_dirstate_tree, self._use_dirstate_v2, st
            )

            if parents and not self._dirtyparents:
                self.setparents(*parents)

            # Avoid excess attribute lookups by fast pathing certain checks
            self.__contains__ = self._rustmap.__contains__
            self.__getitem__ = self._rustmap.__getitem__
            self.get = self._rustmap.get
            return self._rustmap

        def write(self, st, now):
            # Pack through the Rust map (v1 or v2 format) and mark clean.
            parents = self.parents()
            packed = self._rustmap.write(
                self._use_dirstate_v2, parents[0], parents[1], now
            )
            st.write(packed)
            st.close()
            self._dirtyparents = False

        @propertycache
        def filefoldmap(self):
            """Returns a dictionary mapping normalized case paths to their
            non-normalized versions.
            """
            return self._rustmap.filefoldmapasdict()

        def hastrackeddir(self, d):
            return self._rustmap.hastrackeddir(d)

        def hasdir(self, d):
            return self._rustmap.hasdir(d)

        @propertycache
        def identity(self):
            # Loading `_rustmap` assigns `self.identity` as a side effect
            # (see `_rustmap` above), shadowing this property.
            self._rustmap
            return self.identity

        @property
        def nonnormalset(self):
            nonnorm = self._rustmap.non_normal_entries()
            return nonnorm

        @propertycache
        def otherparentset(self):
            otherparents = self._rustmap.other_parent_entries()
            return otherparents

        def non_normal_or_other_parent_paths(self):
            return self._rustmap.non_normal_or_other_parent_paths()

        @propertycache
        def dirfoldmap(self):
            # Map normalized-case directory names to their recorded spelling.
            f = {}
            normcase = util.normcase
            for name, _pseudo_entry in self.directories():
                f[normcase(name)] = name
            return f
General Comments 0
You need to be logged in to leave comments. Login now