##// END OF EJS Templates
dirstate: add an explicit `from_p2` parameter to `_addpath`...
marmoute -
r48281:1f571077 default
parent child Browse files
Show More
@@ -1,1995 +1,2008 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .pycompat import delattr
17 from .pycompat import delattr
18
18
19 from hgdemandimport import tracing
19 from hgdemandimport import tracing
20
20
21 from . import (
21 from . import (
22 encoding,
22 encoding,
23 error,
23 error,
24 match as matchmod,
24 match as matchmod,
25 pathutil,
25 pathutil,
26 policy,
26 policy,
27 pycompat,
27 pycompat,
28 scmutil,
28 scmutil,
29 sparse,
29 sparse,
30 txnutil,
30 txnutil,
31 util,
31 util,
32 )
32 )
33
33
34 from .interfaces import (
34 from .interfaces import (
35 dirstate as intdirstate,
35 dirstate as intdirstate,
36 util as interfaceutil,
36 util as interfaceutil,
37 )
37 )
38
38
parsers = policy.importmod('parsers')
rustmod = policy.importrust('dirstate')

# dirstate-v2 requires the Rust extensions
SUPPORTS_DIRSTATE_V2 = rustmod is not None

propertycache = util.propertycache
filecache = scmutil.filecache
# mask used to fold size/mtime into the 31 bits stored on disk
_rangemask = 0x7FFFFFFF

dirstatetuple = parsers.dirstatetuple


# a special value used internally for `size` if the file comes from the other parent
FROM_P2 = -2

# a special value used internally for `size` if the file is modified/merged/added
NONNORMAL = -1

# a special value used internally for `time` if the time is ambiguous
AMBIGUOUS_TIME = -1
59
59
60
60
class repocache(filecache):
    """filecache for files in .hg/"""

    def join(self, obj, fname):
        # resolve fname relative to the repository's .hg/ directory
        return obj._opener.join(fname)
66
66
67
67
class rootcache(filecache):
    """filecache for files in the repository root"""

    def join(self, obj, fname):
        # resolve fname relative to the working-directory root
        return obj._join(fname)
73
73
74
74
75 def _getfsnow(vfs):
75 def _getfsnow(vfs):
76 '''Get "now" timestamp on filesystem'''
76 '''Get "now" timestamp on filesystem'''
77 tmpfd, tmpname = vfs.mkstemp()
77 tmpfd, tmpname = vfs.mkstemp()
78 try:
78 try:
79 return os.fstat(tmpfd)[stat.ST_MTIME]
79 return os.fstat(tmpfd)[stat.ST_MTIME]
80 finally:
80 finally:
81 os.close(tmpfd)
81 os.close(tmpfd)
82 vfs.unlink(tmpname)
82 vfs.unlink(tmpname)
83
83
84
84
85 @interfaceutil.implementer(intdirstate.idirstate)
85 @interfaceutil.implementer(intdirstate.idirstate)
86 class dirstate(object):
86 class dirstate(object):
87 def __init__(
87 def __init__(
88 self,
88 self,
89 opener,
89 opener,
90 ui,
90 ui,
91 root,
91 root,
92 validate,
92 validate,
93 sparsematchfn,
93 sparsematchfn,
94 nodeconstants,
94 nodeconstants,
95 use_dirstate_v2,
95 use_dirstate_v2,
96 ):
96 ):
97 """Create a new dirstate object.
97 """Create a new dirstate object.
98
98
99 opener is an open()-like callable that can be used to open the
99 opener is an open()-like callable that can be used to open the
100 dirstate file; root is the root of the directory tracked by
100 dirstate file; root is the root of the directory tracked by
101 the dirstate.
101 the dirstate.
102 """
102 """
103 self._use_dirstate_v2 = use_dirstate_v2
103 self._use_dirstate_v2 = use_dirstate_v2
104 self._nodeconstants = nodeconstants
104 self._nodeconstants = nodeconstants
105 self._opener = opener
105 self._opener = opener
106 self._validate = validate
106 self._validate = validate
107 self._root = root
107 self._root = root
108 self._sparsematchfn = sparsematchfn
108 self._sparsematchfn = sparsematchfn
109 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
109 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
110 # UNC path pointing to root share (issue4557)
110 # UNC path pointing to root share (issue4557)
111 self._rootdir = pathutil.normasprefix(root)
111 self._rootdir = pathutil.normasprefix(root)
112 self._dirty = False
112 self._dirty = False
113 self._lastnormaltime = 0
113 self._lastnormaltime = 0
114 self._ui = ui
114 self._ui = ui
115 self._filecache = {}
115 self._filecache = {}
116 self._parentwriters = 0
116 self._parentwriters = 0
117 self._filename = b'dirstate'
117 self._filename = b'dirstate'
118 self._pendingfilename = b'%s.pending' % self._filename
118 self._pendingfilename = b'%s.pending' % self._filename
119 self._plchangecallbacks = {}
119 self._plchangecallbacks = {}
120 self._origpl = None
120 self._origpl = None
121 self._updatedfiles = set()
121 self._updatedfiles = set()
122 self._mapcls = dirstatemap
122 self._mapcls = dirstatemap
123 # Access and cache cwd early, so we don't access it for the first time
123 # Access and cache cwd early, so we don't access it for the first time
124 # after a working-copy update caused it to not exist (accessing it then
124 # after a working-copy update caused it to not exist (accessing it then
125 # raises an exception).
125 # raises an exception).
126 self._cwd
126 self._cwd
127
127
128 def prefetch_parents(self):
128 def prefetch_parents(self):
129 """make sure the parents are loaded
129 """make sure the parents are loaded
130
130
131 Used to avoid a race condition.
131 Used to avoid a race condition.
132 """
132 """
133 self._pl
133 self._pl
134
134
135 @contextlib.contextmanager
135 @contextlib.contextmanager
136 def parentchange(self):
136 def parentchange(self):
137 """Context manager for handling dirstate parents.
137 """Context manager for handling dirstate parents.
138
138
139 If an exception occurs in the scope of the context manager,
139 If an exception occurs in the scope of the context manager,
140 the incoherent dirstate won't be written when wlock is
140 the incoherent dirstate won't be written when wlock is
141 released.
141 released.
142 """
142 """
143 self._parentwriters += 1
143 self._parentwriters += 1
144 yield
144 yield
145 # Typically we want the "undo" step of a context manager in a
145 # Typically we want the "undo" step of a context manager in a
146 # finally block so it happens even when an exception
146 # finally block so it happens even when an exception
147 # occurs. In this case, however, we only want to decrement
147 # occurs. In this case, however, we only want to decrement
148 # parentwriters if the code in the with statement exits
148 # parentwriters if the code in the with statement exits
149 # normally, so we don't have a try/finally here on purpose.
149 # normally, so we don't have a try/finally here on purpose.
150 self._parentwriters -= 1
150 self._parentwriters -= 1
151
151
152 def pendingparentchange(self):
152 def pendingparentchange(self):
153 """Returns true if the dirstate is in the middle of a set of changes
153 """Returns true if the dirstate is in the middle of a set of changes
154 that modify the dirstate parent.
154 that modify the dirstate parent.
155 """
155 """
156 return self._parentwriters > 0
156 return self._parentwriters > 0
157
157
158 @propertycache
158 @propertycache
159 def _map(self):
159 def _map(self):
160 """Return the dirstate contents (see documentation for dirstatemap)."""
160 """Return the dirstate contents (see documentation for dirstatemap)."""
161 self._map = self._mapcls(
161 self._map = self._mapcls(
162 self._ui,
162 self._ui,
163 self._opener,
163 self._opener,
164 self._root,
164 self._root,
165 self._nodeconstants,
165 self._nodeconstants,
166 self._use_dirstate_v2,
166 self._use_dirstate_v2,
167 )
167 )
168 return self._map
168 return self._map
169
169
170 @property
170 @property
171 def _sparsematcher(self):
171 def _sparsematcher(self):
172 """The matcher for the sparse checkout.
172 """The matcher for the sparse checkout.
173
173
174 The working directory may not include every file from a manifest. The
174 The working directory may not include every file from a manifest. The
175 matcher obtained by this property will match a path if it is to be
175 matcher obtained by this property will match a path if it is to be
176 included in the working directory.
176 included in the working directory.
177 """
177 """
178 # TODO there is potential to cache this property. For now, the matcher
178 # TODO there is potential to cache this property. For now, the matcher
179 # is resolved on every access. (But the called function does use a
179 # is resolved on every access. (But the called function does use a
180 # cache to keep the lookup fast.)
180 # cache to keep the lookup fast.)
181 return self._sparsematchfn()
181 return self._sparsematchfn()
182
182
183 @repocache(b'branch')
183 @repocache(b'branch')
184 def _branch(self):
184 def _branch(self):
185 try:
185 try:
186 return self._opener.read(b"branch").strip() or b"default"
186 return self._opener.read(b"branch").strip() or b"default"
187 except IOError as inst:
187 except IOError as inst:
188 if inst.errno != errno.ENOENT:
188 if inst.errno != errno.ENOENT:
189 raise
189 raise
190 return b"default"
190 return b"default"
191
191
192 @property
192 @property
193 def _pl(self):
193 def _pl(self):
194 return self._map.parents()
194 return self._map.parents()
195
195
196 def hasdir(self, d):
196 def hasdir(self, d):
197 return self._map.hastrackeddir(d)
197 return self._map.hastrackeddir(d)
198
198
199 @rootcache(b'.hgignore')
199 @rootcache(b'.hgignore')
200 def _ignore(self):
200 def _ignore(self):
201 files = self._ignorefiles()
201 files = self._ignorefiles()
202 if not files:
202 if not files:
203 return matchmod.never()
203 return matchmod.never()
204
204
205 pats = [b'include:%s' % f for f in files]
205 pats = [b'include:%s' % f for f in files]
206 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
206 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
207
207
208 @propertycache
208 @propertycache
209 def _slash(self):
209 def _slash(self):
210 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
210 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
211
211
212 @propertycache
212 @propertycache
213 def _checklink(self):
213 def _checklink(self):
214 return util.checklink(self._root)
214 return util.checklink(self._root)
215
215
216 @propertycache
216 @propertycache
217 def _checkexec(self):
217 def _checkexec(self):
218 return bool(util.checkexec(self._root))
218 return bool(util.checkexec(self._root))
219
219
220 @propertycache
220 @propertycache
221 def _checkcase(self):
221 def _checkcase(self):
222 return not util.fscasesensitive(self._join(b'.hg'))
222 return not util.fscasesensitive(self._join(b'.hg'))
223
223
224 def _join(self, f):
224 def _join(self, f):
225 # much faster than os.path.join()
225 # much faster than os.path.join()
226 # it's safe because f is always a relative path
226 # it's safe because f is always a relative path
227 return self._rootdir + f
227 return self._rootdir + f
228
228
229 def flagfunc(self, buildfallback):
229 def flagfunc(self, buildfallback):
230 if self._checklink and self._checkexec:
230 if self._checklink and self._checkexec:
231
231
232 def f(x):
232 def f(x):
233 try:
233 try:
234 st = os.lstat(self._join(x))
234 st = os.lstat(self._join(x))
235 if util.statislink(st):
235 if util.statislink(st):
236 return b'l'
236 return b'l'
237 if util.statisexec(st):
237 if util.statisexec(st):
238 return b'x'
238 return b'x'
239 except OSError:
239 except OSError:
240 pass
240 pass
241 return b''
241 return b''
242
242
243 return f
243 return f
244
244
245 fallback = buildfallback()
245 fallback = buildfallback()
246 if self._checklink:
246 if self._checklink:
247
247
248 def f(x):
248 def f(x):
249 if os.path.islink(self._join(x)):
249 if os.path.islink(self._join(x)):
250 return b'l'
250 return b'l'
251 if b'x' in fallback(x):
251 if b'x' in fallback(x):
252 return b'x'
252 return b'x'
253 return b''
253 return b''
254
254
255 return f
255 return f
256 if self._checkexec:
256 if self._checkexec:
257
257
258 def f(x):
258 def f(x):
259 if b'l' in fallback(x):
259 if b'l' in fallback(x):
260 return b'l'
260 return b'l'
261 if util.isexec(self._join(x)):
261 if util.isexec(self._join(x)):
262 return b'x'
262 return b'x'
263 return b''
263 return b''
264
264
265 return f
265 return f
266 else:
266 else:
267 return fallback
267 return fallback
268
268
269 @propertycache
269 @propertycache
270 def _cwd(self):
270 def _cwd(self):
271 # internal config: ui.forcecwd
271 # internal config: ui.forcecwd
272 forcecwd = self._ui.config(b'ui', b'forcecwd')
272 forcecwd = self._ui.config(b'ui', b'forcecwd')
273 if forcecwd:
273 if forcecwd:
274 return forcecwd
274 return forcecwd
275 return encoding.getcwd()
275 return encoding.getcwd()
276
276
277 def getcwd(self):
277 def getcwd(self):
278 """Return the path from which a canonical path is calculated.
278 """Return the path from which a canonical path is calculated.
279
279
280 This path should be used to resolve file patterns or to convert
280 This path should be used to resolve file patterns or to convert
281 canonical paths back to file paths for display. It shouldn't be
281 canonical paths back to file paths for display. It shouldn't be
282 used to get real file paths. Use vfs functions instead.
282 used to get real file paths. Use vfs functions instead.
283 """
283 """
284 cwd = self._cwd
284 cwd = self._cwd
285 if cwd == self._root:
285 if cwd == self._root:
286 return b''
286 return b''
287 # self._root ends with a path separator if self._root is '/' or 'C:\'
287 # self._root ends with a path separator if self._root is '/' or 'C:\'
288 rootsep = self._root
288 rootsep = self._root
289 if not util.endswithsep(rootsep):
289 if not util.endswithsep(rootsep):
290 rootsep += pycompat.ossep
290 rootsep += pycompat.ossep
291 if cwd.startswith(rootsep):
291 if cwd.startswith(rootsep):
292 return cwd[len(rootsep) :]
292 return cwd[len(rootsep) :]
293 else:
293 else:
294 # we're outside the repo. return an absolute path.
294 # we're outside the repo. return an absolute path.
295 return cwd
295 return cwd
296
296
297 def pathto(self, f, cwd=None):
297 def pathto(self, f, cwd=None):
298 if cwd is None:
298 if cwd is None:
299 cwd = self.getcwd()
299 cwd = self.getcwd()
300 path = util.pathto(self._root, cwd, f)
300 path = util.pathto(self._root, cwd, f)
301 if self._slash:
301 if self._slash:
302 return util.pconvert(path)
302 return util.pconvert(path)
303 return path
303 return path
304
304
305 def __getitem__(self, key):
305 def __getitem__(self, key):
306 """Return the current state of key (a filename) in the dirstate.
306 """Return the current state of key (a filename) in the dirstate.
307
307
308 States are:
308 States are:
309 n normal
309 n normal
310 m needs merging
310 m needs merging
311 r marked for removal
311 r marked for removal
312 a marked for addition
312 a marked for addition
313 ? not tracked
313 ? not tracked
314 """
314 """
315 return self._map.get(key, (b"?",))[0]
315 return self._map.get(key, (b"?",))[0]
316
316
317 def __contains__(self, key):
317 def __contains__(self, key):
318 return key in self._map
318 return key in self._map
319
319
320 def __iter__(self):
320 def __iter__(self):
321 return iter(sorted(self._map))
321 return iter(sorted(self._map))
322
322
323 def items(self):
323 def items(self):
324 return pycompat.iteritems(self._map)
324 return pycompat.iteritems(self._map)
325
325
326 iteritems = items
326 iteritems = items
327
327
328 def directories(self):
328 def directories(self):
329 return self._map.directories()
329 return self._map.directories()
330
330
331 def parents(self):
331 def parents(self):
332 return [self._validate(p) for p in self._pl]
332 return [self._validate(p) for p in self._pl]
333
333
334 def p1(self):
334 def p1(self):
335 return self._validate(self._pl[0])
335 return self._validate(self._pl[0])
336
336
337 def p2(self):
337 def p2(self):
338 return self._validate(self._pl[1])
338 return self._validate(self._pl[1])
339
339
340 def branch(self):
340 def branch(self):
341 return encoding.tolocal(self._branch)
341 return encoding.tolocal(self._branch)
342
342
343 def setparents(self, p1, p2=None):
343 def setparents(self, p1, p2=None):
344 """Set dirstate parents to p1 and p2.
344 """Set dirstate parents to p1 and p2.
345
345
346 When moving from two parents to one, 'm' merged entries a
346 When moving from two parents to one, 'm' merged entries a
347 adjusted to normal and previous copy records discarded and
347 adjusted to normal and previous copy records discarded and
348 returned by the call.
348 returned by the call.
349
349
350 See localrepo.setparents()
350 See localrepo.setparents()
351 """
351 """
352 if p2 is None:
352 if p2 is None:
353 p2 = self._nodeconstants.nullid
353 p2 = self._nodeconstants.nullid
354 if self._parentwriters == 0:
354 if self._parentwriters == 0:
355 raise ValueError(
355 raise ValueError(
356 b"cannot set dirstate parent outside of "
356 b"cannot set dirstate parent outside of "
357 b"dirstate.parentchange context manager"
357 b"dirstate.parentchange context manager"
358 )
358 )
359
359
360 self._dirty = True
360 self._dirty = True
361 oldp2 = self._pl[1]
361 oldp2 = self._pl[1]
362 if self._origpl is None:
362 if self._origpl is None:
363 self._origpl = self._pl
363 self._origpl = self._pl
364 self._map.setparents(p1, p2)
364 self._map.setparents(p1, p2)
365 copies = {}
365 copies = {}
366 if (
366 if (
367 oldp2 != self._nodeconstants.nullid
367 oldp2 != self._nodeconstants.nullid
368 and p2 == self._nodeconstants.nullid
368 and p2 == self._nodeconstants.nullid
369 ):
369 ):
370 candidatefiles = self._map.non_normal_or_other_parent_paths()
370 candidatefiles = self._map.non_normal_or_other_parent_paths()
371
371
372 for f in candidatefiles:
372 for f in candidatefiles:
373 s = self._map.get(f)
373 s = self._map.get(f)
374 if s is None:
374 if s is None:
375 continue
375 continue
376
376
377 # Discard 'm' markers when moving away from a merge state
377 # Discard 'm' markers when moving away from a merge state
378 if s[0] == b'm':
378 if s[0] == b'm':
379 source = self._map.copymap.get(f)
379 source = self._map.copymap.get(f)
380 if source:
380 if source:
381 copies[f] = source
381 copies[f] = source
382 self.normallookup(f)
382 self.normallookup(f)
383 # Also fix up otherparent markers
383 # Also fix up otherparent markers
384 elif s[0] == b'n' and s[2] == FROM_P2:
384 elif s[0] == b'n' and s[2] == FROM_P2:
385 source = self._map.copymap.get(f)
385 source = self._map.copymap.get(f)
386 if source:
386 if source:
387 copies[f] = source
387 copies[f] = source
388 self.add(f)
388 self.add(f)
389 return copies
389 return copies
390
390
391 def setbranch(self, branch):
391 def setbranch(self, branch):
392 self.__class__._branch.set(self, encoding.fromlocal(branch))
392 self.__class__._branch.set(self, encoding.fromlocal(branch))
393 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
393 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
394 try:
394 try:
395 f.write(self._branch + b'\n')
395 f.write(self._branch + b'\n')
396 f.close()
396 f.close()
397
397
398 # make sure filecache has the correct stat info for _branch after
398 # make sure filecache has the correct stat info for _branch after
399 # replacing the underlying file
399 # replacing the underlying file
400 ce = self._filecache[b'_branch']
400 ce = self._filecache[b'_branch']
401 if ce:
401 if ce:
402 ce.refresh()
402 ce.refresh()
403 except: # re-raises
403 except: # re-raises
404 f.discard()
404 f.discard()
405 raise
405 raise
406
406
407 def invalidate(self):
407 def invalidate(self):
408 """Causes the next access to reread the dirstate.
408 """Causes the next access to reread the dirstate.
409
409
410 This is different from localrepo.invalidatedirstate() because it always
410 This is different from localrepo.invalidatedirstate() because it always
411 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
411 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
412 check whether the dirstate has changed before rereading it."""
412 check whether the dirstate has changed before rereading it."""
413
413
414 for a in ("_map", "_branch", "_ignore"):
414 for a in ("_map", "_branch", "_ignore"):
415 if a in self.__dict__:
415 if a in self.__dict__:
416 delattr(self, a)
416 delattr(self, a)
417 self._lastnormaltime = 0
417 self._lastnormaltime = 0
418 self._dirty = False
418 self._dirty = False
419 self._updatedfiles.clear()
419 self._updatedfiles.clear()
420 self._parentwriters = 0
420 self._parentwriters = 0
421 self._origpl = None
421 self._origpl = None
422
422
423 def copy(self, source, dest):
423 def copy(self, source, dest):
424 """Mark dest as a copy of source. Unmark dest if source is None."""
424 """Mark dest as a copy of source. Unmark dest if source is None."""
425 if source == dest:
425 if source == dest:
426 return
426 return
427 self._dirty = True
427 self._dirty = True
428 if source is not None:
428 if source is not None:
429 self._map.copymap[dest] = source
429 self._map.copymap[dest] = source
430 self._updatedfiles.add(source)
430 self._updatedfiles.add(source)
431 self._updatedfiles.add(dest)
431 self._updatedfiles.add(dest)
432 elif self._map.copymap.pop(dest, None):
432 elif self._map.copymap.pop(dest, None):
433 self._updatedfiles.add(dest)
433 self._updatedfiles.add(dest)
434
434
435 def copied(self, file):
435 def copied(self, file):
436 return self._map.copymap.get(file, None)
436 return self._map.copymap.get(file, None)
437
437
438 def copies(self):
438 def copies(self):
439 return self._map.copymap
439 return self._map.copymap
440
440
441 def _addpath(self, f, state, mode, size=NONNORMAL, mtime=AMBIGUOUS_TIME):
441 def _addpath(
442 self,
443 f,
444 state,
445 mode,
446 size=NONNORMAL,
447 mtime=AMBIGUOUS_TIME,
448 from_p2=False,
449 ):
442 oldstate = self[f]
450 oldstate = self[f]
443 if state == b'a' or oldstate == b'r':
451 if state == b'a' or oldstate == b'r':
444 scmutil.checkfilename(f)
452 scmutil.checkfilename(f)
445 if self._map.hastrackeddir(f):
453 if self._map.hastrackeddir(f):
446 msg = _(b'directory %r already in dirstate')
454 msg = _(b'directory %r already in dirstate')
447 msg %= pycompat.bytestr(f)
455 msg %= pycompat.bytestr(f)
448 raise error.Abort(msg)
456 raise error.Abort(msg)
449 # shadows
457 # shadows
450 for d in pathutil.finddirs(f):
458 for d in pathutil.finddirs(f):
451 if self._map.hastrackeddir(d):
459 if self._map.hastrackeddir(d):
452 break
460 break
453 entry = self._map.get(d)
461 entry = self._map.get(d)
454 if entry is not None and entry[0] != b'r':
462 if entry is not None and entry[0] != b'r':
455 msg = _(b'file %r in dirstate clashes with %r')
463 msg = _(b'file %r in dirstate clashes with %r')
456 msg %= (pycompat.bytestr(d), pycompat.bytestr(f))
464 msg %= (pycompat.bytestr(d), pycompat.bytestr(f))
457 raise error.Abort(msg)
465 raise error.Abort(msg)
458 if size != NONNORMAL and size != FROM_P2:
466 if from_p2:
459 size = size & _rangemask
467 size = FROM_P2
460 if mtime != AMBIGUOUS_TIME:
468 mtime = AMBIGUOUS_TIME
461 mtime = mtime & _rangemask
469 else:
470 assert size != FROM_P2
471 if size != NONNORMAL:
472 size = size & _rangemask
473 if mtime != AMBIGUOUS_TIME:
474 mtime = mtime & _rangemask
462 self._dirty = True
475 self._dirty = True
463 self._updatedfiles.add(f)
476 self._updatedfiles.add(f)
464 self._map.addfile(f, oldstate, state, mode, size, mtime)
477 self._map.addfile(f, oldstate, state, mode, size, mtime)
465
478
466 def normal(self, f, parentfiledata=None):
479 def normal(self, f, parentfiledata=None):
467 """Mark a file normal and clean.
480 """Mark a file normal and clean.
468
481
469 parentfiledata: (mode, size, mtime) of the clean file
482 parentfiledata: (mode, size, mtime) of the clean file
470
483
471 parentfiledata should be computed from memory (for mode,
484 parentfiledata should be computed from memory (for mode,
472 size), as or close as possible from the point where we
485 size), as or close as possible from the point where we
473 determined the file was clean, to limit the risk of the
486 determined the file was clean, to limit the risk of the
474 file having been changed by an external process between the
487 file having been changed by an external process between the
475 moment where the file was determined to be clean and now."""
488 moment where the file was determined to be clean and now."""
476 if parentfiledata:
489 if parentfiledata:
477 (mode, size, mtime) = parentfiledata
490 (mode, size, mtime) = parentfiledata
478 else:
491 else:
479 s = os.lstat(self._join(f))
492 s = os.lstat(self._join(f))
480 mode = s.st_mode
493 mode = s.st_mode
481 size = s.st_size
494 size = s.st_size
482 mtime = s[stat.ST_MTIME]
495 mtime = s[stat.ST_MTIME]
483 self._addpath(f, b'n', mode, size, mtime)
496 self._addpath(f, b'n', mode, size, mtime)
484 self._map.copymap.pop(f, None)
497 self._map.copymap.pop(f, None)
485 if f in self._map.nonnormalset:
498 if f in self._map.nonnormalset:
486 self._map.nonnormalset.remove(f)
499 self._map.nonnormalset.remove(f)
487 if mtime > self._lastnormaltime:
500 if mtime > self._lastnormaltime:
488 # Remember the most recent modification timeslot for status(),
501 # Remember the most recent modification timeslot for status(),
489 # to make sure we won't miss future size-preserving file content
502 # to make sure we won't miss future size-preserving file content
490 # modifications that happen within the same timeslot.
503 # modifications that happen within the same timeslot.
491 self._lastnormaltime = mtime
504 self._lastnormaltime = mtime
492
505
493 def normallookup(self, f):
506 def normallookup(self, f):
494 '''Mark a file normal, but possibly dirty.'''
507 '''Mark a file normal, but possibly dirty.'''
495 if self._pl[1] != self._nodeconstants.nullid:
508 if self._pl[1] != self._nodeconstants.nullid:
496 # if there is a merge going on and the file was either
509 # if there is a merge going on and the file was either
497 # in state 'm' (-1) or coming from other parent (-2) before
510 # in state 'm' (-1) or coming from other parent (-2) before
498 # being removed, restore that state.
511 # being removed, restore that state.
499 entry = self._map.get(f)
512 entry = self._map.get(f)
500 if entry is not None:
513 if entry is not None:
501 if entry[0] == b'r' and entry[2] in (NONNORMAL, FROM_P2):
514 if entry[0] == b'r' and entry[2] in (NONNORMAL, FROM_P2):
502 source = self._map.copymap.get(f)
515 source = self._map.copymap.get(f)
503 if entry[2] == NONNORMAL:
516 if entry[2] == NONNORMAL:
504 self.merge(f)
517 self.merge(f)
505 elif entry[2] == FROM_P2:
518 elif entry[2] == FROM_P2:
506 self.otherparent(f)
519 self.otherparent(f)
507 if source:
520 if source:
508 self.copy(source, f)
521 self.copy(source, f)
509 return
522 return
510 if entry[0] == b'm' or entry[0] == b'n' and entry[2] == FROM_P2:
523 if entry[0] == b'm' or entry[0] == b'n' and entry[2] == FROM_P2:
511 return
524 return
512 self._addpath(f, b'n', 0)
525 self._addpath(f, b'n', 0)
513 self._map.copymap.pop(f, None)
526 self._map.copymap.pop(f, None)
514
527
515 def otherparent(self, f):
528 def otherparent(self, f):
516 '''Mark as coming from the other parent, always dirty.'''
529 '''Mark as coming from the other parent, always dirty.'''
517 if self._pl[1] == self._nodeconstants.nullid:
530 if self._pl[1] == self._nodeconstants.nullid:
518 msg = _(b"setting %r to other parent only allowed in merges") % f
531 msg = _(b"setting %r to other parent only allowed in merges") % f
519 raise error.Abort(msg)
532 raise error.Abort(msg)
520 if f in self and self[f] == b'n':
533 if f in self and self[f] == b'n':
521 # merge-like
534 # merge-like
522 self._addpath(f, b'm', 0, FROM_P2)
535 self._addpath(f, b'm', 0, from_p2=True)
523 else:
536 else:
524 # add-like
537 # add-like
525 self._addpath(f, b'n', 0, FROM_P2)
538 self._addpath(f, b'n', 0, from_p2=True)
526 self._map.copymap.pop(f, None)
539 self._map.copymap.pop(f, None)
527
540
528 def add(self, f):
541 def add(self, f):
529 '''Mark a file added.'''
542 '''Mark a file added.'''
530 self._addpath(f, b'a', 0)
543 self._addpath(f, b'a', 0)
531 self._map.copymap.pop(f, None)
544 self._map.copymap.pop(f, None)
532
545
533 def remove(self, f):
546 def remove(self, f):
534 '''Mark a file removed.'''
547 '''Mark a file removed.'''
535 self._dirty = True
548 self._dirty = True
536 oldstate = self[f]
549 oldstate = self[f]
537 size = 0
550 size = 0
538 if self._pl[1] != self._nodeconstants.nullid:
551 if self._pl[1] != self._nodeconstants.nullid:
539 entry = self._map.get(f)
552 entry = self._map.get(f)
540 if entry is not None:
553 if entry is not None:
541 # backup the previous state
554 # backup the previous state
542 if entry[0] == b'm': # merge
555 if entry[0] == b'm': # merge
543 size = NONNORMAL
556 size = NONNORMAL
544 elif entry[0] == b'n' and entry[2] == FROM_P2: # other parent
557 elif entry[0] == b'n' and entry[2] == FROM_P2: # other parent
545 size = FROM_P2
558 size = FROM_P2
546 self._map.otherparentset.add(f)
559 self._map.otherparentset.add(f)
547 self._updatedfiles.add(f)
560 self._updatedfiles.add(f)
548 self._map.removefile(f, oldstate, size)
561 self._map.removefile(f, oldstate, size)
549 if size == 0:
562 if size == 0:
550 self._map.copymap.pop(f, None)
563 self._map.copymap.pop(f, None)
551
564
552 def merge(self, f):
565 def merge(self, f):
553 '''Mark a file merged.'''
566 '''Mark a file merged.'''
554 if self._pl[1] == self._nodeconstants.nullid:
567 if self._pl[1] == self._nodeconstants.nullid:
555 return self.normallookup(f)
568 return self.normallookup(f)
556 return self.otherparent(f)
569 return self.otherparent(f)
557
570
558 def drop(self, f):
571 def drop(self, f):
559 '''Drop a file from the dirstate'''
572 '''Drop a file from the dirstate'''
560 oldstate = self[f]
573 oldstate = self[f]
561 if self._map.dropfile(f, oldstate):
574 if self._map.dropfile(f, oldstate):
562 self._dirty = True
575 self._dirty = True
563 self._updatedfiles.add(f)
576 self._updatedfiles.add(f)
564 self._map.copymap.pop(f, None)
577 self._map.copymap.pop(f, None)
565
578
    def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
        """Discover the on-disk case-folded spelling of ``path``.

        ``normed`` is the case-normalized form of ``path``.  ``exists``
        is tri-state: True/False when the caller already knows whether
        the path exists, None to probe the filesystem here with
        ``lexists``.  On success for an existing path, the discovered
        folding is cached in ``storemap`` under ``normed``.
        """
        if exists is None:
            exists = os.path.lexists(os.path.join(self._root, path))
        if not exists:
            # Maybe a path component exists
            if not ignoremissing and b'/' in path:
                # normalize the directory part recursively and keep the
                # final (missing) component's original case
                d, f = path.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, None)
                folded = d + b"/" + f
            else:
                # No path components, preserve original case
                folded = path
        else:
            # recursively normalize leading directory components
            # against dirstate
            if b'/' in normed:
                d, f = normed.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, True)
                r = self._root + b"/" + d
                folded = d + b"/" + util.fspath(f, r)
            else:
                folded = util.fspath(normed, self._root)
            # only existing paths are cached; presumably caching a missing
            # path could poison the fold map -- TODO confirm intent
            storemap[normed] = folded

        return folded
591
604
592 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
605 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
593 normed = util.normcase(path)
606 normed = util.normcase(path)
594 folded = self._map.filefoldmap.get(normed, None)
607 folded = self._map.filefoldmap.get(normed, None)
595 if folded is None:
608 if folded is None:
596 if isknown:
609 if isknown:
597 folded = path
610 folded = path
598 else:
611 else:
599 folded = self._discoverpath(
612 folded = self._discoverpath(
600 path, normed, ignoremissing, exists, self._map.filefoldmap
613 path, normed, ignoremissing, exists, self._map.filefoldmap
601 )
614 )
602 return folded
615 return folded
603
616
604 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
617 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
605 normed = util.normcase(path)
618 normed = util.normcase(path)
606 folded = self._map.filefoldmap.get(normed, None)
619 folded = self._map.filefoldmap.get(normed, None)
607 if folded is None:
620 if folded is None:
608 folded = self._map.dirfoldmap.get(normed, None)
621 folded = self._map.dirfoldmap.get(normed, None)
609 if folded is None:
622 if folded is None:
610 if isknown:
623 if isknown:
611 folded = path
624 folded = path
612 else:
625 else:
613 # store discovered result in dirfoldmap so that future
626 # store discovered result in dirfoldmap so that future
614 # normalizefile calls don't start matching directories
627 # normalizefile calls don't start matching directories
615 folded = self._discoverpath(
628 folded = self._discoverpath(
616 path, normed, ignoremissing, exists, self._map.dirfoldmap
629 path, normed, ignoremissing, exists, self._map.dirfoldmap
617 )
630 )
618 return folded
631 return folded
619
632
620 def normalize(self, path, isknown=False, ignoremissing=False):
633 def normalize(self, path, isknown=False, ignoremissing=False):
621 """
634 """
622 normalize the case of a pathname when on a casefolding filesystem
635 normalize the case of a pathname when on a casefolding filesystem
623
636
624 isknown specifies whether the filename came from walking the
637 isknown specifies whether the filename came from walking the
625 disk, to avoid extra filesystem access.
638 disk, to avoid extra filesystem access.
626
639
627 If ignoremissing is True, missing path are returned
640 If ignoremissing is True, missing path are returned
628 unchanged. Otherwise, we try harder to normalize possibly
641 unchanged. Otherwise, we try harder to normalize possibly
629 existing path components.
642 existing path components.
630
643
631 The normalized case is determined based on the following precedence:
644 The normalized case is determined based on the following precedence:
632
645
633 - version of name already stored in the dirstate
646 - version of name already stored in the dirstate
634 - version of name stored on disk
647 - version of name stored on disk
635 - version provided via command arguments
648 - version provided via command arguments
636 """
649 """
637
650
638 if self._checkcase:
651 if self._checkcase:
639 return self._normalize(path, isknown, ignoremissing)
652 return self._normalize(path, isknown, ignoremissing)
640 return path
653 return path
641
654
642 def clear(self):
655 def clear(self):
643 self._map.clear()
656 self._map.clear()
644 self._lastnormaltime = 0
657 self._lastnormaltime = 0
645 self._updatedfiles.clear()
658 self._updatedfiles.clear()
646 self._dirty = True
659 self._dirty = True
647
660
    def rebuild(self, parent, allfiles, changedfiles=None):
        """Reset the dirstate to describe ``parent`` and ``allfiles``.

        When ``changedfiles`` is given, only those files are refreshed:
        ones still present in ``allfiles`` are re-looked-up, the rest
        are dropped.  Otherwise the whole dirstate is rebuilt from
        scratch (preserving ``_lastnormaltime`` across the clear).
        """
        if changedfiles is None:
            # Rebuild entire dirstate
            to_lookup = allfiles
            to_drop = []
            lastnormaltime = self._lastnormaltime
            self.clear()
            self._lastnormaltime = lastnormaltime
        elif len(changedfiles) < 10:
            # Avoid turning allfiles into a set, which can be expensive if it's
            # large.
            to_lookup = []
            to_drop = []
            for f in changedfiles:
                if f in allfiles:
                    to_lookup.append(f)
                else:
                    to_drop.append(f)
        else:
            # many changed files: set arithmetic beats repeated membership
            # tests on the (possibly large) allfiles container
            changedfilesset = set(changedfiles)
            to_lookup = changedfilesset & set(allfiles)
            to_drop = changedfilesset - to_lookup

        # remember the original parents once so parent-change callbacks
        # can observe the transition when the dirstate is written out
        if self._origpl is None:
            self._origpl = self._pl
        self._map.setparents(parent, self._nodeconstants.nullid)

        for f in to_lookup:
            self.normallookup(f)
        for f in to_drop:
            self.drop(f)

        self._dirty = True
681
694
682 def identity(self):
695 def identity(self):
683 """Return identity of dirstate itself to detect changing in storage
696 """Return identity of dirstate itself to detect changing in storage
684
697
685 If identity of previous dirstate is equal to this, writing
698 If identity of previous dirstate is equal to this, writing
686 changes based on the former dirstate out can keep consistency.
699 changes based on the former dirstate out can keep consistency.
687 """
700 """
688 return self._map.identity
701 return self._map.identity
689
702
    def write(self, tr):
        """Write the dirstate out to disk if it is dirty.

        With a transaction ``tr``, the actual serialization is deferred
        through a transaction file generator so ambiguous timestamps can
        be dropped at transaction close; without one, the file is
        written immediately through an atomic temp file.
        """
        if not self._dirty:
            return

        filename = self._filename
        if tr:
            # 'dirstate.write()' is not only for writing in-memory
            # changes out, but also for dropping ambiguous timestamp.
            # delayed writing re-raise "ambiguous timestamp issue".
            # See also the wiki page below for detail:
            # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan

            # emulate dropping timestamp in 'parsers.pack_dirstate'
            now = _getfsnow(self._opener)
            self._map.clearambiguoustimes(self._updatedfiles, now)

            # emulate that all 'dirstate.normal' results are written out
            self._lastnormaltime = 0
            self._updatedfiles.clear()

            # delay writing in-memory changes out
            tr.addfilegenerator(
                b'dirstate',
                (self._filename,),
                self._writedirstate,
                location=b'plain',
            )
            return

        # no transaction: write synchronously and atomically, guarding
        # against timestamp ambiguity on the resulting file
        st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
        self._writedirstate(st)
721
734
722 def addparentchangecallback(self, category, callback):
735 def addparentchangecallback(self, category, callback):
723 """add a callback to be called when the wd parents are changed
736 """add a callback to be called when the wd parents are changed
724
737
725 Callback will be called with the following arguments:
738 Callback will be called with the following arguments:
726 dirstate, (oldp1, oldp2), (newp1, newp2)
739 dirstate, (oldp1, oldp2), (newp1, newp2)
727
740
728 Category is a unique identifier to allow overwriting an old callback
741 Category is a unique identifier to allow overwriting an old callback
729 with a newer callback.
742 with a newer callback.
730 """
743 """
731 self._plchangecallbacks[category] = callback
744 self._plchangecallbacks[category] = callback
732
745
    def _writedirstate(self, st):
        """Serialize the dirstate map into the open file object ``st``.

        Fires any registered parent-change callbacks first, then derives
        'now' from the temp file's mtime and optionally sleeps (per the
        debug.dirstate.delaywrite knob) so no entry shares a timestamp
        with the write itself.
        """
        # notify callbacks about parents change
        if self._origpl is not None and self._origpl != self._pl:
            for c, callback in sorted(
                pycompat.iteritems(self._plchangecallbacks)
            ):
                callback(self, self._origpl, self._pl)
            self._origpl = None
        # use the modification time of the newly created temporary file as the
        # filesystem's notion of 'now'
        now = util.fstat(st)[stat.ST_MTIME] & _rangemask

        # enough 'delaywrite' prevents 'pack_dirstate' from dropping
        # timestamp of each entries in dirstate, because of 'now > mtime'
        delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
        if delaywrite > 0:
            # do we have any files to delay for?
            for f, e in pycompat.iteritems(self._map):
                # e[0]: state byte; e[3]: recorded mtime of the entry
                if e[0] == b'n' and e[3] == now:
                    import time  # to avoid useless import

                    # rather than sleep n seconds, sleep until the next
                    # multiple of n seconds
                    clock = time.time()
                    start = int(clock) - (int(clock) % delaywrite)
                    end = start + delaywrite
                    time.sleep(end - clock)
                    now = end  # trust our estimate that the end is near now
                    break

        self._map.write(st, now)
        self._lastnormaltime = 0
        self._dirty = False
766
779
767 def _dirignore(self, f):
780 def _dirignore(self, f):
768 if self._ignore(f):
781 if self._ignore(f):
769 return True
782 return True
770 for p in pathutil.finddirs(f):
783 for p in pathutil.finddirs(f):
771 if self._ignore(p):
784 if self._ignore(p):
772 return True
785 return True
773 return False
786 return False
774
787
775 def _ignorefiles(self):
788 def _ignorefiles(self):
776 files = []
789 files = []
777 if os.path.exists(self._join(b'.hgignore')):
790 if os.path.exists(self._join(b'.hgignore')):
778 files.append(self._join(b'.hgignore'))
791 files.append(self._join(b'.hgignore'))
779 for name, path in self._ui.configitems(b"ui"):
792 for name, path in self._ui.configitems(b"ui"):
780 if name == b'ignore' or name.startswith(b'ignore.'):
793 if name == b'ignore' or name.startswith(b'ignore.'):
781 # we need to use os.path.join here rather than self._join
794 # we need to use os.path.join here rather than self._join
782 # because path is arbitrary and user-specified
795 # because path is arbitrary and user-specified
783 files.append(os.path.join(self._rootdir, util.expandpath(path)))
796 files.append(os.path.join(self._rootdir, util.expandpath(path)))
784 return files
797 return files
785
798
    def _ignorefileandline(self, f):
        """Return ``(ignorefile, lineno, line)`` for the first ignore rule
        matching ``f``.

        Ignore files are processed breadth-first, following
        ``subinclude`` directives (each file visited at most once).
        Returns ``(None, -1, b"")`` when nothing matches.
        """
        files = collections.deque(self._ignorefiles())
        visited = set()
        while files:
            i = files.popleft()
            patterns = matchmod.readpatternfile(
                i, self._ui.warn, sourceinfo=True
            )
            for pattern, lineno, line in patterns:
                kind, p = matchmod._patsplit(pattern, b'glob')
                if kind == b"subinclude":
                    # queue the referenced ignore file instead of matching
                    if p not in visited:
                        files.append(p)
                    continue
                m = matchmod.match(
                    self._root, b'', [], [pattern], warn=self._ui.warn
                )
                if m(f):
                    # first matching rule wins; report where it came from
                    return (i, lineno, line)
            visited.add(i)
        return (None, -1, b"")
807
820
    def _walkexplicit(self, match, subrepos):
        """Get stat data about the files explicitly specified by match.

        Return a triple (results, dirsfound, dirsnotfound).
        - results is a mapping from filename to stat result. It also contains
          listings mapping subrepos and .hg to None.
        - dirsfound is a list of files found to be directories.
        - dirsnotfound is a list of files that the dirstate thinks are
          directories and that were not found."""

        def badtype(mode):
            # human-readable description for a path that is neither a
            # regular file, a symlink, nor a directory
            kind = _(b'unknown')
            if stat.S_ISCHR(mode):
                kind = _(b'character device')
            elif stat.S_ISBLK(mode):
                kind = _(b'block device')
            elif stat.S_ISFIFO(mode):
                kind = _(b'fifo')
            elif stat.S_ISSOCK(mode):
                kind = _(b'socket')
            elif stat.S_ISDIR(mode):
                kind = _(b'directory')
            return _(b'unsupported file type (type is %s)') % kind

        # bind frequently used attributes/callables to locals once for
        # the loops below
        badfn = match.bad
        dmap = self._map
        lstat = os.lstat
        getkind = stat.S_IFMT
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join
        dirsfound = []
        foundadd = dirsfound.append
        dirsnotfound = []
        notfoundadd = dirsnotfound.append

        if not match.isexact() and self._checkcase:
            normalize = self._normalize
        else:
            normalize = None

        # drop files that live inside a subrepo: both lists are sorted,
        # so one merge-style pass over them suffices
        files = sorted(match.files())
        subrepos.sort()
        i, j = 0, 0
        while i < len(files) and j < len(subrepos):
            subpath = subrepos[j] + b"/"
            if files[i] < subpath:
                i += 1
                continue
            while i < len(files) and files[i].startswith(subpath):
                del files[i]
            j += 1

        if not files or b'' in files:
            files = [b'']
            # constructing the foldmap is expensive, so don't do it for the
            # common case where files is ['']
            normalize = None
        # subrepos and .hg map to None sentinels so the walk never
        # descends into them
        results = dict.fromkeys(subrepos)
        results[b'.hg'] = None

        for ff in files:
            if normalize:
                nf = normalize(ff, False, True)
            else:
                nf = ff
            if nf in results:
                continue

            try:
                st = lstat(join(nf))
                kind = getkind(st.st_mode)
                if kind == dirkind:
                    if nf in dmap:
                        # file replaced by dir on disk but still in dirstate
                        results[nf] = None
                    foundadd((nf, ff))
                elif kind == regkind or kind == lnkkind:
                    results[nf] = st
                else:
                    badfn(ff, badtype(kind))
                    if nf in dmap:
                        results[nf] = None
            except OSError as inst:  # nf not found on disk - it is dirstate only
                if nf in dmap:  # does it exactly match a missing file?
                    results[nf] = None
                else:  # does it match a missing directory?
                    if self._map.hasdir(nf):
                        notfoundadd(nf)
                    else:
                        badfn(ff, encoding.strtolocal(inst.strerror))

        # match.files() may contain explicitly-specified paths that shouldn't
        # be taken; drop them from the list of files found. dirsfound/notfound
        # aren't filtered here because they will be tested later.
        if match.anypats():
            for f in list(results):
                if f == b'.hg' or f in subrepos:
                    # keep sentinel to disable further out-of-repo walks
                    continue
                if not match(f):
                    del results[f]

        # Case insensitive filesystems cannot rely on lstat() failing to detect
        # a case-only rename. Prune the stat object for any file that does not
        # match the case in the filesystem, if there are multiple files that
        # normalize to the same path.
        if match.isexact() and self._checkcase:
            normed = {}

            # group the stat'ed results by their case-normalized form
            for f, st in pycompat.iteritems(results):
                if st is None:
                    continue

                nc = util.normcase(f)
                paths = normed.get(nc)

                if paths is None:
                    paths = set()
                    normed[nc] = paths

                paths.add(f)

            # for ambiguous groups, keep the stat only for the spelling
            # that actually matches the filesystem
            for norm, paths in pycompat.iteritems(normed):
                if len(paths) > 1:
                    for path in paths:
                        folded = self._discoverpath(
                            path, norm, True, None, self._map.dirfoldmap
                        )
                        if path != folded:
                            results[path] = None

        return results, dirsfound, dirsnotfound
942
955
943 def walk(self, match, subrepos, unknown, ignored, full=True):
956 def walk(self, match, subrepos, unknown, ignored, full=True):
944 """
957 """
945 Walk recursively through the directory tree, finding all files
958 Walk recursively through the directory tree, finding all files
946 matched by match.
959 matched by match.
947
960
948 If full is False, maybe skip some known-clean files.
961 If full is False, maybe skip some known-clean files.
949
962
950 Return a dict mapping filename to stat-like object (either
963 Return a dict mapping filename to stat-like object (either
951 mercurial.osutil.stat instance or return value of os.stat()).
964 mercurial.osutil.stat instance or return value of os.stat()).
952
965
953 """
966 """
954 # full is a flag that extensions that hook into walk can use -- this
967 # full is a flag that extensions that hook into walk can use -- this
955 # implementation doesn't use it at all. This satisfies the contract
968 # implementation doesn't use it at all. This satisfies the contract
956 # because we only guarantee a "maybe".
969 # because we only guarantee a "maybe".
957
970
958 if ignored:
971 if ignored:
959 ignore = util.never
972 ignore = util.never
960 dirignore = util.never
973 dirignore = util.never
961 elif unknown:
974 elif unknown:
962 ignore = self._ignore
975 ignore = self._ignore
963 dirignore = self._dirignore
976 dirignore = self._dirignore
964 else:
977 else:
965 # if not unknown and not ignored, drop dir recursion and step 2
978 # if not unknown and not ignored, drop dir recursion and step 2
966 ignore = util.always
979 ignore = util.always
967 dirignore = util.always
980 dirignore = util.always
968
981
969 matchfn = match.matchfn
982 matchfn = match.matchfn
970 matchalways = match.always()
983 matchalways = match.always()
971 matchtdir = match.traversedir
984 matchtdir = match.traversedir
972 dmap = self._map
985 dmap = self._map
973 listdir = util.listdir
986 listdir = util.listdir
974 lstat = os.lstat
987 lstat = os.lstat
975 dirkind = stat.S_IFDIR
988 dirkind = stat.S_IFDIR
976 regkind = stat.S_IFREG
989 regkind = stat.S_IFREG
977 lnkkind = stat.S_IFLNK
990 lnkkind = stat.S_IFLNK
978 join = self._join
991 join = self._join
979
992
980 exact = skipstep3 = False
993 exact = skipstep3 = False
981 if match.isexact(): # match.exact
994 if match.isexact(): # match.exact
982 exact = True
995 exact = True
983 dirignore = util.always # skip step 2
996 dirignore = util.always # skip step 2
984 elif match.prefix(): # match.match, no patterns
997 elif match.prefix(): # match.match, no patterns
985 skipstep3 = True
998 skipstep3 = True
986
999
987 if not exact and self._checkcase:
1000 if not exact and self._checkcase:
988 normalize = self._normalize
1001 normalize = self._normalize
989 normalizefile = self._normalizefile
1002 normalizefile = self._normalizefile
990 skipstep3 = False
1003 skipstep3 = False
991 else:
1004 else:
992 normalize = self._normalize
1005 normalize = self._normalize
993 normalizefile = None
1006 normalizefile = None
994
1007
995 # step 1: find all explicit files
1008 # step 1: find all explicit files
996 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1009 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
997 if matchtdir:
1010 if matchtdir:
998 for d in work:
1011 for d in work:
999 matchtdir(d[0])
1012 matchtdir(d[0])
1000 for d in dirsnotfound:
1013 for d in dirsnotfound:
1001 matchtdir(d)
1014 matchtdir(d)
1002
1015
1003 skipstep3 = skipstep3 and not (work or dirsnotfound)
1016 skipstep3 = skipstep3 and not (work or dirsnotfound)
1004 work = [d for d in work if not dirignore(d[0])]
1017 work = [d for d in work if not dirignore(d[0])]
1005
1018
1006 # step 2: visit subdirectories
1019 # step 2: visit subdirectories
1007 def traverse(work, alreadynormed):
1020 def traverse(work, alreadynormed):
1008 wadd = work.append
1021 wadd = work.append
1009 while work:
1022 while work:
1010 tracing.counter('dirstate.walk work', len(work))
1023 tracing.counter('dirstate.walk work', len(work))
1011 nd = work.pop()
1024 nd = work.pop()
1012 visitentries = match.visitchildrenset(nd)
1025 visitentries = match.visitchildrenset(nd)
1013 if not visitentries:
1026 if not visitentries:
1014 continue
1027 continue
1015 if visitentries == b'this' or visitentries == b'all':
1028 if visitentries == b'this' or visitentries == b'all':
1016 visitentries = None
1029 visitentries = None
1017 skip = None
1030 skip = None
1018 if nd != b'':
1031 if nd != b'':
1019 skip = b'.hg'
1032 skip = b'.hg'
1020 try:
1033 try:
1021 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1034 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1022 entries = listdir(join(nd), stat=True, skip=skip)
1035 entries = listdir(join(nd), stat=True, skip=skip)
1023 except OSError as inst:
1036 except OSError as inst:
1024 if inst.errno in (errno.EACCES, errno.ENOENT):
1037 if inst.errno in (errno.EACCES, errno.ENOENT):
1025 match.bad(
1038 match.bad(
1026 self.pathto(nd), encoding.strtolocal(inst.strerror)
1039 self.pathto(nd), encoding.strtolocal(inst.strerror)
1027 )
1040 )
1028 continue
1041 continue
1029 raise
1042 raise
1030 for f, kind, st in entries:
1043 for f, kind, st in entries:
1031 # Some matchers may return files in the visitentries set,
1044 # Some matchers may return files in the visitentries set,
1032 # instead of 'this', if the matcher explicitly mentions them
1045 # instead of 'this', if the matcher explicitly mentions them
1033 # and is not an exactmatcher. This is acceptable; we do not
1046 # and is not an exactmatcher. This is acceptable; we do not
1034 # make any hard assumptions about file-or-directory below
1047 # make any hard assumptions about file-or-directory below
1035 # based on the presence of `f` in visitentries. If
1048 # based on the presence of `f` in visitentries. If
1036 # visitchildrenset returned a set, we can always skip the
1049 # visitchildrenset returned a set, we can always skip the
1037 # entries *not* in the set it provided regardless of whether
1050 # entries *not* in the set it provided regardless of whether
1038 # they're actually a file or a directory.
1051 # they're actually a file or a directory.
1039 if visitentries and f not in visitentries:
1052 if visitentries and f not in visitentries:
1040 continue
1053 continue
1041 if normalizefile:
1054 if normalizefile:
1042 # even though f might be a directory, we're only
1055 # even though f might be a directory, we're only
1043 # interested in comparing it to files currently in the
1056 # interested in comparing it to files currently in the
1044 # dmap -- therefore normalizefile is enough
1057 # dmap -- therefore normalizefile is enough
1045 nf = normalizefile(
1058 nf = normalizefile(
1046 nd and (nd + b"/" + f) or f, True, True
1059 nd and (nd + b"/" + f) or f, True, True
1047 )
1060 )
1048 else:
1061 else:
1049 nf = nd and (nd + b"/" + f) or f
1062 nf = nd and (nd + b"/" + f) or f
1050 if nf not in results:
1063 if nf not in results:
1051 if kind == dirkind:
1064 if kind == dirkind:
1052 if not ignore(nf):
1065 if not ignore(nf):
1053 if matchtdir:
1066 if matchtdir:
1054 matchtdir(nf)
1067 matchtdir(nf)
1055 wadd(nf)
1068 wadd(nf)
1056 if nf in dmap and (matchalways or matchfn(nf)):
1069 if nf in dmap and (matchalways or matchfn(nf)):
1057 results[nf] = None
1070 results[nf] = None
1058 elif kind == regkind or kind == lnkkind:
1071 elif kind == regkind or kind == lnkkind:
1059 if nf in dmap:
1072 if nf in dmap:
1060 if matchalways or matchfn(nf):
1073 if matchalways or matchfn(nf):
1061 results[nf] = st
1074 results[nf] = st
1062 elif (matchalways or matchfn(nf)) and not ignore(
1075 elif (matchalways or matchfn(nf)) and not ignore(
1063 nf
1076 nf
1064 ):
1077 ):
1065 # unknown file -- normalize if necessary
1078 # unknown file -- normalize if necessary
1066 if not alreadynormed:
1079 if not alreadynormed:
1067 nf = normalize(nf, False, True)
1080 nf = normalize(nf, False, True)
1068 results[nf] = st
1081 results[nf] = st
1069 elif nf in dmap and (matchalways or matchfn(nf)):
1082 elif nf in dmap and (matchalways or matchfn(nf)):
1070 results[nf] = None
1083 results[nf] = None
1071
1084
1072 for nd, d in work:
1085 for nd, d in work:
1073 # alreadynormed means that processwork doesn't have to do any
1086 # alreadynormed means that processwork doesn't have to do any
1074 # expensive directory normalization
1087 # expensive directory normalization
1075 alreadynormed = not normalize or nd == d
1088 alreadynormed = not normalize or nd == d
1076 traverse([d], alreadynormed)
1089 traverse([d], alreadynormed)
1077
1090
1078 for s in subrepos:
1091 for s in subrepos:
1079 del results[s]
1092 del results[s]
1080 del results[b'.hg']
1093 del results[b'.hg']
1081
1094
1082 # step 3: visit remaining files from dmap
1095 # step 3: visit remaining files from dmap
1083 if not skipstep3 and not exact:
1096 if not skipstep3 and not exact:
1084 # If a dmap file is not in results yet, it was either
1097 # If a dmap file is not in results yet, it was either
1085 # a) not matching matchfn b) ignored, c) missing, or d) under a
1098 # a) not matching matchfn b) ignored, c) missing, or d) under a
1086 # symlink directory.
1099 # symlink directory.
1087 if not results and matchalways:
1100 if not results and matchalways:
1088 visit = [f for f in dmap]
1101 visit = [f for f in dmap]
1089 else:
1102 else:
1090 visit = [f for f in dmap if f not in results and matchfn(f)]
1103 visit = [f for f in dmap if f not in results and matchfn(f)]
1091 visit.sort()
1104 visit.sort()
1092
1105
1093 if unknown:
1106 if unknown:
1094 # unknown == True means we walked all dirs under the roots
1107 # unknown == True means we walked all dirs under the roots
1095 # that wasn't ignored, and everything that matched was stat'ed
1108 # that wasn't ignored, and everything that matched was stat'ed
1096 # and is already in results.
1109 # and is already in results.
1097 # The rest must thus be ignored or under a symlink.
1110 # The rest must thus be ignored or under a symlink.
1098 audit_path = pathutil.pathauditor(self._root, cached=True)
1111 audit_path = pathutil.pathauditor(self._root, cached=True)
1099
1112
1100 for nf in iter(visit):
1113 for nf in iter(visit):
1101 # If a stat for the same file was already added with a
1114 # If a stat for the same file was already added with a
1102 # different case, don't add one for this, since that would
1115 # different case, don't add one for this, since that would
1103 # make it appear as if the file exists under both names
1116 # make it appear as if the file exists under both names
1104 # on disk.
1117 # on disk.
1105 if (
1118 if (
1106 normalizefile
1119 normalizefile
1107 and normalizefile(nf, True, True) in results
1120 and normalizefile(nf, True, True) in results
1108 ):
1121 ):
1109 results[nf] = None
1122 results[nf] = None
1110 # Report ignored items in the dmap as long as they are not
1123 # Report ignored items in the dmap as long as they are not
1111 # under a symlink directory.
1124 # under a symlink directory.
1112 elif audit_path.check(nf):
1125 elif audit_path.check(nf):
1113 try:
1126 try:
1114 results[nf] = lstat(join(nf))
1127 results[nf] = lstat(join(nf))
1115 # file was just ignored, no links, and exists
1128 # file was just ignored, no links, and exists
1116 except OSError:
1129 except OSError:
1117 # file doesn't exist
1130 # file doesn't exist
1118 results[nf] = None
1131 results[nf] = None
1119 else:
1132 else:
1120 # It's either missing or under a symlink directory
1133 # It's either missing or under a symlink directory
1121 # which we in this case report as missing
1134 # which we in this case report as missing
1122 results[nf] = None
1135 results[nf] = None
1123 else:
1136 else:
1124 # We may not have walked the full directory tree above,
1137 # We may not have walked the full directory tree above,
1125 # so stat and check everything we missed.
1138 # so stat and check everything we missed.
1126 iv = iter(visit)
1139 iv = iter(visit)
1127 for st in util.statfiles([join(i) for i in visit]):
1140 for st in util.statfiles([join(i) for i in visit]):
1128 results[next(iv)] = st
1141 results[next(iv)] = st
1129 return results
1142 return results
1130
1143
1131 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1144 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1132 # Force Rayon (Rust parallelism library) to respect the number of
1145 # Force Rayon (Rust parallelism library) to respect the number of
1133 # workers. This is a temporary workaround until Rust code knows
1146 # workers. This is a temporary workaround until Rust code knows
1134 # how to read the config file.
1147 # how to read the config file.
1135 numcpus = self._ui.configint(b"worker", b"numcpus")
1148 numcpus = self._ui.configint(b"worker", b"numcpus")
1136 if numcpus is not None:
1149 if numcpus is not None:
1137 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1150 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1138
1151
1139 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1152 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1140 if not workers_enabled:
1153 if not workers_enabled:
1141 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1154 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1142
1155
1143 (
1156 (
1144 lookup,
1157 lookup,
1145 modified,
1158 modified,
1146 added,
1159 added,
1147 removed,
1160 removed,
1148 deleted,
1161 deleted,
1149 clean,
1162 clean,
1150 ignored,
1163 ignored,
1151 unknown,
1164 unknown,
1152 warnings,
1165 warnings,
1153 bad,
1166 bad,
1154 traversed,
1167 traversed,
1155 dirty,
1168 dirty,
1156 ) = rustmod.status(
1169 ) = rustmod.status(
1157 self._map._rustmap,
1170 self._map._rustmap,
1158 matcher,
1171 matcher,
1159 self._rootdir,
1172 self._rootdir,
1160 self._ignorefiles(),
1173 self._ignorefiles(),
1161 self._checkexec,
1174 self._checkexec,
1162 self._lastnormaltime,
1175 self._lastnormaltime,
1163 bool(list_clean),
1176 bool(list_clean),
1164 bool(list_ignored),
1177 bool(list_ignored),
1165 bool(list_unknown),
1178 bool(list_unknown),
1166 bool(matcher.traversedir),
1179 bool(matcher.traversedir),
1167 )
1180 )
1168
1181
1169 self._dirty |= dirty
1182 self._dirty |= dirty
1170
1183
1171 if matcher.traversedir:
1184 if matcher.traversedir:
1172 for dir in traversed:
1185 for dir in traversed:
1173 matcher.traversedir(dir)
1186 matcher.traversedir(dir)
1174
1187
1175 if self._ui.warn:
1188 if self._ui.warn:
1176 for item in warnings:
1189 for item in warnings:
1177 if isinstance(item, tuple):
1190 if isinstance(item, tuple):
1178 file_path, syntax = item
1191 file_path, syntax = item
1179 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1192 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1180 file_path,
1193 file_path,
1181 syntax,
1194 syntax,
1182 )
1195 )
1183 self._ui.warn(msg)
1196 self._ui.warn(msg)
1184 else:
1197 else:
1185 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1198 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1186 self._ui.warn(
1199 self._ui.warn(
1187 msg
1200 msg
1188 % (
1201 % (
1189 pathutil.canonpath(
1202 pathutil.canonpath(
1190 self._rootdir, self._rootdir, item
1203 self._rootdir, self._rootdir, item
1191 ),
1204 ),
1192 b"No such file or directory",
1205 b"No such file or directory",
1193 )
1206 )
1194 )
1207 )
1195
1208
1196 for (fn, message) in bad:
1209 for (fn, message) in bad:
1197 matcher.bad(fn, encoding.strtolocal(message))
1210 matcher.bad(fn, encoding.strtolocal(message))
1198
1211
1199 status = scmutil.status(
1212 status = scmutil.status(
1200 modified=modified,
1213 modified=modified,
1201 added=added,
1214 added=added,
1202 removed=removed,
1215 removed=removed,
1203 deleted=deleted,
1216 deleted=deleted,
1204 unknown=unknown,
1217 unknown=unknown,
1205 ignored=ignored,
1218 ignored=ignored,
1206 clean=clean,
1219 clean=clean,
1207 )
1220 )
1208 return (lookup, status)
1221 return (lookup, status)
1209
1222
1210 def status(self, match, subrepos, ignored, clean, unknown):
1223 def status(self, match, subrepos, ignored, clean, unknown):
1211 """Determine the status of the working copy relative to the
1224 """Determine the status of the working copy relative to the
1212 dirstate and return a pair of (unsure, status), where status is of type
1225 dirstate and return a pair of (unsure, status), where status is of type
1213 scmutil.status and:
1226 scmutil.status and:
1214
1227
1215 unsure:
1228 unsure:
1216 files that might have been modified since the dirstate was
1229 files that might have been modified since the dirstate was
1217 written, but need to be read to be sure (size is the same
1230 written, but need to be read to be sure (size is the same
1218 but mtime differs)
1231 but mtime differs)
1219 status.modified:
1232 status.modified:
1220 files that have definitely been modified since the dirstate
1233 files that have definitely been modified since the dirstate
1221 was written (different size or mode)
1234 was written (different size or mode)
1222 status.clean:
1235 status.clean:
1223 files that have definitely not been modified since the
1236 files that have definitely not been modified since the
1224 dirstate was written
1237 dirstate was written
1225 """
1238 """
1226 listignored, listclean, listunknown = ignored, clean, unknown
1239 listignored, listclean, listunknown = ignored, clean, unknown
1227 lookup, modified, added, unknown, ignored = [], [], [], [], []
1240 lookup, modified, added, unknown, ignored = [], [], [], [], []
1228 removed, deleted, clean = [], [], []
1241 removed, deleted, clean = [], [], []
1229
1242
1230 dmap = self._map
1243 dmap = self._map
1231 dmap.preload()
1244 dmap.preload()
1232
1245
1233 use_rust = True
1246 use_rust = True
1234
1247
1235 allowed_matchers = (
1248 allowed_matchers = (
1236 matchmod.alwaysmatcher,
1249 matchmod.alwaysmatcher,
1237 matchmod.exactmatcher,
1250 matchmod.exactmatcher,
1238 matchmod.includematcher,
1251 matchmod.includematcher,
1239 )
1252 )
1240
1253
1241 if rustmod is None:
1254 if rustmod is None:
1242 use_rust = False
1255 use_rust = False
1243 elif self._checkcase:
1256 elif self._checkcase:
1244 # Case-insensitive filesystems are not handled yet
1257 # Case-insensitive filesystems are not handled yet
1245 use_rust = False
1258 use_rust = False
1246 elif subrepos:
1259 elif subrepos:
1247 use_rust = False
1260 use_rust = False
1248 elif sparse.enabled:
1261 elif sparse.enabled:
1249 use_rust = False
1262 use_rust = False
1250 elif not isinstance(match, allowed_matchers):
1263 elif not isinstance(match, allowed_matchers):
1251 # Some matchers have yet to be implemented
1264 # Some matchers have yet to be implemented
1252 use_rust = False
1265 use_rust = False
1253
1266
1254 if use_rust:
1267 if use_rust:
1255 try:
1268 try:
1256 return self._rust_status(
1269 return self._rust_status(
1257 match, listclean, listignored, listunknown
1270 match, listclean, listignored, listunknown
1258 )
1271 )
1259 except rustmod.FallbackError:
1272 except rustmod.FallbackError:
1260 pass
1273 pass
1261
1274
1262 def noop(f):
1275 def noop(f):
1263 pass
1276 pass
1264
1277
1265 dcontains = dmap.__contains__
1278 dcontains = dmap.__contains__
1266 dget = dmap.__getitem__
1279 dget = dmap.__getitem__
1267 ladd = lookup.append # aka "unsure"
1280 ladd = lookup.append # aka "unsure"
1268 madd = modified.append
1281 madd = modified.append
1269 aadd = added.append
1282 aadd = added.append
1270 uadd = unknown.append if listunknown else noop
1283 uadd = unknown.append if listunknown else noop
1271 iadd = ignored.append if listignored else noop
1284 iadd = ignored.append if listignored else noop
1272 radd = removed.append
1285 radd = removed.append
1273 dadd = deleted.append
1286 dadd = deleted.append
1274 cadd = clean.append if listclean else noop
1287 cadd = clean.append if listclean else noop
1275 mexact = match.exact
1288 mexact = match.exact
1276 dirignore = self._dirignore
1289 dirignore = self._dirignore
1277 checkexec = self._checkexec
1290 checkexec = self._checkexec
1278 copymap = self._map.copymap
1291 copymap = self._map.copymap
1279 lastnormaltime = self._lastnormaltime
1292 lastnormaltime = self._lastnormaltime
1280
1293
1281 # We need to do full walks when either
1294 # We need to do full walks when either
1282 # - we're listing all clean files, or
1295 # - we're listing all clean files, or
1283 # - match.traversedir does something, because match.traversedir should
1296 # - match.traversedir does something, because match.traversedir should
1284 # be called for every dir in the working dir
1297 # be called for every dir in the working dir
1285 full = listclean or match.traversedir is not None
1298 full = listclean or match.traversedir is not None
1286 for fn, st in pycompat.iteritems(
1299 for fn, st in pycompat.iteritems(
1287 self.walk(match, subrepos, listunknown, listignored, full=full)
1300 self.walk(match, subrepos, listunknown, listignored, full=full)
1288 ):
1301 ):
1289 if not dcontains(fn):
1302 if not dcontains(fn):
1290 if (listignored or mexact(fn)) and dirignore(fn):
1303 if (listignored or mexact(fn)) and dirignore(fn):
1291 if listignored:
1304 if listignored:
1292 iadd(fn)
1305 iadd(fn)
1293 else:
1306 else:
1294 uadd(fn)
1307 uadd(fn)
1295 continue
1308 continue
1296
1309
1297 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1310 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1298 # written like that for performance reasons. dmap[fn] is not a
1311 # written like that for performance reasons. dmap[fn] is not a
1299 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1312 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1300 # opcode has fast paths when the value to be unpacked is a tuple or
1313 # opcode has fast paths when the value to be unpacked is a tuple or
1301 # a list, but falls back to creating a full-fledged iterator in
1314 # a list, but falls back to creating a full-fledged iterator in
1302 # general. That is much slower than simply accessing and storing the
1315 # general. That is much slower than simply accessing and storing the
1303 # tuple members one by one.
1316 # tuple members one by one.
1304 t = dget(fn)
1317 t = dget(fn)
1305 state = t[0]
1318 state = t[0]
1306 mode = t[1]
1319 mode = t[1]
1307 size = t[2]
1320 size = t[2]
1308 time = t[3]
1321 time = t[3]
1309
1322
1310 if not st and state in b"nma":
1323 if not st and state in b"nma":
1311 dadd(fn)
1324 dadd(fn)
1312 elif state == b'n':
1325 elif state == b'n':
1313 if (
1326 if (
1314 size >= 0
1327 size >= 0
1315 and (
1328 and (
1316 (size != st.st_size and size != st.st_size & _rangemask)
1329 (size != st.st_size and size != st.st_size & _rangemask)
1317 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1330 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1318 )
1331 )
1319 or size == FROM_P2 # other parent
1332 or size == FROM_P2 # other parent
1320 or fn in copymap
1333 or fn in copymap
1321 ):
1334 ):
1322 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1335 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1323 # issue6456: Size returned may be longer due to
1336 # issue6456: Size returned may be longer due to
1324 # encryption on EXT-4 fscrypt, undecided.
1337 # encryption on EXT-4 fscrypt, undecided.
1325 ladd(fn)
1338 ladd(fn)
1326 else:
1339 else:
1327 madd(fn)
1340 madd(fn)
1328 elif (
1341 elif (
1329 time != st[stat.ST_MTIME]
1342 time != st[stat.ST_MTIME]
1330 and time != st[stat.ST_MTIME] & _rangemask
1343 and time != st[stat.ST_MTIME] & _rangemask
1331 ):
1344 ):
1332 ladd(fn)
1345 ladd(fn)
1333 elif st[stat.ST_MTIME] == lastnormaltime:
1346 elif st[stat.ST_MTIME] == lastnormaltime:
1334 # fn may have just been marked as normal and it may have
1347 # fn may have just been marked as normal and it may have
1335 # changed in the same second without changing its size.
1348 # changed in the same second without changing its size.
1336 # This can happen if we quickly do multiple commits.
1349 # This can happen if we quickly do multiple commits.
1337 # Force lookup, so we don't miss such a racy file change.
1350 # Force lookup, so we don't miss such a racy file change.
1338 ladd(fn)
1351 ladd(fn)
1339 elif listclean:
1352 elif listclean:
1340 cadd(fn)
1353 cadd(fn)
1341 elif state == b'm':
1354 elif state == b'm':
1342 madd(fn)
1355 madd(fn)
1343 elif state == b'a':
1356 elif state == b'a':
1344 aadd(fn)
1357 aadd(fn)
1345 elif state == b'r':
1358 elif state == b'r':
1346 radd(fn)
1359 radd(fn)
1347 status = scmutil.status(
1360 status = scmutil.status(
1348 modified, added, removed, deleted, unknown, ignored, clean
1361 modified, added, removed, deleted, unknown, ignored, clean
1349 )
1362 )
1350 return (lookup, status)
1363 return (lookup, status)
1351
1364
1352 def matches(self, match):
1365 def matches(self, match):
1353 """
1366 """
1354 return files in the dirstate (in whatever state) filtered by match
1367 return files in the dirstate (in whatever state) filtered by match
1355 """
1368 """
1356 dmap = self._map
1369 dmap = self._map
1357 if rustmod is not None:
1370 if rustmod is not None:
1358 dmap = self._map._rustmap
1371 dmap = self._map._rustmap
1359
1372
1360 if match.always():
1373 if match.always():
1361 return dmap.keys()
1374 return dmap.keys()
1362 files = match.files()
1375 files = match.files()
1363 if match.isexact():
1376 if match.isexact():
1364 # fast path -- filter the other way around, since typically files is
1377 # fast path -- filter the other way around, since typically files is
1365 # much smaller than dmap
1378 # much smaller than dmap
1366 return [f for f in files if f in dmap]
1379 return [f for f in files if f in dmap]
1367 if match.prefix() and all(fn in dmap for fn in files):
1380 if match.prefix() and all(fn in dmap for fn in files):
1368 # fast path -- all the values are known to be files, so just return
1381 # fast path -- all the values are known to be files, so just return
1369 # that
1382 # that
1370 return list(files)
1383 return list(files)
1371 return [f for f in dmap if match(f)]
1384 return [f for f in dmap if match(f)]
1372
1385
1373 def _actualfilename(self, tr):
1386 def _actualfilename(self, tr):
1374 if tr:
1387 if tr:
1375 return self._pendingfilename
1388 return self._pendingfilename
1376 else:
1389 else:
1377 return self._filename
1390 return self._filename
1378
1391
1379 def savebackup(self, tr, backupname):
1392 def savebackup(self, tr, backupname):
1380 '''Save current dirstate into backup file'''
1393 '''Save current dirstate into backup file'''
1381 filename = self._actualfilename(tr)
1394 filename = self._actualfilename(tr)
1382 assert backupname != filename
1395 assert backupname != filename
1383
1396
1384 # use '_writedirstate' instead of 'write' to write changes certainly,
1397 # use '_writedirstate' instead of 'write' to write changes certainly,
1385 # because the latter omits writing out if transaction is running.
1398 # because the latter omits writing out if transaction is running.
1386 # output file will be used to create backup of dirstate at this point.
1399 # output file will be used to create backup of dirstate at this point.
1387 if self._dirty or not self._opener.exists(filename):
1400 if self._dirty or not self._opener.exists(filename):
1388 self._writedirstate(
1401 self._writedirstate(
1389 self._opener(filename, b"w", atomictemp=True, checkambig=True)
1402 self._opener(filename, b"w", atomictemp=True, checkambig=True)
1390 )
1403 )
1391
1404
1392 if tr:
1405 if tr:
1393 # ensure that subsequent tr.writepending returns True for
1406 # ensure that subsequent tr.writepending returns True for
1394 # changes written out above, even if dirstate is never
1407 # changes written out above, even if dirstate is never
1395 # changed after this
1408 # changed after this
1396 tr.addfilegenerator(
1409 tr.addfilegenerator(
1397 b'dirstate',
1410 b'dirstate',
1398 (self._filename,),
1411 (self._filename,),
1399 self._writedirstate,
1412 self._writedirstate,
1400 location=b'plain',
1413 location=b'plain',
1401 )
1414 )
1402
1415
1403 # ensure that pending file written above is unlinked at
1416 # ensure that pending file written above is unlinked at
1404 # failure, even if tr.writepending isn't invoked until the
1417 # failure, even if tr.writepending isn't invoked until the
1405 # end of this transaction
1418 # end of this transaction
1406 tr.registertmp(filename, location=b'plain')
1419 tr.registertmp(filename, location=b'plain')
1407
1420
1408 self._opener.tryunlink(backupname)
1421 self._opener.tryunlink(backupname)
1409 # hardlink backup is okay because _writedirstate is always called
1422 # hardlink backup is okay because _writedirstate is always called
1410 # with an "atomictemp=True" file.
1423 # with an "atomictemp=True" file.
1411 util.copyfile(
1424 util.copyfile(
1412 self._opener.join(filename),
1425 self._opener.join(filename),
1413 self._opener.join(backupname),
1426 self._opener.join(backupname),
1414 hardlink=True,
1427 hardlink=True,
1415 )
1428 )
1416
1429
1417 def restorebackup(self, tr, backupname):
1430 def restorebackup(self, tr, backupname):
1418 '''Restore dirstate by backup file'''
1431 '''Restore dirstate by backup file'''
1419 # this "invalidate()" prevents "wlock.release()" from writing
1432 # this "invalidate()" prevents "wlock.release()" from writing
1420 # changes of dirstate out after restoring from backup file
1433 # changes of dirstate out after restoring from backup file
1421 self.invalidate()
1434 self.invalidate()
1422 filename = self._actualfilename(tr)
1435 filename = self._actualfilename(tr)
1423 o = self._opener
1436 o = self._opener
1424 if util.samefile(o.join(backupname), o.join(filename)):
1437 if util.samefile(o.join(backupname), o.join(filename)):
1425 o.unlink(backupname)
1438 o.unlink(backupname)
1426 else:
1439 else:
1427 o.rename(backupname, filename, checkambig=True)
1440 o.rename(backupname, filename, checkambig=True)
1428
1441
1429 def clearbackup(self, tr, backupname):
1442 def clearbackup(self, tr, backupname):
1430 '''Clear backup file'''
1443 '''Clear backup file'''
1431 self._opener.unlink(backupname)
1444 self._opener.unlink(backupname)
1432
1445
1433
1446
1434 class dirstatemap(object):
1447 class dirstatemap(object):
1435 """Map encapsulating the dirstate's contents.
1448 """Map encapsulating the dirstate's contents.
1436
1449
1437 The dirstate contains the following state:
1450 The dirstate contains the following state:
1438
1451
1439 - `identity` is the identity of the dirstate file, which can be used to
1452 - `identity` is the identity of the dirstate file, which can be used to
1440 detect when changes have occurred to the dirstate file.
1453 detect when changes have occurred to the dirstate file.
1441
1454
1442 - `parents` is a pair containing the parents of the working copy. The
1455 - `parents` is a pair containing the parents of the working copy. The
1443 parents are updated by calling `setparents`.
1456 parents are updated by calling `setparents`.
1444
1457
1445 - the state map maps filenames to tuples of (state, mode, size, mtime),
1458 - the state map maps filenames to tuples of (state, mode, size, mtime),
1446 where state is a single character representing 'normal', 'added',
1459 where state is a single character representing 'normal', 'added',
1447 'removed', or 'merged'. It is read by treating the dirstate as a
1460 'removed', or 'merged'. It is read by treating the dirstate as a
1448 dict. File state is updated by calling the `addfile`, `removefile` and
1461 dict. File state is updated by calling the `addfile`, `removefile` and
1449 `dropfile` methods.
1462 `dropfile` methods.
1450
1463
1451 - `copymap` maps destination filenames to their source filename.
1464 - `copymap` maps destination filenames to their source filename.
1452
1465
1453 The dirstate also provides the following views onto the state:
1466 The dirstate also provides the following views onto the state:
1454
1467
1455 - `nonnormalset` is a set of the filenames that have state other
1468 - `nonnormalset` is a set of the filenames that have state other
1456 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1469 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1457
1470
1458 - `otherparentset` is a set of the filenames that are marked as coming
1471 - `otherparentset` is a set of the filenames that are marked as coming
1459 from the second parent when the dirstate is currently being merged.
1472 from the second parent when the dirstate is currently being merged.
1460
1473
1461 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1474 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1462 form that they appear as in the dirstate.
1475 form that they appear as in the dirstate.
1463
1476
1464 - `dirfoldmap` is a dict mapping normalized directory names to the
1477 - `dirfoldmap` is a dict mapping normalized directory names to the
1465 denormalized form that they appear as in the dirstate.
1478 denormalized form that they appear as in the dirstate.
1466 """
1479 """
1467
1480
1468 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
1481 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
1469 self._ui = ui
1482 self._ui = ui
1470 self._opener = opener
1483 self._opener = opener
1471 self._root = root
1484 self._root = root
1472 self._filename = b'dirstate'
1485 self._filename = b'dirstate'
1473 self._nodelen = 20
1486 self._nodelen = 20
1474 self._nodeconstants = nodeconstants
1487 self._nodeconstants = nodeconstants
1475 assert (
1488 assert (
1476 not use_dirstate_v2
1489 not use_dirstate_v2
1477 ), "should have detected unsupported requirement"
1490 ), "should have detected unsupported requirement"
1478
1491
1479 self._parents = None
1492 self._parents = None
1480 self._dirtyparents = False
1493 self._dirtyparents = False
1481
1494
1482 # for consistent view between _pl() and _read() invocations
1495 # for consistent view between _pl() and _read() invocations
1483 self._pendingmode = None
1496 self._pendingmode = None
1484
1497
1485 @propertycache
1498 @propertycache
1486 def _map(self):
1499 def _map(self):
1487 self._map = {}
1500 self._map = {}
1488 self.read()
1501 self.read()
1489 return self._map
1502 return self._map
1490
1503
1491 @propertycache
1504 @propertycache
1492 def copymap(self):
1505 def copymap(self):
1493 self.copymap = {}
1506 self.copymap = {}
1494 self._map
1507 self._map
1495 return self.copymap
1508 return self.copymap
1496
1509
1497 def directories(self):
1510 def directories(self):
1498 # Rust / dirstate-v2 only
1511 # Rust / dirstate-v2 only
1499 return []
1512 return []
1500
1513
1501 def clear(self):
1514 def clear(self):
1502 self._map.clear()
1515 self._map.clear()
1503 self.copymap.clear()
1516 self.copymap.clear()
1504 self.setparents(self._nodeconstants.nullid, self._nodeconstants.nullid)
1517 self.setparents(self._nodeconstants.nullid, self._nodeconstants.nullid)
1505 util.clearcachedproperty(self, b"_dirs")
1518 util.clearcachedproperty(self, b"_dirs")
1506 util.clearcachedproperty(self, b"_alldirs")
1519 util.clearcachedproperty(self, b"_alldirs")
1507 util.clearcachedproperty(self, b"filefoldmap")
1520 util.clearcachedproperty(self, b"filefoldmap")
1508 util.clearcachedproperty(self, b"dirfoldmap")
1521 util.clearcachedproperty(self, b"dirfoldmap")
1509 util.clearcachedproperty(self, b"nonnormalset")
1522 util.clearcachedproperty(self, b"nonnormalset")
1510 util.clearcachedproperty(self, b"otherparentset")
1523 util.clearcachedproperty(self, b"otherparentset")
1511
1524
1512 def items(self):
1525 def items(self):
1513 return pycompat.iteritems(self._map)
1526 return pycompat.iteritems(self._map)
1514
1527
1515 # forward for python2,3 compat
1528 # forward for python2,3 compat
1516 iteritems = items
1529 iteritems = items
1517
1530
1518 def __len__(self):
1531 def __len__(self):
1519 return len(self._map)
1532 return len(self._map)
1520
1533
1521 def __iter__(self):
1534 def __iter__(self):
1522 return iter(self._map)
1535 return iter(self._map)
1523
1536
1524 def get(self, key, default=None):
1537 def get(self, key, default=None):
1525 return self._map.get(key, default)
1538 return self._map.get(key, default)
1526
1539
1527 def __contains__(self, key):
1540 def __contains__(self, key):
1528 return key in self._map
1541 return key in self._map
1529
1542
1530 def __getitem__(self, key):
1543 def __getitem__(self, key):
1531 return self._map[key]
1544 return self._map[key]
1532
1545
1533 def keys(self):
1546 def keys(self):
1534 return self._map.keys()
1547 return self._map.keys()
1535
1548
1536 def preload(self):
1549 def preload(self):
1537 """Loads the underlying data, if it's not already loaded"""
1550 """Loads the underlying data, if it's not already loaded"""
1538 self._map
1551 self._map
1539
1552
1540 def addfile(self, f, oldstate, state, mode, size, mtime):
1553 def addfile(self, f, oldstate, state, mode, size, mtime):
1541 """Add a tracked file to the dirstate."""
1554 """Add a tracked file to the dirstate."""
1542 if oldstate in b"?r" and "_dirs" in self.__dict__:
1555 if oldstate in b"?r" and "_dirs" in self.__dict__:
1543 self._dirs.addpath(f)
1556 self._dirs.addpath(f)
1544 if oldstate == b"?" and "_alldirs" in self.__dict__:
1557 if oldstate == b"?" and "_alldirs" in self.__dict__:
1545 self._alldirs.addpath(f)
1558 self._alldirs.addpath(f)
1546 self._map[f] = dirstatetuple(state, mode, size, mtime)
1559 self._map[f] = dirstatetuple(state, mode, size, mtime)
1547 if state != b'n' or mtime == AMBIGUOUS_TIME:
1560 if state != b'n' or mtime == AMBIGUOUS_TIME:
1548 self.nonnormalset.add(f)
1561 self.nonnormalset.add(f)
1549 if size == FROM_P2:
1562 if size == FROM_P2:
1550 self.otherparentset.add(f)
1563 self.otherparentset.add(f)
1551
1564
1552 def removefile(self, f, oldstate, size):
1565 def removefile(self, f, oldstate, size):
1553 """
1566 """
1554 Mark a file as removed in the dirstate.
1567 Mark a file as removed in the dirstate.
1555
1568
1556 The `size` parameter is used to store sentinel values that indicate
1569 The `size` parameter is used to store sentinel values that indicate
1557 the file's previous state. In the future, we should refactor this
1570 the file's previous state. In the future, we should refactor this
1558 to be more explicit about what that state is.
1571 to be more explicit about what that state is.
1559 """
1572 """
1560 if oldstate not in b"?r" and "_dirs" in self.__dict__:
1573 if oldstate not in b"?r" and "_dirs" in self.__dict__:
1561 self._dirs.delpath(f)
1574 self._dirs.delpath(f)
1562 if oldstate == b"?" and "_alldirs" in self.__dict__:
1575 if oldstate == b"?" and "_alldirs" in self.__dict__:
1563 self._alldirs.addpath(f)
1576 self._alldirs.addpath(f)
1564 if "filefoldmap" in self.__dict__:
1577 if "filefoldmap" in self.__dict__:
1565 normed = util.normcase(f)
1578 normed = util.normcase(f)
1566 self.filefoldmap.pop(normed, None)
1579 self.filefoldmap.pop(normed, None)
1567 self._map[f] = dirstatetuple(b'r', 0, size, 0)
1580 self._map[f] = dirstatetuple(b'r', 0, size, 0)
1568 self.nonnormalset.add(f)
1581 self.nonnormalset.add(f)
1569
1582
1570 def dropfile(self, f, oldstate):
1583 def dropfile(self, f, oldstate):
1571 """
1584 """
1572 Remove a file from the dirstate. Returns True if the file was
1585 Remove a file from the dirstate. Returns True if the file was
1573 previously recorded.
1586 previously recorded.
1574 """
1587 """
1575 exists = self._map.pop(f, None) is not None
1588 exists = self._map.pop(f, None) is not None
1576 if exists:
1589 if exists:
1577 if oldstate != b"r" and "_dirs" in self.__dict__:
1590 if oldstate != b"r" and "_dirs" in self.__dict__:
1578 self._dirs.delpath(f)
1591 self._dirs.delpath(f)
1579 if "_alldirs" in self.__dict__:
1592 if "_alldirs" in self.__dict__:
1580 self._alldirs.delpath(f)
1593 self._alldirs.delpath(f)
1581 if "filefoldmap" in self.__dict__:
1594 if "filefoldmap" in self.__dict__:
1582 normed = util.normcase(f)
1595 normed = util.normcase(f)
1583 self.filefoldmap.pop(normed, None)
1596 self.filefoldmap.pop(normed, None)
1584 self.nonnormalset.discard(f)
1597 self.nonnormalset.discard(f)
1585 return exists
1598 return exists
1586
1599
1587 def clearambiguoustimes(self, files, now):
1600 def clearambiguoustimes(self, files, now):
1588 for f in files:
1601 for f in files:
1589 e = self.get(f)
1602 e = self.get(f)
1590 if e is not None and e[0] == b'n' and e[3] == now:
1603 if e is not None and e[0] == b'n' and e[3] == now:
1591 self._map[f] = dirstatetuple(e[0], e[1], e[2], AMBIGUOUS_TIME)
1604 self._map[f] = dirstatetuple(e[0], e[1], e[2], AMBIGUOUS_TIME)
1592 self.nonnormalset.add(f)
1605 self.nonnormalset.add(f)
1593
1606
1594 def nonnormalentries(self):
1607 def nonnormalentries(self):
1595 '''Compute the nonnormal dirstate entries from the dmap'''
1608 '''Compute the nonnormal dirstate entries from the dmap'''
1596 try:
1609 try:
1597 return parsers.nonnormalotherparententries(self._map)
1610 return parsers.nonnormalotherparententries(self._map)
1598 except AttributeError:
1611 except AttributeError:
1599 nonnorm = set()
1612 nonnorm = set()
1600 otherparent = set()
1613 otherparent = set()
1601 for fname, e in pycompat.iteritems(self._map):
1614 for fname, e in pycompat.iteritems(self._map):
1602 if e[0] != b'n' or e[3] == AMBIGUOUS_TIME:
1615 if e[0] != b'n' or e[3] == AMBIGUOUS_TIME:
1603 nonnorm.add(fname)
1616 nonnorm.add(fname)
1604 if e[0] == b'n' and e[2] == FROM_P2:
1617 if e[0] == b'n' and e[2] == FROM_P2:
1605 otherparent.add(fname)
1618 otherparent.add(fname)
1606 return nonnorm, otherparent
1619 return nonnorm, otherparent
1607
1620
1608 @propertycache
1621 @propertycache
1609 def filefoldmap(self):
1622 def filefoldmap(self):
1610 """Returns a dictionary mapping normalized case paths to their
1623 """Returns a dictionary mapping normalized case paths to their
1611 non-normalized versions.
1624 non-normalized versions.
1612 """
1625 """
1613 try:
1626 try:
1614 makefilefoldmap = parsers.make_file_foldmap
1627 makefilefoldmap = parsers.make_file_foldmap
1615 except AttributeError:
1628 except AttributeError:
1616 pass
1629 pass
1617 else:
1630 else:
1618 return makefilefoldmap(
1631 return makefilefoldmap(
1619 self._map, util.normcasespec, util.normcasefallback
1632 self._map, util.normcasespec, util.normcasefallback
1620 )
1633 )
1621
1634
1622 f = {}
1635 f = {}
1623 normcase = util.normcase
1636 normcase = util.normcase
1624 for name, s in pycompat.iteritems(self._map):
1637 for name, s in pycompat.iteritems(self._map):
1625 if s[0] != b'r':
1638 if s[0] != b'r':
1626 f[normcase(name)] = name
1639 f[normcase(name)] = name
1627 f[b'.'] = b'.' # prevents useless util.fspath() invocation
1640 f[b'.'] = b'.' # prevents useless util.fspath() invocation
1628 return f
1641 return f
1629
1642
1630 def hastrackeddir(self, d):
1643 def hastrackeddir(self, d):
1631 """
1644 """
1632 Returns True if the dirstate contains a tracked (not removed) file
1645 Returns True if the dirstate contains a tracked (not removed) file
1633 in this directory.
1646 in this directory.
1634 """
1647 """
1635 return d in self._dirs
1648 return d in self._dirs
1636
1649
1637 def hasdir(self, d):
1650 def hasdir(self, d):
1638 """
1651 """
1639 Returns True if the dirstate contains a file (tracked or removed)
1652 Returns True if the dirstate contains a file (tracked or removed)
1640 in this directory.
1653 in this directory.
1641 """
1654 """
1642 return d in self._alldirs
1655 return d in self._alldirs
1643
1656
1644 @propertycache
1657 @propertycache
1645 def _dirs(self):
1658 def _dirs(self):
1646 return pathutil.dirs(self._map, b'r')
1659 return pathutil.dirs(self._map, b'r')
1647
1660
1648 @propertycache
1661 @propertycache
1649 def _alldirs(self):
1662 def _alldirs(self):
1650 return pathutil.dirs(self._map)
1663 return pathutil.dirs(self._map)
1651
1664
1652 def _opendirstatefile(self):
1665 def _opendirstatefile(self):
1653 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1666 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1654 if self._pendingmode is not None and self._pendingmode != mode:
1667 if self._pendingmode is not None and self._pendingmode != mode:
1655 fp.close()
1668 fp.close()
1656 raise error.Abort(
1669 raise error.Abort(
1657 _(b'working directory state may be changed parallelly')
1670 _(b'working directory state may be changed parallelly')
1658 )
1671 )
1659 self._pendingmode = mode
1672 self._pendingmode = mode
1660 return fp
1673 return fp
1661
1674
1662 def parents(self):
1675 def parents(self):
1663 if not self._parents:
1676 if not self._parents:
1664 try:
1677 try:
1665 fp = self._opendirstatefile()
1678 fp = self._opendirstatefile()
1666 st = fp.read(2 * self._nodelen)
1679 st = fp.read(2 * self._nodelen)
1667 fp.close()
1680 fp.close()
1668 except IOError as err:
1681 except IOError as err:
1669 if err.errno != errno.ENOENT:
1682 if err.errno != errno.ENOENT:
1670 raise
1683 raise
1671 # File doesn't exist, so the current state is empty
1684 # File doesn't exist, so the current state is empty
1672 st = b''
1685 st = b''
1673
1686
1674 l = len(st)
1687 l = len(st)
1675 if l == self._nodelen * 2:
1688 if l == self._nodelen * 2:
1676 self._parents = (
1689 self._parents = (
1677 st[: self._nodelen],
1690 st[: self._nodelen],
1678 st[self._nodelen : 2 * self._nodelen],
1691 st[self._nodelen : 2 * self._nodelen],
1679 )
1692 )
1680 elif l == 0:
1693 elif l == 0:
1681 self._parents = (
1694 self._parents = (
1682 self._nodeconstants.nullid,
1695 self._nodeconstants.nullid,
1683 self._nodeconstants.nullid,
1696 self._nodeconstants.nullid,
1684 )
1697 )
1685 else:
1698 else:
1686 raise error.Abort(
1699 raise error.Abort(
1687 _(b'working directory state appears damaged!')
1700 _(b'working directory state appears damaged!')
1688 )
1701 )
1689
1702
1690 return self._parents
1703 return self._parents
1691
1704
1692 def setparents(self, p1, p2):
1705 def setparents(self, p1, p2):
1693 self._parents = (p1, p2)
1706 self._parents = (p1, p2)
1694 self._dirtyparents = True
1707 self._dirtyparents = True
1695
1708
1696 def read(self):
1709 def read(self):
1697 # ignore HG_PENDING because identity is used only for writing
1710 # ignore HG_PENDING because identity is used only for writing
1698 self.identity = util.filestat.frompath(
1711 self.identity = util.filestat.frompath(
1699 self._opener.join(self._filename)
1712 self._opener.join(self._filename)
1700 )
1713 )
1701
1714
1702 try:
1715 try:
1703 fp = self._opendirstatefile()
1716 fp = self._opendirstatefile()
1704 try:
1717 try:
1705 st = fp.read()
1718 st = fp.read()
1706 finally:
1719 finally:
1707 fp.close()
1720 fp.close()
1708 except IOError as err:
1721 except IOError as err:
1709 if err.errno != errno.ENOENT:
1722 if err.errno != errno.ENOENT:
1710 raise
1723 raise
1711 return
1724 return
1712 if not st:
1725 if not st:
1713 return
1726 return
1714
1727
1715 if util.safehasattr(parsers, b'dict_new_presized'):
1728 if util.safehasattr(parsers, b'dict_new_presized'):
1716 # Make an estimate of the number of files in the dirstate based on
1729 # Make an estimate of the number of files in the dirstate based on
1717 # its size. This trades wasting some memory for avoiding costly
1730 # its size. This trades wasting some memory for avoiding costly
1718 # resizes. Each entry have a prefix of 17 bytes followed by one or
1731 # resizes. Each entry have a prefix of 17 bytes followed by one or
1719 # two path names. Studies on various large-scale real-world repositories
1732 # two path names. Studies on various large-scale real-world repositories
1720 # found 54 bytes a reasonable upper limit for the average path names.
1733 # found 54 bytes a reasonable upper limit for the average path names.
1721 # Copy entries are ignored for the sake of this estimate.
1734 # Copy entries are ignored for the sake of this estimate.
1722 self._map = parsers.dict_new_presized(len(st) // 71)
1735 self._map = parsers.dict_new_presized(len(st) // 71)
1723
1736
1724 # Python's garbage collector triggers a GC each time a certain number
1737 # Python's garbage collector triggers a GC each time a certain number
1725 # of container objects (the number being defined by
1738 # of container objects (the number being defined by
1726 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1739 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1727 # for each file in the dirstate. The C version then immediately marks
1740 # for each file in the dirstate. The C version then immediately marks
1728 # them as not to be tracked by the collector. However, this has no
1741 # them as not to be tracked by the collector. However, this has no
1729 # effect on when GCs are triggered, only on what objects the GC looks
1742 # effect on when GCs are triggered, only on what objects the GC looks
1730 # into. This means that O(number of files) GCs are unavoidable.
1743 # into. This means that O(number of files) GCs are unavoidable.
1731 # Depending on when in the process's lifetime the dirstate is parsed,
1744 # Depending on when in the process's lifetime the dirstate is parsed,
1732 # this can get very expensive. As a workaround, disable GC while
1745 # this can get very expensive. As a workaround, disable GC while
1733 # parsing the dirstate.
1746 # parsing the dirstate.
1734 #
1747 #
1735 # (we cannot decorate the function directly since it is in a C module)
1748 # (we cannot decorate the function directly since it is in a C module)
1736 parse_dirstate = util.nogc(parsers.parse_dirstate)
1749 parse_dirstate = util.nogc(parsers.parse_dirstate)
1737 p = parse_dirstate(self._map, self.copymap, st)
1750 p = parse_dirstate(self._map, self.copymap, st)
1738 if not self._dirtyparents:
1751 if not self._dirtyparents:
1739 self.setparents(*p)
1752 self.setparents(*p)
1740
1753
1741 # Avoid excess attribute lookups by fast pathing certain checks
1754 # Avoid excess attribute lookups by fast pathing certain checks
1742 self.__contains__ = self._map.__contains__
1755 self.__contains__ = self._map.__contains__
1743 self.__getitem__ = self._map.__getitem__
1756 self.__getitem__ = self._map.__getitem__
1744 self.get = self._map.get
1757 self.get = self._map.get
1745
1758
1746 def write(self, st, now):
1759 def write(self, st, now):
1747 st.write(
1760 st.write(
1748 parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
1761 parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
1749 )
1762 )
1750 st.close()
1763 st.close()
1751 self._dirtyparents = False
1764 self._dirtyparents = False
1752 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1765 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1753
1766
1754 @propertycache
1767 @propertycache
1755 def nonnormalset(self):
1768 def nonnormalset(self):
1756 nonnorm, otherparents = self.nonnormalentries()
1769 nonnorm, otherparents = self.nonnormalentries()
1757 self.otherparentset = otherparents
1770 self.otherparentset = otherparents
1758 return nonnorm
1771 return nonnorm
1759
1772
1760 @propertycache
1773 @propertycache
1761 def otherparentset(self):
1774 def otherparentset(self):
1762 nonnorm, otherparents = self.nonnormalentries()
1775 nonnorm, otherparents = self.nonnormalentries()
1763 self.nonnormalset = nonnorm
1776 self.nonnormalset = nonnorm
1764 return otherparents
1777 return otherparents
1765
1778
1766 def non_normal_or_other_parent_paths(self):
1779 def non_normal_or_other_parent_paths(self):
1767 return self.nonnormalset.union(self.otherparentset)
1780 return self.nonnormalset.union(self.otherparentset)
1768
1781
1769 @propertycache
1782 @propertycache
1770 def identity(self):
1783 def identity(self):
1771 self._map
1784 self._map
1772 return self.identity
1785 return self.identity
1773
1786
1774 @propertycache
1787 @propertycache
1775 def dirfoldmap(self):
1788 def dirfoldmap(self):
1776 f = {}
1789 f = {}
1777 normcase = util.normcase
1790 normcase = util.normcase
1778 for name in self._dirs:
1791 for name in self._dirs:
1779 f[normcase(name)] = name
1792 f[normcase(name)] = name
1780 return f
1793 return f
1781
1794
1782
1795
1783 if rustmod is not None:
1796 if rustmod is not None:
1784
1797
1785 class dirstatemap(object):
1798 class dirstatemap(object):
1786 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
1799 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
1787 self._use_dirstate_v2 = use_dirstate_v2
1800 self._use_dirstate_v2 = use_dirstate_v2
1788 self._nodeconstants = nodeconstants
1801 self._nodeconstants = nodeconstants
1789 self._ui = ui
1802 self._ui = ui
1790 self._opener = opener
1803 self._opener = opener
1791 self._root = root
1804 self._root = root
1792 self._filename = b'dirstate'
1805 self._filename = b'dirstate'
1793 self._nodelen = 20 # Also update Rust code when changing this!
1806 self._nodelen = 20 # Also update Rust code when changing this!
1794 self._parents = None
1807 self._parents = None
1795 self._dirtyparents = False
1808 self._dirtyparents = False
1796
1809
1797 # for consistent view between _pl() and _read() invocations
1810 # for consistent view between _pl() and _read() invocations
1798 self._pendingmode = None
1811 self._pendingmode = None
1799
1812
1800 self._use_dirstate_tree = self._ui.configbool(
1813 self._use_dirstate_tree = self._ui.configbool(
1801 b"experimental",
1814 b"experimental",
1802 b"dirstate-tree.in-memory",
1815 b"dirstate-tree.in-memory",
1803 False,
1816 False,
1804 )
1817 )
1805
1818
1806 def addfile(self, *args, **kwargs):
1819 def addfile(self, *args, **kwargs):
1807 return self._rustmap.addfile(*args, **kwargs)
1820 return self._rustmap.addfile(*args, **kwargs)
1808
1821
1809 def removefile(self, *args, **kwargs):
1822 def removefile(self, *args, **kwargs):
1810 return self._rustmap.removefile(*args, **kwargs)
1823 return self._rustmap.removefile(*args, **kwargs)
1811
1824
1812 def dropfile(self, *args, **kwargs):
1825 def dropfile(self, *args, **kwargs):
1813 return self._rustmap.dropfile(*args, **kwargs)
1826 return self._rustmap.dropfile(*args, **kwargs)
1814
1827
1815 def clearambiguoustimes(self, *args, **kwargs):
1828 def clearambiguoustimes(self, *args, **kwargs):
1816 return self._rustmap.clearambiguoustimes(*args, **kwargs)
1829 return self._rustmap.clearambiguoustimes(*args, **kwargs)
1817
1830
1818 def nonnormalentries(self):
1831 def nonnormalentries(self):
1819 return self._rustmap.nonnormalentries()
1832 return self._rustmap.nonnormalentries()
1820
1833
1821 def get(self, *args, **kwargs):
1834 def get(self, *args, **kwargs):
1822 return self._rustmap.get(*args, **kwargs)
1835 return self._rustmap.get(*args, **kwargs)
1823
1836
1824 @property
1837 @property
1825 def copymap(self):
1838 def copymap(self):
1826 return self._rustmap.copymap()
1839 return self._rustmap.copymap()
1827
1840
1828 def directories(self):
1841 def directories(self):
1829 return self._rustmap.directories()
1842 return self._rustmap.directories()
1830
1843
1831 def preload(self):
1844 def preload(self):
1832 self._rustmap
1845 self._rustmap
1833
1846
1834 def clear(self):
1847 def clear(self):
1835 self._rustmap.clear()
1848 self._rustmap.clear()
1836 self.setparents(
1849 self.setparents(
1837 self._nodeconstants.nullid, self._nodeconstants.nullid
1850 self._nodeconstants.nullid, self._nodeconstants.nullid
1838 )
1851 )
1839 util.clearcachedproperty(self, b"_dirs")
1852 util.clearcachedproperty(self, b"_dirs")
1840 util.clearcachedproperty(self, b"_alldirs")
1853 util.clearcachedproperty(self, b"_alldirs")
1841 util.clearcachedproperty(self, b"dirfoldmap")
1854 util.clearcachedproperty(self, b"dirfoldmap")
1842
1855
1843 def items(self):
1856 def items(self):
1844 return self._rustmap.items()
1857 return self._rustmap.items()
1845
1858
1846 def keys(self):
1859 def keys(self):
1847 return iter(self._rustmap)
1860 return iter(self._rustmap)
1848
1861
1849 def __contains__(self, key):
1862 def __contains__(self, key):
1850 return key in self._rustmap
1863 return key in self._rustmap
1851
1864
1852 def __getitem__(self, item):
1865 def __getitem__(self, item):
1853 return self._rustmap[item]
1866 return self._rustmap[item]
1854
1867
1855 def __len__(self):
1868 def __len__(self):
1856 return len(self._rustmap)
1869 return len(self._rustmap)
1857
1870
1858 def __iter__(self):
1871 def __iter__(self):
1859 return iter(self._rustmap)
1872 return iter(self._rustmap)
1860
1873
1861 # forward for python2,3 compat
1874 # forward for python2,3 compat
1862 iteritems = items
1875 iteritems = items
1863
1876
1864 def _opendirstatefile(self):
1877 def _opendirstatefile(self):
1865 fp, mode = txnutil.trypending(
1878 fp, mode = txnutil.trypending(
1866 self._root, self._opener, self._filename
1879 self._root, self._opener, self._filename
1867 )
1880 )
1868 if self._pendingmode is not None and self._pendingmode != mode:
1881 if self._pendingmode is not None and self._pendingmode != mode:
1869 fp.close()
1882 fp.close()
1870 raise error.Abort(
1883 raise error.Abort(
1871 _(b'working directory state may be changed parallelly')
1884 _(b'working directory state may be changed parallelly')
1872 )
1885 )
1873 self._pendingmode = mode
1886 self._pendingmode = mode
1874 return fp
1887 return fp
1875
1888
1876 def setparents(self, p1, p2):
1889 def setparents(self, p1, p2):
1877 self._parents = (p1, p2)
1890 self._parents = (p1, p2)
1878 self._dirtyparents = True
1891 self._dirtyparents = True
1879
1892
1880 def parents(self):
1893 def parents(self):
1881 if not self._parents:
1894 if not self._parents:
1882 if self._use_dirstate_v2:
1895 if self._use_dirstate_v2:
1883 offset = len(rustmod.V2_FORMAT_MARKER)
1896 offset = len(rustmod.V2_FORMAT_MARKER)
1884 else:
1897 else:
1885 offset = 0
1898 offset = 0
1886 read_len = offset + self._nodelen * 2
1899 read_len = offset + self._nodelen * 2
1887 try:
1900 try:
1888 fp = self._opendirstatefile()
1901 fp = self._opendirstatefile()
1889 st = fp.read(read_len)
1902 st = fp.read(read_len)
1890 fp.close()
1903 fp.close()
1891 except IOError as err:
1904 except IOError as err:
1892 if err.errno != errno.ENOENT:
1905 if err.errno != errno.ENOENT:
1893 raise
1906 raise
1894 # File doesn't exist, so the current state is empty
1907 # File doesn't exist, so the current state is empty
1895 st = b''
1908 st = b''
1896
1909
1897 l = len(st)
1910 l = len(st)
1898 if l == read_len:
1911 if l == read_len:
1899 st = st[offset:]
1912 st = st[offset:]
1900 self._parents = (
1913 self._parents = (
1901 st[: self._nodelen],
1914 st[: self._nodelen],
1902 st[self._nodelen : 2 * self._nodelen],
1915 st[self._nodelen : 2 * self._nodelen],
1903 )
1916 )
1904 elif l == 0:
1917 elif l == 0:
1905 self._parents = (
1918 self._parents = (
1906 self._nodeconstants.nullid,
1919 self._nodeconstants.nullid,
1907 self._nodeconstants.nullid,
1920 self._nodeconstants.nullid,
1908 )
1921 )
1909 else:
1922 else:
1910 raise error.Abort(
1923 raise error.Abort(
1911 _(b'working directory state appears damaged!')
1924 _(b'working directory state appears damaged!')
1912 )
1925 )
1913
1926
1914 return self._parents
1927 return self._parents
1915
1928
1916 @propertycache
1929 @propertycache
1917 def _rustmap(self):
1930 def _rustmap(self):
1918 """
1931 """
1919 Fills the Dirstatemap when called.
1932 Fills the Dirstatemap when called.
1920 """
1933 """
1921 # ignore HG_PENDING because identity is used only for writing
1934 # ignore HG_PENDING because identity is used only for writing
1922 self.identity = util.filestat.frompath(
1935 self.identity = util.filestat.frompath(
1923 self._opener.join(self._filename)
1936 self._opener.join(self._filename)
1924 )
1937 )
1925
1938
1926 try:
1939 try:
1927 fp = self._opendirstatefile()
1940 fp = self._opendirstatefile()
1928 try:
1941 try:
1929 st = fp.read()
1942 st = fp.read()
1930 finally:
1943 finally:
1931 fp.close()
1944 fp.close()
1932 except IOError as err:
1945 except IOError as err:
1933 if err.errno != errno.ENOENT:
1946 if err.errno != errno.ENOENT:
1934 raise
1947 raise
1935 st = b''
1948 st = b''
1936
1949
1937 self._rustmap, parents = rustmod.DirstateMap.new(
1950 self._rustmap, parents = rustmod.DirstateMap.new(
1938 self._use_dirstate_tree, self._use_dirstate_v2, st
1951 self._use_dirstate_tree, self._use_dirstate_v2, st
1939 )
1952 )
1940
1953
1941 if parents and not self._dirtyparents:
1954 if parents and not self._dirtyparents:
1942 self.setparents(*parents)
1955 self.setparents(*parents)
1943
1956
1944 self.__contains__ = self._rustmap.__contains__
1957 self.__contains__ = self._rustmap.__contains__
1945 self.__getitem__ = self._rustmap.__getitem__
1958 self.__getitem__ = self._rustmap.__getitem__
1946 self.get = self._rustmap.get
1959 self.get = self._rustmap.get
1947 return self._rustmap
1960 return self._rustmap
1948
1961
1949 def write(self, st, now):
1962 def write(self, st, now):
1950 parents = self.parents()
1963 parents = self.parents()
1951 packed = self._rustmap.write(
1964 packed = self._rustmap.write(
1952 self._use_dirstate_v2, parents[0], parents[1], now
1965 self._use_dirstate_v2, parents[0], parents[1], now
1953 )
1966 )
1954 st.write(packed)
1967 st.write(packed)
1955 st.close()
1968 st.close()
1956 self._dirtyparents = False
1969 self._dirtyparents = False
1957
1970
1958 @propertycache
1971 @propertycache
1959 def filefoldmap(self):
1972 def filefoldmap(self):
1960 """Returns a dictionary mapping normalized case paths to their
1973 """Returns a dictionary mapping normalized case paths to their
1961 non-normalized versions.
1974 non-normalized versions.
1962 """
1975 """
1963 return self._rustmap.filefoldmapasdict()
1976 return self._rustmap.filefoldmapasdict()
1964
1977
1965 def hastrackeddir(self, d):
1978 def hastrackeddir(self, d):
1966 return self._rustmap.hastrackeddir(d)
1979 return self._rustmap.hastrackeddir(d)
1967
1980
1968 def hasdir(self, d):
1981 def hasdir(self, d):
1969 return self._rustmap.hasdir(d)
1982 return self._rustmap.hasdir(d)
1970
1983
1971 @propertycache
1984 @propertycache
1972 def identity(self):
1985 def identity(self):
1973 self._rustmap
1986 self._rustmap
1974 return self.identity
1987 return self.identity
1975
1988
1976 @property
1989 @property
1977 def nonnormalset(self):
1990 def nonnormalset(self):
1978 nonnorm = self._rustmap.non_normal_entries()
1991 nonnorm = self._rustmap.non_normal_entries()
1979 return nonnorm
1992 return nonnorm
1980
1993
1981 @propertycache
1994 @propertycache
1982 def otherparentset(self):
1995 def otherparentset(self):
1983 otherparents = self._rustmap.other_parent_entries()
1996 otherparents = self._rustmap.other_parent_entries()
1984 return otherparents
1997 return otherparents
1985
1998
1986 def non_normal_or_other_parent_paths(self):
1999 def non_normal_or_other_parent_paths(self):
1987 return self._rustmap.non_normal_or_other_parent_paths()
2000 return self._rustmap.non_normal_or_other_parent_paths()
1988
2001
1989 @propertycache
2002 @propertycache
1990 def dirfoldmap(self):
2003 def dirfoldmap(self):
1991 f = {}
2004 f = {}
1992 normcase = util.normcase
2005 normcase = util.normcase
1993 for name, _pseudo_entry in self.directories():
2006 for name, _pseudo_entry in self.directories():
1994 f[normcase(name)] = name
2007 f[normcase(name)] = name
1995 return f
2008 return f
General Comments 0
You need to be logged in to leave comments. Login now