##// END OF EJS Templates
dirstate: introduce a symbolic constant for the AMBIGUOUS_TIME marker...
marmoute -
r48278:3f13dfa1 default
parent child Browse files
Show More
@@ -1,1988 +1,1991 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .pycompat import delattr
17 from .pycompat import delattr
18
18
19 from hgdemandimport import tracing
19 from hgdemandimport import tracing
20
20
21 from . import (
21 from . import (
22 encoding,
22 encoding,
23 error,
23 error,
24 match as matchmod,
24 match as matchmod,
25 pathutil,
25 pathutil,
26 policy,
26 policy,
27 pycompat,
27 pycompat,
28 scmutil,
28 scmutil,
29 sparse,
29 sparse,
30 txnutil,
30 txnutil,
31 util,
31 util,
32 )
32 )
33
33
34 from .interfaces import (
34 from .interfaces import (
35 dirstate as intdirstate,
35 dirstate as intdirstate,
36 util as interfaceutil,
36 util as interfaceutil,
37 )
37 )
38
38
39 parsers = policy.importmod('parsers')
39 parsers = policy.importmod('parsers')
40 rustmod = policy.importrust('dirstate')
40 rustmod = policy.importrust('dirstate')
41
41
42 SUPPORTS_DIRSTATE_V2 = rustmod is not None
42 SUPPORTS_DIRSTATE_V2 = rustmod is not None
43
43
44 propertycache = util.propertycache
44 propertycache = util.propertycache
45 filecache = scmutil.filecache
45 filecache = scmutil.filecache
46 _rangemask = 0x7FFFFFFF
46 _rangemask = 0x7FFFFFFF
47
47
48 dirstatetuple = parsers.dirstatetuple
48 dirstatetuple = parsers.dirstatetuple
49
49
50
50
51 # a special value used internally for `size` if the file come from the other parent
51 # a special value used internally for `size` if the file come from the other parent
52 FROM_P2 = -2
52 FROM_P2 = -2
53
53
54 # a special value used internally for `size` if the file is modified/merged/added
54 # a special value used internally for `size` if the file is modified/merged/added
55 NONNORMAL = -1
55 NONNORMAL = -1
56
56
57 # a special value used internally for `time` if the time is ambigeous
58 AMBIGUOUS_TIME = -1
59
57
60
58 class repocache(filecache):
61 class repocache(filecache):
59 """filecache for files in .hg/"""
62 """filecache for files in .hg/"""
60
63
61 def join(self, obj, fname):
64 def join(self, obj, fname):
62 return obj._opener.join(fname)
65 return obj._opener.join(fname)
63
66
64
67
65 class rootcache(filecache):
68 class rootcache(filecache):
66 """filecache for files in the repository root"""
69 """filecache for files in the repository root"""
67
70
68 def join(self, obj, fname):
71 def join(self, obj, fname):
69 return obj._join(fname)
72 return obj._join(fname)
70
73
71
74
72 def _getfsnow(vfs):
75 def _getfsnow(vfs):
73 '''Get "now" timestamp on filesystem'''
76 '''Get "now" timestamp on filesystem'''
74 tmpfd, tmpname = vfs.mkstemp()
77 tmpfd, tmpname = vfs.mkstemp()
75 try:
78 try:
76 return os.fstat(tmpfd)[stat.ST_MTIME]
79 return os.fstat(tmpfd)[stat.ST_MTIME]
77 finally:
80 finally:
78 os.close(tmpfd)
81 os.close(tmpfd)
79 vfs.unlink(tmpname)
82 vfs.unlink(tmpname)
80
83
81
84
82 @interfaceutil.implementer(intdirstate.idirstate)
85 @interfaceutil.implementer(intdirstate.idirstate)
83 class dirstate(object):
86 class dirstate(object):
84 def __init__(
87 def __init__(
85 self,
88 self,
86 opener,
89 opener,
87 ui,
90 ui,
88 root,
91 root,
89 validate,
92 validate,
90 sparsematchfn,
93 sparsematchfn,
91 nodeconstants,
94 nodeconstants,
92 use_dirstate_v2,
95 use_dirstate_v2,
93 ):
96 ):
94 """Create a new dirstate object.
97 """Create a new dirstate object.
95
98
96 opener is an open()-like callable that can be used to open the
99 opener is an open()-like callable that can be used to open the
97 dirstate file; root is the root of the directory tracked by
100 dirstate file; root is the root of the directory tracked by
98 the dirstate.
101 the dirstate.
99 """
102 """
100 self._use_dirstate_v2 = use_dirstate_v2
103 self._use_dirstate_v2 = use_dirstate_v2
101 self._nodeconstants = nodeconstants
104 self._nodeconstants = nodeconstants
102 self._opener = opener
105 self._opener = opener
103 self._validate = validate
106 self._validate = validate
104 self._root = root
107 self._root = root
105 self._sparsematchfn = sparsematchfn
108 self._sparsematchfn = sparsematchfn
106 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
109 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
107 # UNC path pointing to root share (issue4557)
110 # UNC path pointing to root share (issue4557)
108 self._rootdir = pathutil.normasprefix(root)
111 self._rootdir = pathutil.normasprefix(root)
109 self._dirty = False
112 self._dirty = False
110 self._lastnormaltime = 0
113 self._lastnormaltime = 0
111 self._ui = ui
114 self._ui = ui
112 self._filecache = {}
115 self._filecache = {}
113 self._parentwriters = 0
116 self._parentwriters = 0
114 self._filename = b'dirstate'
117 self._filename = b'dirstate'
115 self._pendingfilename = b'%s.pending' % self._filename
118 self._pendingfilename = b'%s.pending' % self._filename
116 self._plchangecallbacks = {}
119 self._plchangecallbacks = {}
117 self._origpl = None
120 self._origpl = None
118 self._updatedfiles = set()
121 self._updatedfiles = set()
119 self._mapcls = dirstatemap
122 self._mapcls = dirstatemap
120 # Access and cache cwd early, so we don't access it for the first time
123 # Access and cache cwd early, so we don't access it for the first time
121 # after a working-copy update caused it to not exist (accessing it then
124 # after a working-copy update caused it to not exist (accessing it then
122 # raises an exception).
125 # raises an exception).
123 self._cwd
126 self._cwd
124
127
125 def prefetch_parents(self):
128 def prefetch_parents(self):
126 """make sure the parents are loaded
129 """make sure the parents are loaded
127
130
128 Used to avoid a race condition.
131 Used to avoid a race condition.
129 """
132 """
130 self._pl
133 self._pl
131
134
132 @contextlib.contextmanager
135 @contextlib.contextmanager
133 def parentchange(self):
136 def parentchange(self):
134 """Context manager for handling dirstate parents.
137 """Context manager for handling dirstate parents.
135
138
136 If an exception occurs in the scope of the context manager,
139 If an exception occurs in the scope of the context manager,
137 the incoherent dirstate won't be written when wlock is
140 the incoherent dirstate won't be written when wlock is
138 released.
141 released.
139 """
142 """
140 self._parentwriters += 1
143 self._parentwriters += 1
141 yield
144 yield
142 # Typically we want the "undo" step of a context manager in a
145 # Typically we want the "undo" step of a context manager in a
143 # finally block so it happens even when an exception
146 # finally block so it happens even when an exception
144 # occurs. In this case, however, we only want to decrement
147 # occurs. In this case, however, we only want to decrement
145 # parentwriters if the code in the with statement exits
148 # parentwriters if the code in the with statement exits
146 # normally, so we don't have a try/finally here on purpose.
149 # normally, so we don't have a try/finally here on purpose.
147 self._parentwriters -= 1
150 self._parentwriters -= 1
148
151
149 def pendingparentchange(self):
152 def pendingparentchange(self):
150 """Returns true if the dirstate is in the middle of a set of changes
153 """Returns true if the dirstate is in the middle of a set of changes
151 that modify the dirstate parent.
154 that modify the dirstate parent.
152 """
155 """
153 return self._parentwriters > 0
156 return self._parentwriters > 0
154
157
155 @propertycache
158 @propertycache
156 def _map(self):
159 def _map(self):
157 """Return the dirstate contents (see documentation for dirstatemap)."""
160 """Return the dirstate contents (see documentation for dirstatemap)."""
158 self._map = self._mapcls(
161 self._map = self._mapcls(
159 self._ui,
162 self._ui,
160 self._opener,
163 self._opener,
161 self._root,
164 self._root,
162 self._nodeconstants,
165 self._nodeconstants,
163 self._use_dirstate_v2,
166 self._use_dirstate_v2,
164 )
167 )
165 return self._map
168 return self._map
166
169
167 @property
170 @property
168 def _sparsematcher(self):
171 def _sparsematcher(self):
169 """The matcher for the sparse checkout.
172 """The matcher for the sparse checkout.
170
173
171 The working directory may not include every file from a manifest. The
174 The working directory may not include every file from a manifest. The
172 matcher obtained by this property will match a path if it is to be
175 matcher obtained by this property will match a path if it is to be
173 included in the working directory.
176 included in the working directory.
174 """
177 """
175 # TODO there is potential to cache this property. For now, the matcher
178 # TODO there is potential to cache this property. For now, the matcher
176 # is resolved on every access. (But the called function does use a
179 # is resolved on every access. (But the called function does use a
177 # cache to keep the lookup fast.)
180 # cache to keep the lookup fast.)
178 return self._sparsematchfn()
181 return self._sparsematchfn()
179
182
180 @repocache(b'branch')
183 @repocache(b'branch')
181 def _branch(self):
184 def _branch(self):
182 try:
185 try:
183 return self._opener.read(b"branch").strip() or b"default"
186 return self._opener.read(b"branch").strip() or b"default"
184 except IOError as inst:
187 except IOError as inst:
185 if inst.errno != errno.ENOENT:
188 if inst.errno != errno.ENOENT:
186 raise
189 raise
187 return b"default"
190 return b"default"
188
191
189 @property
192 @property
190 def _pl(self):
193 def _pl(self):
191 return self._map.parents()
194 return self._map.parents()
192
195
193 def hasdir(self, d):
196 def hasdir(self, d):
194 return self._map.hastrackeddir(d)
197 return self._map.hastrackeddir(d)
195
198
196 @rootcache(b'.hgignore')
199 @rootcache(b'.hgignore')
197 def _ignore(self):
200 def _ignore(self):
198 files = self._ignorefiles()
201 files = self._ignorefiles()
199 if not files:
202 if not files:
200 return matchmod.never()
203 return matchmod.never()
201
204
202 pats = [b'include:%s' % f for f in files]
205 pats = [b'include:%s' % f for f in files]
203 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
206 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
204
207
205 @propertycache
208 @propertycache
206 def _slash(self):
209 def _slash(self):
207 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
210 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
208
211
209 @propertycache
212 @propertycache
210 def _checklink(self):
213 def _checklink(self):
211 return util.checklink(self._root)
214 return util.checklink(self._root)
212
215
213 @propertycache
216 @propertycache
214 def _checkexec(self):
217 def _checkexec(self):
215 return bool(util.checkexec(self._root))
218 return bool(util.checkexec(self._root))
216
219
217 @propertycache
220 @propertycache
218 def _checkcase(self):
221 def _checkcase(self):
219 return not util.fscasesensitive(self._join(b'.hg'))
222 return not util.fscasesensitive(self._join(b'.hg'))
220
223
221 def _join(self, f):
224 def _join(self, f):
222 # much faster than os.path.join()
225 # much faster than os.path.join()
223 # it's safe because f is always a relative path
226 # it's safe because f is always a relative path
224 return self._rootdir + f
227 return self._rootdir + f
225
228
226 def flagfunc(self, buildfallback):
229 def flagfunc(self, buildfallback):
227 if self._checklink and self._checkexec:
230 if self._checklink and self._checkexec:
228
231
229 def f(x):
232 def f(x):
230 try:
233 try:
231 st = os.lstat(self._join(x))
234 st = os.lstat(self._join(x))
232 if util.statislink(st):
235 if util.statislink(st):
233 return b'l'
236 return b'l'
234 if util.statisexec(st):
237 if util.statisexec(st):
235 return b'x'
238 return b'x'
236 except OSError:
239 except OSError:
237 pass
240 pass
238 return b''
241 return b''
239
242
240 return f
243 return f
241
244
242 fallback = buildfallback()
245 fallback = buildfallback()
243 if self._checklink:
246 if self._checklink:
244
247
245 def f(x):
248 def f(x):
246 if os.path.islink(self._join(x)):
249 if os.path.islink(self._join(x)):
247 return b'l'
250 return b'l'
248 if b'x' in fallback(x):
251 if b'x' in fallback(x):
249 return b'x'
252 return b'x'
250 return b''
253 return b''
251
254
252 return f
255 return f
253 if self._checkexec:
256 if self._checkexec:
254
257
255 def f(x):
258 def f(x):
256 if b'l' in fallback(x):
259 if b'l' in fallback(x):
257 return b'l'
260 return b'l'
258 if util.isexec(self._join(x)):
261 if util.isexec(self._join(x)):
259 return b'x'
262 return b'x'
260 return b''
263 return b''
261
264
262 return f
265 return f
263 else:
266 else:
264 return fallback
267 return fallback
265
268
266 @propertycache
269 @propertycache
267 def _cwd(self):
270 def _cwd(self):
268 # internal config: ui.forcecwd
271 # internal config: ui.forcecwd
269 forcecwd = self._ui.config(b'ui', b'forcecwd')
272 forcecwd = self._ui.config(b'ui', b'forcecwd')
270 if forcecwd:
273 if forcecwd:
271 return forcecwd
274 return forcecwd
272 return encoding.getcwd()
275 return encoding.getcwd()
273
276
274 def getcwd(self):
277 def getcwd(self):
275 """Return the path from which a canonical path is calculated.
278 """Return the path from which a canonical path is calculated.
276
279
277 This path should be used to resolve file patterns or to convert
280 This path should be used to resolve file patterns or to convert
278 canonical paths back to file paths for display. It shouldn't be
281 canonical paths back to file paths for display. It shouldn't be
279 used to get real file paths. Use vfs functions instead.
282 used to get real file paths. Use vfs functions instead.
280 """
283 """
281 cwd = self._cwd
284 cwd = self._cwd
282 if cwd == self._root:
285 if cwd == self._root:
283 return b''
286 return b''
284 # self._root ends with a path separator if self._root is '/' or 'C:\'
287 # self._root ends with a path separator if self._root is '/' or 'C:\'
285 rootsep = self._root
288 rootsep = self._root
286 if not util.endswithsep(rootsep):
289 if not util.endswithsep(rootsep):
287 rootsep += pycompat.ossep
290 rootsep += pycompat.ossep
288 if cwd.startswith(rootsep):
291 if cwd.startswith(rootsep):
289 return cwd[len(rootsep) :]
292 return cwd[len(rootsep) :]
290 else:
293 else:
291 # we're outside the repo. return an absolute path.
294 # we're outside the repo. return an absolute path.
292 return cwd
295 return cwd
293
296
294 def pathto(self, f, cwd=None):
297 def pathto(self, f, cwd=None):
295 if cwd is None:
298 if cwd is None:
296 cwd = self.getcwd()
299 cwd = self.getcwd()
297 path = util.pathto(self._root, cwd, f)
300 path = util.pathto(self._root, cwd, f)
298 if self._slash:
301 if self._slash:
299 return util.pconvert(path)
302 return util.pconvert(path)
300 return path
303 return path
301
304
302 def __getitem__(self, key):
305 def __getitem__(self, key):
303 """Return the current state of key (a filename) in the dirstate.
306 """Return the current state of key (a filename) in the dirstate.
304
307
305 States are:
308 States are:
306 n normal
309 n normal
307 m needs merging
310 m needs merging
308 r marked for removal
311 r marked for removal
309 a marked for addition
312 a marked for addition
310 ? not tracked
313 ? not tracked
311 """
314 """
312 return self._map.get(key, (b"?",))[0]
315 return self._map.get(key, (b"?",))[0]
313
316
314 def __contains__(self, key):
317 def __contains__(self, key):
315 return key in self._map
318 return key in self._map
316
319
317 def __iter__(self):
320 def __iter__(self):
318 return iter(sorted(self._map))
321 return iter(sorted(self._map))
319
322
320 def items(self):
323 def items(self):
321 return pycompat.iteritems(self._map)
324 return pycompat.iteritems(self._map)
322
325
323 iteritems = items
326 iteritems = items
324
327
325 def directories(self):
328 def directories(self):
326 return self._map.directories()
329 return self._map.directories()
327
330
328 def parents(self):
331 def parents(self):
329 return [self._validate(p) for p in self._pl]
332 return [self._validate(p) for p in self._pl]
330
333
331 def p1(self):
334 def p1(self):
332 return self._validate(self._pl[0])
335 return self._validate(self._pl[0])
333
336
334 def p2(self):
337 def p2(self):
335 return self._validate(self._pl[1])
338 return self._validate(self._pl[1])
336
339
337 def branch(self):
340 def branch(self):
338 return encoding.tolocal(self._branch)
341 return encoding.tolocal(self._branch)
339
342
340 def setparents(self, p1, p2=None):
343 def setparents(self, p1, p2=None):
341 """Set dirstate parents to p1 and p2.
344 """Set dirstate parents to p1 and p2.
342
345
343 When moving from two parents to one, 'm' merged entries a
346 When moving from two parents to one, 'm' merged entries a
344 adjusted to normal and previous copy records discarded and
347 adjusted to normal and previous copy records discarded and
345 returned by the call.
348 returned by the call.
346
349
347 See localrepo.setparents()
350 See localrepo.setparents()
348 """
351 """
349 if p2 is None:
352 if p2 is None:
350 p2 = self._nodeconstants.nullid
353 p2 = self._nodeconstants.nullid
351 if self._parentwriters == 0:
354 if self._parentwriters == 0:
352 raise ValueError(
355 raise ValueError(
353 b"cannot set dirstate parent outside of "
356 b"cannot set dirstate parent outside of "
354 b"dirstate.parentchange context manager"
357 b"dirstate.parentchange context manager"
355 )
358 )
356
359
357 self._dirty = True
360 self._dirty = True
358 oldp2 = self._pl[1]
361 oldp2 = self._pl[1]
359 if self._origpl is None:
362 if self._origpl is None:
360 self._origpl = self._pl
363 self._origpl = self._pl
361 self._map.setparents(p1, p2)
364 self._map.setparents(p1, p2)
362 copies = {}
365 copies = {}
363 if (
366 if (
364 oldp2 != self._nodeconstants.nullid
367 oldp2 != self._nodeconstants.nullid
365 and p2 == self._nodeconstants.nullid
368 and p2 == self._nodeconstants.nullid
366 ):
369 ):
367 candidatefiles = self._map.non_normal_or_other_parent_paths()
370 candidatefiles = self._map.non_normal_or_other_parent_paths()
368
371
369 for f in candidatefiles:
372 for f in candidatefiles:
370 s = self._map.get(f)
373 s = self._map.get(f)
371 if s is None:
374 if s is None:
372 continue
375 continue
373
376
374 # Discard 'm' markers when moving away from a merge state
377 # Discard 'm' markers when moving away from a merge state
375 if s[0] == b'm':
378 if s[0] == b'm':
376 source = self._map.copymap.get(f)
379 source = self._map.copymap.get(f)
377 if source:
380 if source:
378 copies[f] = source
381 copies[f] = source
379 self.normallookup(f)
382 self.normallookup(f)
380 # Also fix up otherparent markers
383 # Also fix up otherparent markers
381 elif s[0] == b'n' and s[2] == FROM_P2:
384 elif s[0] == b'n' and s[2] == FROM_P2:
382 source = self._map.copymap.get(f)
385 source = self._map.copymap.get(f)
383 if source:
386 if source:
384 copies[f] = source
387 copies[f] = source
385 self.add(f)
388 self.add(f)
386 return copies
389 return copies
387
390
388 def setbranch(self, branch):
391 def setbranch(self, branch):
389 self.__class__._branch.set(self, encoding.fromlocal(branch))
392 self.__class__._branch.set(self, encoding.fromlocal(branch))
390 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
393 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
391 try:
394 try:
392 f.write(self._branch + b'\n')
395 f.write(self._branch + b'\n')
393 f.close()
396 f.close()
394
397
395 # make sure filecache has the correct stat info for _branch after
398 # make sure filecache has the correct stat info for _branch after
396 # replacing the underlying file
399 # replacing the underlying file
397 ce = self._filecache[b'_branch']
400 ce = self._filecache[b'_branch']
398 if ce:
401 if ce:
399 ce.refresh()
402 ce.refresh()
400 except: # re-raises
403 except: # re-raises
401 f.discard()
404 f.discard()
402 raise
405 raise
403
406
404 def invalidate(self):
407 def invalidate(self):
405 """Causes the next access to reread the dirstate.
408 """Causes the next access to reread the dirstate.
406
409
407 This is different from localrepo.invalidatedirstate() because it always
410 This is different from localrepo.invalidatedirstate() because it always
408 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
411 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
409 check whether the dirstate has changed before rereading it."""
412 check whether the dirstate has changed before rereading it."""
410
413
411 for a in ("_map", "_branch", "_ignore"):
414 for a in ("_map", "_branch", "_ignore"):
412 if a in self.__dict__:
415 if a in self.__dict__:
413 delattr(self, a)
416 delattr(self, a)
414 self._lastnormaltime = 0
417 self._lastnormaltime = 0
415 self._dirty = False
418 self._dirty = False
416 self._updatedfiles.clear()
419 self._updatedfiles.clear()
417 self._parentwriters = 0
420 self._parentwriters = 0
418 self._origpl = None
421 self._origpl = None
419
422
420 def copy(self, source, dest):
423 def copy(self, source, dest):
421 """Mark dest as a copy of source. Unmark dest if source is None."""
424 """Mark dest as a copy of source. Unmark dest if source is None."""
422 if source == dest:
425 if source == dest:
423 return
426 return
424 self._dirty = True
427 self._dirty = True
425 if source is not None:
428 if source is not None:
426 self._map.copymap[dest] = source
429 self._map.copymap[dest] = source
427 self._updatedfiles.add(source)
430 self._updatedfiles.add(source)
428 self._updatedfiles.add(dest)
431 self._updatedfiles.add(dest)
429 elif self._map.copymap.pop(dest, None):
432 elif self._map.copymap.pop(dest, None):
430 self._updatedfiles.add(dest)
433 self._updatedfiles.add(dest)
431
434
432 def copied(self, file):
435 def copied(self, file):
433 return self._map.copymap.get(file, None)
436 return self._map.copymap.get(file, None)
434
437
435 def copies(self):
438 def copies(self):
436 return self._map.copymap
439 return self._map.copymap
437
440
438 def _addpath(self, f, state, mode, size, mtime):
441 def _addpath(self, f, state, mode, size, mtime):
439 oldstate = self[f]
442 oldstate = self[f]
440 if state == b'a' or oldstate == b'r':
443 if state == b'a' or oldstate == b'r':
441 scmutil.checkfilename(f)
444 scmutil.checkfilename(f)
442 if self._map.hastrackeddir(f):
445 if self._map.hastrackeddir(f):
443 msg = _(b'directory %r already in dirstate')
446 msg = _(b'directory %r already in dirstate')
444 msg %= pycompat.bytestr(f)
447 msg %= pycompat.bytestr(f)
445 raise error.Abort(msg)
448 raise error.Abort(msg)
446 # shadows
449 # shadows
447 for d in pathutil.finddirs(f):
450 for d in pathutil.finddirs(f):
448 if self._map.hastrackeddir(d):
451 if self._map.hastrackeddir(d):
449 break
452 break
450 entry = self._map.get(d)
453 entry = self._map.get(d)
451 if entry is not None and entry[0] != b'r':
454 if entry is not None and entry[0] != b'r':
452 msg = _(b'file %r in dirstate clashes with %r')
455 msg = _(b'file %r in dirstate clashes with %r')
453 msg %= (pycompat.bytestr(d), pycompat.bytestr(f))
456 msg %= (pycompat.bytestr(d), pycompat.bytestr(f))
454 raise error.Abort(msg)
457 raise error.Abort(msg)
455 self._dirty = True
458 self._dirty = True
456 self._updatedfiles.add(f)
459 self._updatedfiles.add(f)
457 self._map.addfile(f, oldstate, state, mode, size, mtime)
460 self._map.addfile(f, oldstate, state, mode, size, mtime)
458
461
459 def normal(self, f, parentfiledata=None):
462 def normal(self, f, parentfiledata=None):
460 """Mark a file normal and clean.
463 """Mark a file normal and clean.
461
464
462 parentfiledata: (mode, size, mtime) of the clean file
465 parentfiledata: (mode, size, mtime) of the clean file
463
466
464 parentfiledata should be computed from memory (for mode,
467 parentfiledata should be computed from memory (for mode,
465 size), as or close as possible from the point where we
468 size), as or close as possible from the point where we
466 determined the file was clean, to limit the risk of the
469 determined the file was clean, to limit the risk of the
467 file having been changed by an external process between the
470 file having been changed by an external process between the
468 moment where the file was determined to be clean and now."""
471 moment where the file was determined to be clean and now."""
469 if parentfiledata:
472 if parentfiledata:
470 (mode, size, mtime) = parentfiledata
473 (mode, size, mtime) = parentfiledata
471 else:
474 else:
472 s = os.lstat(self._join(f))
475 s = os.lstat(self._join(f))
473 mode = s.st_mode
476 mode = s.st_mode
474 size = s.st_size
477 size = s.st_size
475 mtime = s[stat.ST_MTIME]
478 mtime = s[stat.ST_MTIME]
476 self._addpath(f, b'n', mode, size & _rangemask, mtime & _rangemask)
479 self._addpath(f, b'n', mode, size & _rangemask, mtime & _rangemask)
477 self._map.copymap.pop(f, None)
480 self._map.copymap.pop(f, None)
478 if f in self._map.nonnormalset:
481 if f in self._map.nonnormalset:
479 self._map.nonnormalset.remove(f)
482 self._map.nonnormalset.remove(f)
480 if mtime > self._lastnormaltime:
483 if mtime > self._lastnormaltime:
481 # Remember the most recent modification timeslot for status(),
484 # Remember the most recent modification timeslot for status(),
482 # to make sure we won't miss future size-preserving file content
485 # to make sure we won't miss future size-preserving file content
483 # modifications that happen within the same timeslot.
486 # modifications that happen within the same timeslot.
484 self._lastnormaltime = mtime
487 self._lastnormaltime = mtime
485
488
486 def normallookup(self, f):
489 def normallookup(self, f):
487 '''Mark a file normal, but possibly dirty.'''
490 '''Mark a file normal, but possibly dirty.'''
488 if self._pl[1] != self._nodeconstants.nullid:
491 if self._pl[1] != self._nodeconstants.nullid:
489 # if there is a merge going on and the file was either
492 # if there is a merge going on and the file was either
490 # in state 'm' (-1) or coming from other parent (-2) before
493 # in state 'm' (-1) or coming from other parent (-2) before
491 # being removed, restore that state.
494 # being removed, restore that state.
492 entry = self._map.get(f)
495 entry = self._map.get(f)
493 if entry is not None:
496 if entry is not None:
494 if entry[0] == b'r' and entry[2] in (NONNORMAL, FROM_P2):
497 if entry[0] == b'r' and entry[2] in (NONNORMAL, FROM_P2):
495 source = self._map.copymap.get(f)
498 source = self._map.copymap.get(f)
496 if entry[2] == NONNORMAL:
499 if entry[2] == NONNORMAL:
497 self.merge(f)
500 self.merge(f)
498 elif entry[2] == FROM_P2:
501 elif entry[2] == FROM_P2:
499 self.otherparent(f)
502 self.otherparent(f)
500 if source:
503 if source:
501 self.copy(source, f)
504 self.copy(source, f)
502 return
505 return
503 if entry[0] == b'm' or entry[0] == b'n' and entry[2] == FROM_P2:
506 if entry[0] == b'm' or entry[0] == b'n' and entry[2] == FROM_P2:
504 return
507 return
505 self._addpath(f, b'n', 0, NONNORMAL, -1)
508 self._addpath(f, b'n', 0, NONNORMAL, AMBIGUOUS_TIME)
506 self._map.copymap.pop(f, None)
509 self._map.copymap.pop(f, None)
507
510
508 def otherparent(self, f):
511 def otherparent(self, f):
509 '''Mark as coming from the other parent, always dirty.'''
512 '''Mark as coming from the other parent, always dirty.'''
510 if self._pl[1] == self._nodeconstants.nullid:
513 if self._pl[1] == self._nodeconstants.nullid:
511 msg = _(b"setting %r to other parent only allowed in merges") % f
514 msg = _(b"setting %r to other parent only allowed in merges") % f
512 raise error.Abort(msg)
515 raise error.Abort(msg)
513 if f in self and self[f] == b'n':
516 if f in self and self[f] == b'n':
514 # merge-like
517 # merge-like
515 self._addpath(f, b'm', 0, FROM_P2, -1)
518 self._addpath(f, b'm', 0, FROM_P2, AMBIGUOUS_TIME)
516 else:
519 else:
517 # add-like
520 # add-like
518 self._addpath(f, b'n', 0, FROM_P2, -1)
521 self._addpath(f, b'n', 0, FROM_P2, AMBIGUOUS_TIME)
519 self._map.copymap.pop(f, None)
522 self._map.copymap.pop(f, None)
520
523
521 def add(self, f):
524 def add(self, f):
522 '''Mark a file added.'''
525 '''Mark a file added.'''
523 self._addpath(f, b'a', 0, NONNORMAL, -1)
526 self._addpath(f, b'a', 0, NONNORMAL, AMBIGUOUS_TIME)
524 self._map.copymap.pop(f, None)
527 self._map.copymap.pop(f, None)
525
528
526 def remove(self, f):
529 def remove(self, f):
527 '''Mark a file removed.'''
530 '''Mark a file removed.'''
528 self._dirty = True
531 self._dirty = True
529 oldstate = self[f]
532 oldstate = self[f]
530 size = 0
533 size = 0
531 if self._pl[1] != self._nodeconstants.nullid:
534 if self._pl[1] != self._nodeconstants.nullid:
532 entry = self._map.get(f)
535 entry = self._map.get(f)
533 if entry is not None:
536 if entry is not None:
534 # backup the previous state
537 # backup the previous state
535 if entry[0] == b'm': # merge
538 if entry[0] == b'm': # merge
536 size = NONNORMAL
539 size = NONNORMAL
537 elif entry[0] == b'n' and entry[2] == FROM_P2: # other parent
540 elif entry[0] == b'n' and entry[2] == FROM_P2: # other parent
538 size = FROM_P2
541 size = FROM_P2
539 self._map.otherparentset.add(f)
542 self._map.otherparentset.add(f)
540 self._updatedfiles.add(f)
543 self._updatedfiles.add(f)
541 self._map.removefile(f, oldstate, size)
544 self._map.removefile(f, oldstate, size)
542 if size == 0:
545 if size == 0:
543 self._map.copymap.pop(f, None)
546 self._map.copymap.pop(f, None)
544
547
545 def merge(self, f):
548 def merge(self, f):
546 '''Mark a file merged.'''
549 '''Mark a file merged.'''
547 if self._pl[1] == self._nodeconstants.nullid:
550 if self._pl[1] == self._nodeconstants.nullid:
548 return self.normallookup(f)
551 return self.normallookup(f)
549 return self.otherparent(f)
552 return self.otherparent(f)
550
553
551 def drop(self, f):
554 def drop(self, f):
552 '''Drop a file from the dirstate'''
555 '''Drop a file from the dirstate'''
553 oldstate = self[f]
556 oldstate = self[f]
554 if self._map.dropfile(f, oldstate):
557 if self._map.dropfile(f, oldstate):
555 self._dirty = True
558 self._dirty = True
556 self._updatedfiles.add(f)
559 self._updatedfiles.add(f)
557 self._map.copymap.pop(f, None)
560 self._map.copymap.pop(f, None)
558
561
559 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
562 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
560 if exists is None:
563 if exists is None:
561 exists = os.path.lexists(os.path.join(self._root, path))
564 exists = os.path.lexists(os.path.join(self._root, path))
562 if not exists:
565 if not exists:
563 # Maybe a path component exists
566 # Maybe a path component exists
564 if not ignoremissing and b'/' in path:
567 if not ignoremissing and b'/' in path:
565 d, f = path.rsplit(b'/', 1)
568 d, f = path.rsplit(b'/', 1)
566 d = self._normalize(d, False, ignoremissing, None)
569 d = self._normalize(d, False, ignoremissing, None)
567 folded = d + b"/" + f
570 folded = d + b"/" + f
568 else:
571 else:
569 # No path components, preserve original case
572 # No path components, preserve original case
570 folded = path
573 folded = path
571 else:
574 else:
572 # recursively normalize leading directory components
575 # recursively normalize leading directory components
573 # against dirstate
576 # against dirstate
574 if b'/' in normed:
577 if b'/' in normed:
575 d, f = normed.rsplit(b'/', 1)
578 d, f = normed.rsplit(b'/', 1)
576 d = self._normalize(d, False, ignoremissing, True)
579 d = self._normalize(d, False, ignoremissing, True)
577 r = self._root + b"/" + d
580 r = self._root + b"/" + d
578 folded = d + b"/" + util.fspath(f, r)
581 folded = d + b"/" + util.fspath(f, r)
579 else:
582 else:
580 folded = util.fspath(normed, self._root)
583 folded = util.fspath(normed, self._root)
581 storemap[normed] = folded
584 storemap[normed] = folded
582
585
583 return folded
586 return folded
584
587
585 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
588 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
586 normed = util.normcase(path)
589 normed = util.normcase(path)
587 folded = self._map.filefoldmap.get(normed, None)
590 folded = self._map.filefoldmap.get(normed, None)
588 if folded is None:
591 if folded is None:
589 if isknown:
592 if isknown:
590 folded = path
593 folded = path
591 else:
594 else:
592 folded = self._discoverpath(
595 folded = self._discoverpath(
593 path, normed, ignoremissing, exists, self._map.filefoldmap
596 path, normed, ignoremissing, exists, self._map.filefoldmap
594 )
597 )
595 return folded
598 return folded
596
599
597 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
600 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
598 normed = util.normcase(path)
601 normed = util.normcase(path)
599 folded = self._map.filefoldmap.get(normed, None)
602 folded = self._map.filefoldmap.get(normed, None)
600 if folded is None:
603 if folded is None:
601 folded = self._map.dirfoldmap.get(normed, None)
604 folded = self._map.dirfoldmap.get(normed, None)
602 if folded is None:
605 if folded is None:
603 if isknown:
606 if isknown:
604 folded = path
607 folded = path
605 else:
608 else:
606 # store discovered result in dirfoldmap so that future
609 # store discovered result in dirfoldmap so that future
607 # normalizefile calls don't start matching directories
610 # normalizefile calls don't start matching directories
608 folded = self._discoverpath(
611 folded = self._discoverpath(
609 path, normed, ignoremissing, exists, self._map.dirfoldmap
612 path, normed, ignoremissing, exists, self._map.dirfoldmap
610 )
613 )
611 return folded
614 return folded
612
615
613 def normalize(self, path, isknown=False, ignoremissing=False):
616 def normalize(self, path, isknown=False, ignoremissing=False):
614 """
617 """
615 normalize the case of a pathname when on a casefolding filesystem
618 normalize the case of a pathname when on a casefolding filesystem
616
619
617 isknown specifies whether the filename came from walking the
620 isknown specifies whether the filename came from walking the
618 disk, to avoid extra filesystem access.
621 disk, to avoid extra filesystem access.
619
622
620 If ignoremissing is True, missing path are returned
623 If ignoremissing is True, missing path are returned
621 unchanged. Otherwise, we try harder to normalize possibly
624 unchanged. Otherwise, we try harder to normalize possibly
622 existing path components.
625 existing path components.
623
626
624 The normalized case is determined based on the following precedence:
627 The normalized case is determined based on the following precedence:
625
628
626 - version of name already stored in the dirstate
629 - version of name already stored in the dirstate
627 - version of name stored on disk
630 - version of name stored on disk
628 - version provided via command arguments
631 - version provided via command arguments
629 """
632 """
630
633
631 if self._checkcase:
634 if self._checkcase:
632 return self._normalize(path, isknown, ignoremissing)
635 return self._normalize(path, isknown, ignoremissing)
633 return path
636 return path
634
637
635 def clear(self):
638 def clear(self):
636 self._map.clear()
639 self._map.clear()
637 self._lastnormaltime = 0
640 self._lastnormaltime = 0
638 self._updatedfiles.clear()
641 self._updatedfiles.clear()
639 self._dirty = True
642 self._dirty = True
640
643
641 def rebuild(self, parent, allfiles, changedfiles=None):
644 def rebuild(self, parent, allfiles, changedfiles=None):
642 if changedfiles is None:
645 if changedfiles is None:
643 # Rebuild entire dirstate
646 # Rebuild entire dirstate
644 to_lookup = allfiles
647 to_lookup = allfiles
645 to_drop = []
648 to_drop = []
646 lastnormaltime = self._lastnormaltime
649 lastnormaltime = self._lastnormaltime
647 self.clear()
650 self.clear()
648 self._lastnormaltime = lastnormaltime
651 self._lastnormaltime = lastnormaltime
649 elif len(changedfiles) < 10:
652 elif len(changedfiles) < 10:
650 # Avoid turning allfiles into a set, which can be expensive if it's
653 # Avoid turning allfiles into a set, which can be expensive if it's
651 # large.
654 # large.
652 to_lookup = []
655 to_lookup = []
653 to_drop = []
656 to_drop = []
654 for f in changedfiles:
657 for f in changedfiles:
655 if f in allfiles:
658 if f in allfiles:
656 to_lookup.append(f)
659 to_lookup.append(f)
657 else:
660 else:
658 to_drop.append(f)
661 to_drop.append(f)
659 else:
662 else:
660 changedfilesset = set(changedfiles)
663 changedfilesset = set(changedfiles)
661 to_lookup = changedfilesset & set(allfiles)
664 to_lookup = changedfilesset & set(allfiles)
662 to_drop = changedfilesset - to_lookup
665 to_drop = changedfilesset - to_lookup
663
666
664 if self._origpl is None:
667 if self._origpl is None:
665 self._origpl = self._pl
668 self._origpl = self._pl
666 self._map.setparents(parent, self._nodeconstants.nullid)
669 self._map.setparents(parent, self._nodeconstants.nullid)
667
670
668 for f in to_lookup:
671 for f in to_lookup:
669 self.normallookup(f)
672 self.normallookup(f)
670 for f in to_drop:
673 for f in to_drop:
671 self.drop(f)
674 self.drop(f)
672
675
673 self._dirty = True
676 self._dirty = True
674
677
675 def identity(self):
678 def identity(self):
676 """Return identity of dirstate itself to detect changing in storage
679 """Return identity of dirstate itself to detect changing in storage
677
680
678 If identity of previous dirstate is equal to this, writing
681 If identity of previous dirstate is equal to this, writing
679 changes based on the former dirstate out can keep consistency.
682 changes based on the former dirstate out can keep consistency.
680 """
683 """
681 return self._map.identity
684 return self._map.identity
682
685
683 def write(self, tr):
686 def write(self, tr):
684 if not self._dirty:
687 if not self._dirty:
685 return
688 return
686
689
687 filename = self._filename
690 filename = self._filename
688 if tr:
691 if tr:
689 # 'dirstate.write()' is not only for writing in-memory
692 # 'dirstate.write()' is not only for writing in-memory
690 # changes out, but also for dropping ambiguous timestamp.
693 # changes out, but also for dropping ambiguous timestamp.
691 # delayed writing re-raise "ambiguous timestamp issue".
694 # delayed writing re-raise "ambiguous timestamp issue".
692 # See also the wiki page below for detail:
695 # See also the wiki page below for detail:
693 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
696 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
694
697
695 # emulate dropping timestamp in 'parsers.pack_dirstate'
698 # emulate dropping timestamp in 'parsers.pack_dirstate'
696 now = _getfsnow(self._opener)
699 now = _getfsnow(self._opener)
697 self._map.clearambiguoustimes(self._updatedfiles, now)
700 self._map.clearambiguoustimes(self._updatedfiles, now)
698
701
699 # emulate that all 'dirstate.normal' results are written out
702 # emulate that all 'dirstate.normal' results are written out
700 self._lastnormaltime = 0
703 self._lastnormaltime = 0
701 self._updatedfiles.clear()
704 self._updatedfiles.clear()
702
705
703 # delay writing in-memory changes out
706 # delay writing in-memory changes out
704 tr.addfilegenerator(
707 tr.addfilegenerator(
705 b'dirstate',
708 b'dirstate',
706 (self._filename,),
709 (self._filename,),
707 self._writedirstate,
710 self._writedirstate,
708 location=b'plain',
711 location=b'plain',
709 )
712 )
710 return
713 return
711
714
712 st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
715 st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
713 self._writedirstate(st)
716 self._writedirstate(st)
714
717
715 def addparentchangecallback(self, category, callback):
718 def addparentchangecallback(self, category, callback):
716 """add a callback to be called when the wd parents are changed
719 """add a callback to be called when the wd parents are changed
717
720
718 Callback will be called with the following arguments:
721 Callback will be called with the following arguments:
719 dirstate, (oldp1, oldp2), (newp1, newp2)
722 dirstate, (oldp1, oldp2), (newp1, newp2)
720
723
721 Category is a unique identifier to allow overwriting an old callback
724 Category is a unique identifier to allow overwriting an old callback
722 with a newer callback.
725 with a newer callback.
723 """
726 """
724 self._plchangecallbacks[category] = callback
727 self._plchangecallbacks[category] = callback
725
728
726 def _writedirstate(self, st):
729 def _writedirstate(self, st):
727 # notify callbacks about parents change
730 # notify callbacks about parents change
728 if self._origpl is not None and self._origpl != self._pl:
731 if self._origpl is not None and self._origpl != self._pl:
729 for c, callback in sorted(
732 for c, callback in sorted(
730 pycompat.iteritems(self._plchangecallbacks)
733 pycompat.iteritems(self._plchangecallbacks)
731 ):
734 ):
732 callback(self, self._origpl, self._pl)
735 callback(self, self._origpl, self._pl)
733 self._origpl = None
736 self._origpl = None
734 # use the modification time of the newly created temporary file as the
737 # use the modification time of the newly created temporary file as the
735 # filesystem's notion of 'now'
738 # filesystem's notion of 'now'
736 now = util.fstat(st)[stat.ST_MTIME] & _rangemask
739 now = util.fstat(st)[stat.ST_MTIME] & _rangemask
737
740
738 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
741 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
739 # timestamp of each entries in dirstate, because of 'now > mtime'
742 # timestamp of each entries in dirstate, because of 'now > mtime'
740 delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
743 delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
741 if delaywrite > 0:
744 if delaywrite > 0:
742 # do we have any files to delay for?
745 # do we have any files to delay for?
743 for f, e in pycompat.iteritems(self._map):
746 for f, e in pycompat.iteritems(self._map):
744 if e[0] == b'n' and e[3] == now:
747 if e[0] == b'n' and e[3] == now:
745 import time # to avoid useless import
748 import time # to avoid useless import
746
749
747 # rather than sleep n seconds, sleep until the next
750 # rather than sleep n seconds, sleep until the next
748 # multiple of n seconds
751 # multiple of n seconds
749 clock = time.time()
752 clock = time.time()
750 start = int(clock) - (int(clock) % delaywrite)
753 start = int(clock) - (int(clock) % delaywrite)
751 end = start + delaywrite
754 end = start + delaywrite
752 time.sleep(end - clock)
755 time.sleep(end - clock)
753 now = end # trust our estimate that the end is near now
756 now = end # trust our estimate that the end is near now
754 break
757 break
755
758
756 self._map.write(st, now)
759 self._map.write(st, now)
757 self._lastnormaltime = 0
760 self._lastnormaltime = 0
758 self._dirty = False
761 self._dirty = False
759
762
760 def _dirignore(self, f):
763 def _dirignore(self, f):
761 if self._ignore(f):
764 if self._ignore(f):
762 return True
765 return True
763 for p in pathutil.finddirs(f):
766 for p in pathutil.finddirs(f):
764 if self._ignore(p):
767 if self._ignore(p):
765 return True
768 return True
766 return False
769 return False
767
770
768 def _ignorefiles(self):
771 def _ignorefiles(self):
769 files = []
772 files = []
770 if os.path.exists(self._join(b'.hgignore')):
773 if os.path.exists(self._join(b'.hgignore')):
771 files.append(self._join(b'.hgignore'))
774 files.append(self._join(b'.hgignore'))
772 for name, path in self._ui.configitems(b"ui"):
775 for name, path in self._ui.configitems(b"ui"):
773 if name == b'ignore' or name.startswith(b'ignore.'):
776 if name == b'ignore' or name.startswith(b'ignore.'):
774 # we need to use os.path.join here rather than self._join
777 # we need to use os.path.join here rather than self._join
775 # because path is arbitrary and user-specified
778 # because path is arbitrary and user-specified
776 files.append(os.path.join(self._rootdir, util.expandpath(path)))
779 files.append(os.path.join(self._rootdir, util.expandpath(path)))
777 return files
780 return files
778
781
779 def _ignorefileandline(self, f):
782 def _ignorefileandline(self, f):
780 files = collections.deque(self._ignorefiles())
783 files = collections.deque(self._ignorefiles())
781 visited = set()
784 visited = set()
782 while files:
785 while files:
783 i = files.popleft()
786 i = files.popleft()
784 patterns = matchmod.readpatternfile(
787 patterns = matchmod.readpatternfile(
785 i, self._ui.warn, sourceinfo=True
788 i, self._ui.warn, sourceinfo=True
786 )
789 )
787 for pattern, lineno, line in patterns:
790 for pattern, lineno, line in patterns:
788 kind, p = matchmod._patsplit(pattern, b'glob')
791 kind, p = matchmod._patsplit(pattern, b'glob')
789 if kind == b"subinclude":
792 if kind == b"subinclude":
790 if p not in visited:
793 if p not in visited:
791 files.append(p)
794 files.append(p)
792 continue
795 continue
793 m = matchmod.match(
796 m = matchmod.match(
794 self._root, b'', [], [pattern], warn=self._ui.warn
797 self._root, b'', [], [pattern], warn=self._ui.warn
795 )
798 )
796 if m(f):
799 if m(f):
797 return (i, lineno, line)
800 return (i, lineno, line)
798 visited.add(i)
801 visited.add(i)
799 return (None, -1, b"")
802 return (None, -1, b"")
800
803
801 def _walkexplicit(self, match, subrepos):
804 def _walkexplicit(self, match, subrepos):
802 """Get stat data about the files explicitly specified by match.
805 """Get stat data about the files explicitly specified by match.
803
806
804 Return a triple (results, dirsfound, dirsnotfound).
807 Return a triple (results, dirsfound, dirsnotfound).
805 - results is a mapping from filename to stat result. It also contains
808 - results is a mapping from filename to stat result. It also contains
806 listings mapping subrepos and .hg to None.
809 listings mapping subrepos and .hg to None.
807 - dirsfound is a list of files found to be directories.
810 - dirsfound is a list of files found to be directories.
808 - dirsnotfound is a list of files that the dirstate thinks are
811 - dirsnotfound is a list of files that the dirstate thinks are
809 directories and that were not found."""
812 directories and that were not found."""
810
813
811 def badtype(mode):
814 def badtype(mode):
812 kind = _(b'unknown')
815 kind = _(b'unknown')
813 if stat.S_ISCHR(mode):
816 if stat.S_ISCHR(mode):
814 kind = _(b'character device')
817 kind = _(b'character device')
815 elif stat.S_ISBLK(mode):
818 elif stat.S_ISBLK(mode):
816 kind = _(b'block device')
819 kind = _(b'block device')
817 elif stat.S_ISFIFO(mode):
820 elif stat.S_ISFIFO(mode):
818 kind = _(b'fifo')
821 kind = _(b'fifo')
819 elif stat.S_ISSOCK(mode):
822 elif stat.S_ISSOCK(mode):
820 kind = _(b'socket')
823 kind = _(b'socket')
821 elif stat.S_ISDIR(mode):
824 elif stat.S_ISDIR(mode):
822 kind = _(b'directory')
825 kind = _(b'directory')
823 return _(b'unsupported file type (type is %s)') % kind
826 return _(b'unsupported file type (type is %s)') % kind
824
827
825 badfn = match.bad
828 badfn = match.bad
826 dmap = self._map
829 dmap = self._map
827 lstat = os.lstat
830 lstat = os.lstat
828 getkind = stat.S_IFMT
831 getkind = stat.S_IFMT
829 dirkind = stat.S_IFDIR
832 dirkind = stat.S_IFDIR
830 regkind = stat.S_IFREG
833 regkind = stat.S_IFREG
831 lnkkind = stat.S_IFLNK
834 lnkkind = stat.S_IFLNK
832 join = self._join
835 join = self._join
833 dirsfound = []
836 dirsfound = []
834 foundadd = dirsfound.append
837 foundadd = dirsfound.append
835 dirsnotfound = []
838 dirsnotfound = []
836 notfoundadd = dirsnotfound.append
839 notfoundadd = dirsnotfound.append
837
840
838 if not match.isexact() and self._checkcase:
841 if not match.isexact() and self._checkcase:
839 normalize = self._normalize
842 normalize = self._normalize
840 else:
843 else:
841 normalize = None
844 normalize = None
842
845
843 files = sorted(match.files())
846 files = sorted(match.files())
844 subrepos.sort()
847 subrepos.sort()
845 i, j = 0, 0
848 i, j = 0, 0
846 while i < len(files) and j < len(subrepos):
849 while i < len(files) and j < len(subrepos):
847 subpath = subrepos[j] + b"/"
850 subpath = subrepos[j] + b"/"
848 if files[i] < subpath:
851 if files[i] < subpath:
849 i += 1
852 i += 1
850 continue
853 continue
851 while i < len(files) and files[i].startswith(subpath):
854 while i < len(files) and files[i].startswith(subpath):
852 del files[i]
855 del files[i]
853 j += 1
856 j += 1
854
857
855 if not files or b'' in files:
858 if not files or b'' in files:
856 files = [b'']
859 files = [b'']
857 # constructing the foldmap is expensive, so don't do it for the
860 # constructing the foldmap is expensive, so don't do it for the
858 # common case where files is ['']
861 # common case where files is ['']
859 normalize = None
862 normalize = None
860 results = dict.fromkeys(subrepos)
863 results = dict.fromkeys(subrepos)
861 results[b'.hg'] = None
864 results[b'.hg'] = None
862
865
863 for ff in files:
866 for ff in files:
864 if normalize:
867 if normalize:
865 nf = normalize(ff, False, True)
868 nf = normalize(ff, False, True)
866 else:
869 else:
867 nf = ff
870 nf = ff
868 if nf in results:
871 if nf in results:
869 continue
872 continue
870
873
871 try:
874 try:
872 st = lstat(join(nf))
875 st = lstat(join(nf))
873 kind = getkind(st.st_mode)
876 kind = getkind(st.st_mode)
874 if kind == dirkind:
877 if kind == dirkind:
875 if nf in dmap:
878 if nf in dmap:
876 # file replaced by dir on disk but still in dirstate
879 # file replaced by dir on disk but still in dirstate
877 results[nf] = None
880 results[nf] = None
878 foundadd((nf, ff))
881 foundadd((nf, ff))
879 elif kind == regkind or kind == lnkkind:
882 elif kind == regkind or kind == lnkkind:
880 results[nf] = st
883 results[nf] = st
881 else:
884 else:
882 badfn(ff, badtype(kind))
885 badfn(ff, badtype(kind))
883 if nf in dmap:
886 if nf in dmap:
884 results[nf] = None
887 results[nf] = None
885 except OSError as inst: # nf not found on disk - it is dirstate only
888 except OSError as inst: # nf not found on disk - it is dirstate only
886 if nf in dmap: # does it exactly match a missing file?
889 if nf in dmap: # does it exactly match a missing file?
887 results[nf] = None
890 results[nf] = None
888 else: # does it match a missing directory?
891 else: # does it match a missing directory?
889 if self._map.hasdir(nf):
892 if self._map.hasdir(nf):
890 notfoundadd(nf)
893 notfoundadd(nf)
891 else:
894 else:
892 badfn(ff, encoding.strtolocal(inst.strerror))
895 badfn(ff, encoding.strtolocal(inst.strerror))
893
896
894 # match.files() may contain explicitly-specified paths that shouldn't
897 # match.files() may contain explicitly-specified paths that shouldn't
895 # be taken; drop them from the list of files found. dirsfound/notfound
898 # be taken; drop them from the list of files found. dirsfound/notfound
896 # aren't filtered here because they will be tested later.
899 # aren't filtered here because they will be tested later.
897 if match.anypats():
900 if match.anypats():
898 for f in list(results):
901 for f in list(results):
899 if f == b'.hg' or f in subrepos:
902 if f == b'.hg' or f in subrepos:
900 # keep sentinel to disable further out-of-repo walks
903 # keep sentinel to disable further out-of-repo walks
901 continue
904 continue
902 if not match(f):
905 if not match(f):
903 del results[f]
906 del results[f]
904
907
905 # Case insensitive filesystems cannot rely on lstat() failing to detect
908 # Case insensitive filesystems cannot rely on lstat() failing to detect
906 # a case-only rename. Prune the stat object for any file that does not
909 # a case-only rename. Prune the stat object for any file that does not
907 # match the case in the filesystem, if there are multiple files that
910 # match the case in the filesystem, if there are multiple files that
908 # normalize to the same path.
911 # normalize to the same path.
909 if match.isexact() and self._checkcase:
912 if match.isexact() and self._checkcase:
910 normed = {}
913 normed = {}
911
914
912 for f, st in pycompat.iteritems(results):
915 for f, st in pycompat.iteritems(results):
913 if st is None:
916 if st is None:
914 continue
917 continue
915
918
916 nc = util.normcase(f)
919 nc = util.normcase(f)
917 paths = normed.get(nc)
920 paths = normed.get(nc)
918
921
919 if paths is None:
922 if paths is None:
920 paths = set()
923 paths = set()
921 normed[nc] = paths
924 normed[nc] = paths
922
925
923 paths.add(f)
926 paths.add(f)
924
927
925 for norm, paths in pycompat.iteritems(normed):
928 for norm, paths in pycompat.iteritems(normed):
926 if len(paths) > 1:
929 if len(paths) > 1:
927 for path in paths:
930 for path in paths:
928 folded = self._discoverpath(
931 folded = self._discoverpath(
929 path, norm, True, None, self._map.dirfoldmap
932 path, norm, True, None, self._map.dirfoldmap
930 )
933 )
931 if path != folded:
934 if path != folded:
932 results[path] = None
935 results[path] = None
933
936
934 return results, dirsfound, dirsnotfound
937 return results, dirsfound, dirsnotfound
935
938
936 def walk(self, match, subrepos, unknown, ignored, full=True):
939 def walk(self, match, subrepos, unknown, ignored, full=True):
937 """
940 """
938 Walk recursively through the directory tree, finding all files
941 Walk recursively through the directory tree, finding all files
939 matched by match.
942 matched by match.
940
943
941 If full is False, maybe skip some known-clean files.
944 If full is False, maybe skip some known-clean files.
942
945
943 Return a dict mapping filename to stat-like object (either
946 Return a dict mapping filename to stat-like object (either
944 mercurial.osutil.stat instance or return value of os.stat()).
947 mercurial.osutil.stat instance or return value of os.stat()).
945
948
946 """
949 """
947 # full is a flag that extensions that hook into walk can use -- this
950 # full is a flag that extensions that hook into walk can use -- this
948 # implementation doesn't use it at all. This satisfies the contract
951 # implementation doesn't use it at all. This satisfies the contract
949 # because we only guarantee a "maybe".
952 # because we only guarantee a "maybe".
950
953
951 if ignored:
954 if ignored:
952 ignore = util.never
955 ignore = util.never
953 dirignore = util.never
956 dirignore = util.never
954 elif unknown:
957 elif unknown:
955 ignore = self._ignore
958 ignore = self._ignore
956 dirignore = self._dirignore
959 dirignore = self._dirignore
957 else:
960 else:
958 # if not unknown and not ignored, drop dir recursion and step 2
961 # if not unknown and not ignored, drop dir recursion and step 2
959 ignore = util.always
962 ignore = util.always
960 dirignore = util.always
963 dirignore = util.always
961
964
962 matchfn = match.matchfn
965 matchfn = match.matchfn
963 matchalways = match.always()
966 matchalways = match.always()
964 matchtdir = match.traversedir
967 matchtdir = match.traversedir
965 dmap = self._map
968 dmap = self._map
966 listdir = util.listdir
969 listdir = util.listdir
967 lstat = os.lstat
970 lstat = os.lstat
968 dirkind = stat.S_IFDIR
971 dirkind = stat.S_IFDIR
969 regkind = stat.S_IFREG
972 regkind = stat.S_IFREG
970 lnkkind = stat.S_IFLNK
973 lnkkind = stat.S_IFLNK
971 join = self._join
974 join = self._join
972
975
973 exact = skipstep3 = False
976 exact = skipstep3 = False
974 if match.isexact(): # match.exact
977 if match.isexact(): # match.exact
975 exact = True
978 exact = True
976 dirignore = util.always # skip step 2
979 dirignore = util.always # skip step 2
977 elif match.prefix(): # match.match, no patterns
980 elif match.prefix(): # match.match, no patterns
978 skipstep3 = True
981 skipstep3 = True
979
982
980 if not exact and self._checkcase:
983 if not exact and self._checkcase:
981 normalize = self._normalize
984 normalize = self._normalize
982 normalizefile = self._normalizefile
985 normalizefile = self._normalizefile
983 skipstep3 = False
986 skipstep3 = False
984 else:
987 else:
985 normalize = self._normalize
988 normalize = self._normalize
986 normalizefile = None
989 normalizefile = None
987
990
988 # step 1: find all explicit files
991 # step 1: find all explicit files
989 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
992 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
990 if matchtdir:
993 if matchtdir:
991 for d in work:
994 for d in work:
992 matchtdir(d[0])
995 matchtdir(d[0])
993 for d in dirsnotfound:
996 for d in dirsnotfound:
994 matchtdir(d)
997 matchtdir(d)
995
998
996 skipstep3 = skipstep3 and not (work or dirsnotfound)
999 skipstep3 = skipstep3 and not (work or dirsnotfound)
997 work = [d for d in work if not dirignore(d[0])]
1000 work = [d for d in work if not dirignore(d[0])]
998
1001
999 # step 2: visit subdirectories
1002 # step 2: visit subdirectories
1000 def traverse(work, alreadynormed):
1003 def traverse(work, alreadynormed):
1001 wadd = work.append
1004 wadd = work.append
1002 while work:
1005 while work:
1003 tracing.counter('dirstate.walk work', len(work))
1006 tracing.counter('dirstate.walk work', len(work))
1004 nd = work.pop()
1007 nd = work.pop()
1005 visitentries = match.visitchildrenset(nd)
1008 visitentries = match.visitchildrenset(nd)
1006 if not visitentries:
1009 if not visitentries:
1007 continue
1010 continue
1008 if visitentries == b'this' or visitentries == b'all':
1011 if visitentries == b'this' or visitentries == b'all':
1009 visitentries = None
1012 visitentries = None
1010 skip = None
1013 skip = None
1011 if nd != b'':
1014 if nd != b'':
1012 skip = b'.hg'
1015 skip = b'.hg'
1013 try:
1016 try:
1014 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1017 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1015 entries = listdir(join(nd), stat=True, skip=skip)
1018 entries = listdir(join(nd), stat=True, skip=skip)
1016 except OSError as inst:
1019 except OSError as inst:
1017 if inst.errno in (errno.EACCES, errno.ENOENT):
1020 if inst.errno in (errno.EACCES, errno.ENOENT):
1018 match.bad(
1021 match.bad(
1019 self.pathto(nd), encoding.strtolocal(inst.strerror)
1022 self.pathto(nd), encoding.strtolocal(inst.strerror)
1020 )
1023 )
1021 continue
1024 continue
1022 raise
1025 raise
1023 for f, kind, st in entries:
1026 for f, kind, st in entries:
1024 # Some matchers may return files in the visitentries set,
1027 # Some matchers may return files in the visitentries set,
1025 # instead of 'this', if the matcher explicitly mentions them
1028 # instead of 'this', if the matcher explicitly mentions them
1026 # and is not an exactmatcher. This is acceptable; we do not
1029 # and is not an exactmatcher. This is acceptable; we do not
1027 # make any hard assumptions about file-or-directory below
1030 # make any hard assumptions about file-or-directory below
1028 # based on the presence of `f` in visitentries. If
1031 # based on the presence of `f` in visitentries. If
1029 # visitchildrenset returned a set, we can always skip the
1032 # visitchildrenset returned a set, we can always skip the
1030 # entries *not* in the set it provided regardless of whether
1033 # entries *not* in the set it provided regardless of whether
1031 # they're actually a file or a directory.
1034 # they're actually a file or a directory.
1032 if visitentries and f not in visitentries:
1035 if visitentries and f not in visitentries:
1033 continue
1036 continue
1034 if normalizefile:
1037 if normalizefile:
1035 # even though f might be a directory, we're only
1038 # even though f might be a directory, we're only
1036 # interested in comparing it to files currently in the
1039 # interested in comparing it to files currently in the
1037 # dmap -- therefore normalizefile is enough
1040 # dmap -- therefore normalizefile is enough
1038 nf = normalizefile(
1041 nf = normalizefile(
1039 nd and (nd + b"/" + f) or f, True, True
1042 nd and (nd + b"/" + f) or f, True, True
1040 )
1043 )
1041 else:
1044 else:
1042 nf = nd and (nd + b"/" + f) or f
1045 nf = nd and (nd + b"/" + f) or f
1043 if nf not in results:
1046 if nf not in results:
1044 if kind == dirkind:
1047 if kind == dirkind:
1045 if not ignore(nf):
1048 if not ignore(nf):
1046 if matchtdir:
1049 if matchtdir:
1047 matchtdir(nf)
1050 matchtdir(nf)
1048 wadd(nf)
1051 wadd(nf)
1049 if nf in dmap and (matchalways or matchfn(nf)):
1052 if nf in dmap and (matchalways or matchfn(nf)):
1050 results[nf] = None
1053 results[nf] = None
1051 elif kind == regkind or kind == lnkkind:
1054 elif kind == regkind or kind == lnkkind:
1052 if nf in dmap:
1055 if nf in dmap:
1053 if matchalways or matchfn(nf):
1056 if matchalways or matchfn(nf):
1054 results[nf] = st
1057 results[nf] = st
1055 elif (matchalways or matchfn(nf)) and not ignore(
1058 elif (matchalways or matchfn(nf)) and not ignore(
1056 nf
1059 nf
1057 ):
1060 ):
1058 # unknown file -- normalize if necessary
1061 # unknown file -- normalize if necessary
1059 if not alreadynormed:
1062 if not alreadynormed:
1060 nf = normalize(nf, False, True)
1063 nf = normalize(nf, False, True)
1061 results[nf] = st
1064 results[nf] = st
1062 elif nf in dmap and (matchalways or matchfn(nf)):
1065 elif nf in dmap and (matchalways or matchfn(nf)):
1063 results[nf] = None
1066 results[nf] = None
1064
1067
1065 for nd, d in work:
1068 for nd, d in work:
1066 # alreadynormed means that processwork doesn't have to do any
1069 # alreadynormed means that processwork doesn't have to do any
1067 # expensive directory normalization
1070 # expensive directory normalization
1068 alreadynormed = not normalize or nd == d
1071 alreadynormed = not normalize or nd == d
1069 traverse([d], alreadynormed)
1072 traverse([d], alreadynormed)
1070
1073
1071 for s in subrepos:
1074 for s in subrepos:
1072 del results[s]
1075 del results[s]
1073 del results[b'.hg']
1076 del results[b'.hg']
1074
1077
1075 # step 3: visit remaining files from dmap
1078 # step 3: visit remaining files from dmap
1076 if not skipstep3 and not exact:
1079 if not skipstep3 and not exact:
1077 # If a dmap file is not in results yet, it was either
1080 # If a dmap file is not in results yet, it was either
1078 # a) not matching matchfn, b) ignored, c) missing, or d) under a
1081 # a) not matching matchfn, b) ignored, c) missing, or d) under a
1079 # symlink directory.
1082 # symlink directory.
1080 if not results and matchalways:
1083 if not results and matchalways:
1081 visit = [f for f in dmap]
1084 visit = [f for f in dmap]
1082 else:
1085 else:
1083 visit = [f for f in dmap if f not in results and matchfn(f)]
1086 visit = [f for f in dmap if f not in results and matchfn(f)]
1084 visit.sort()
1087 visit.sort()
1085
1088
1086 if unknown:
1089 if unknown:
1087 # unknown == True means we walked all dirs under the roots
1090 # unknown == True means we walked all dirs under the roots
1088 # that weren't ignored, and everything that matched was stat'ed
1091 # that weren't ignored, and everything that matched was stat'ed
1089 # and is already in results.
1092 # and is already in results.
1090 # The rest must thus be ignored or under a symlink.
1093 # The rest must thus be ignored or under a symlink.
1091 audit_path = pathutil.pathauditor(self._root, cached=True)
1094 audit_path = pathutil.pathauditor(self._root, cached=True)
1092
1095
1093 for nf in iter(visit):
1096 for nf in iter(visit):
1094 # If a stat for the same file was already added with a
1097 # If a stat for the same file was already added with a
1095 # different case, don't add one for this, since that would
1098 # different case, don't add one for this, since that would
1096 # make it appear as if the file exists under both names
1099 # make it appear as if the file exists under both names
1097 # on disk.
1100 # on disk.
1098 if (
1101 if (
1099 normalizefile
1102 normalizefile
1100 and normalizefile(nf, True, True) in results
1103 and normalizefile(nf, True, True) in results
1101 ):
1104 ):
1102 results[nf] = None
1105 results[nf] = None
1103 # Report ignored items in the dmap as long as they are not
1106 # Report ignored items in the dmap as long as they are not
1104 # under a symlink directory.
1107 # under a symlink directory.
1105 elif audit_path.check(nf):
1108 elif audit_path.check(nf):
1106 try:
1109 try:
1107 results[nf] = lstat(join(nf))
1110 results[nf] = lstat(join(nf))
1108 # file was just ignored, no links, and exists
1111 # file was just ignored, no links, and exists
1109 except OSError:
1112 except OSError:
1110 # file doesn't exist
1113 # file doesn't exist
1111 results[nf] = None
1114 results[nf] = None
1112 else:
1115 else:
1113 # It's either missing or under a symlink directory
1116 # It's either missing or under a symlink directory
1114 # which in this case we report as missing
1117 # which in this case we report as missing
1115 results[nf] = None
1118 results[nf] = None
1116 else:
1119 else:
1117 # We may not have walked the full directory tree above,
1120 # We may not have walked the full directory tree above,
1118 # so stat and check everything we missed.
1121 # so stat and check everything we missed.
1119 iv = iter(visit)
1122 iv = iter(visit)
1120 for st in util.statfiles([join(i) for i in visit]):
1123 for st in util.statfiles([join(i) for i in visit]):
1121 results[next(iv)] = st
1124 results[next(iv)] = st
1122 return results
1125 return results
1123
1126
1124 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1127 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1125 # Force Rayon (Rust parallelism library) to respect the number of
1128 # Force Rayon (Rust parallelism library) to respect the number of
1126 # workers. This is a temporary workaround until Rust code knows
1129 # workers. This is a temporary workaround until Rust code knows
1127 # how to read the config file.
1130 # how to read the config file.
1128 numcpus = self._ui.configint(b"worker", b"numcpus")
1131 numcpus = self._ui.configint(b"worker", b"numcpus")
1129 if numcpus is not None:
1132 if numcpus is not None:
1130 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1133 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1131
1134
1132 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1135 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1133 if not workers_enabled:
1136 if not workers_enabled:
1134 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1137 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1135
1138
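The worker-count handling above is just an environment-variable handshake, since the Rust extension reads RAYON_NUM_THREADS itself. A plain-Python sketch of the same pattern, outside Mercurial's encoding.environ wrapper (the function name and arguments are illustrative only):

    import os

    def configure_rayon(numcpus=None, workers_enabled=True):
        """Prefer an explicitly configured thread count; force a single thread
        when workers are disabled, mirroring the logic above."""
        if numcpus is not None:
            os.environ.setdefault('RAYON_NUM_THREADS', '%d' % numcpus)
        if not workers_enabled:
            os.environ['RAYON_NUM_THREADS'] = '1'

    configure_rayon(numcpus=4)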
1136 (
1139 (
1137 lookup,
1140 lookup,
1138 modified,
1141 modified,
1139 added,
1142 added,
1140 removed,
1143 removed,
1141 deleted,
1144 deleted,
1142 clean,
1145 clean,
1143 ignored,
1146 ignored,
1144 unknown,
1147 unknown,
1145 warnings,
1148 warnings,
1146 bad,
1149 bad,
1147 traversed,
1150 traversed,
1148 dirty,
1151 dirty,
1149 ) = rustmod.status(
1152 ) = rustmod.status(
1150 self._map._rustmap,
1153 self._map._rustmap,
1151 matcher,
1154 matcher,
1152 self._rootdir,
1155 self._rootdir,
1153 self._ignorefiles(),
1156 self._ignorefiles(),
1154 self._checkexec,
1157 self._checkexec,
1155 self._lastnormaltime,
1158 self._lastnormaltime,
1156 bool(list_clean),
1159 bool(list_clean),
1157 bool(list_ignored),
1160 bool(list_ignored),
1158 bool(list_unknown),
1161 bool(list_unknown),
1159 bool(matcher.traversedir),
1162 bool(matcher.traversedir),
1160 )
1163 )
1161
1164
1162 self._dirty |= dirty
1165 self._dirty |= dirty
1163
1166
1164 if matcher.traversedir:
1167 if matcher.traversedir:
1165 for dir in traversed:
1168 for dir in traversed:
1166 matcher.traversedir(dir)
1169 matcher.traversedir(dir)
1167
1170
1168 if self._ui.warn:
1171 if self._ui.warn:
1169 for item in warnings:
1172 for item in warnings:
1170 if isinstance(item, tuple):
1173 if isinstance(item, tuple):
1171 file_path, syntax = item
1174 file_path, syntax = item
1172 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1175 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1173 file_path,
1176 file_path,
1174 syntax,
1177 syntax,
1175 )
1178 )
1176 self._ui.warn(msg)
1179 self._ui.warn(msg)
1177 else:
1180 else:
1178 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1181 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1179 self._ui.warn(
1182 self._ui.warn(
1180 msg
1183 msg
1181 % (
1184 % (
1182 pathutil.canonpath(
1185 pathutil.canonpath(
1183 self._rootdir, self._rootdir, item
1186 self._rootdir, self._rootdir, item
1184 ),
1187 ),
1185 b"No such file or directory",
1188 b"No such file or directory",
1186 )
1189 )
1187 )
1190 )
1188
1191
1189 for (fn, message) in bad:
1192 for (fn, message) in bad:
1190 matcher.bad(fn, encoding.strtolocal(message))
1193 matcher.bad(fn, encoding.strtolocal(message))
1191
1194
1192 status = scmutil.status(
1195 status = scmutil.status(
1193 modified=modified,
1196 modified=modified,
1194 added=added,
1197 added=added,
1195 removed=removed,
1198 removed=removed,
1196 deleted=deleted,
1199 deleted=deleted,
1197 unknown=unknown,
1200 unknown=unknown,
1198 ignored=ignored,
1201 ignored=ignored,
1199 clean=clean,
1202 clean=clean,
1200 )
1203 )
1201 return (lookup, status)
1204 return (lookup, status)
1202
1205
1203 def status(self, match, subrepos, ignored, clean, unknown):
1206 def status(self, match, subrepos, ignored, clean, unknown):
1204 """Determine the status of the working copy relative to the
1207 """Determine the status of the working copy relative to the
1205 dirstate and return a pair of (unsure, status), where status is of type
1208 dirstate and return a pair of (unsure, status), where status is of type
1206 scmutil.status and:
1209 scmutil.status and:
1207
1210
1208 unsure:
1211 unsure:
1209 files that might have been modified since the dirstate was
1212 files that might have been modified since the dirstate was
1210 written, but need to be read to be sure (size is the same
1213 written, but need to be read to be sure (size is the same
1211 but mtime differs)
1214 but mtime differs)
1212 status.modified:
1215 status.modified:
1213 files that have definitely been modified since the dirstate
1216 files that have definitely been modified since the dirstate
1214 was written (different size or mode)
1217 was written (different size or mode)
1215 status.clean:
1218 status.clean:
1216 files that have definitely not been modified since the
1219 files that have definitely not been modified since the
1217 dirstate was written
1220 dirstate was written
1218 """
1221 """
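To make the unsure/modified/clean split concrete, here is a toy classifier following the docstring above; it only compares (size, mtime) pairs and deliberately ignores mode bits, copy records, and the ambiguous-mtime handling that the real code adds:

    def classify(recorded, on_disk):
        """recorded and on_disk are (size, mtime) pairs."""
        rsize, rtime = recorded
        size, mtime = on_disk
        if rsize != size:
            return 'modified'  # definitely changed since the dirstate was written
        if rtime != mtime:
            return 'unsure'    # same size, different mtime: contents must be read
        return 'clean'

    assert classify((10, 100), (12, 100)) == 'modified'
    assert classify((10, 100), (10, 101)) == 'unsure'
    assert classify((10, 100), (10, 100)) == 'clean'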
1219 listignored, listclean, listunknown = ignored, clean, unknown
1222 listignored, listclean, listunknown = ignored, clean, unknown
1220 lookup, modified, added, unknown, ignored = [], [], [], [], []
1223 lookup, modified, added, unknown, ignored = [], [], [], [], []
1221 removed, deleted, clean = [], [], []
1224 removed, deleted, clean = [], [], []
1222
1225
1223 dmap = self._map
1226 dmap = self._map
1224 dmap.preload()
1227 dmap.preload()
1225
1228
1226 use_rust = True
1229 use_rust = True
1227
1230
1228 allowed_matchers = (
1231 allowed_matchers = (
1229 matchmod.alwaysmatcher,
1232 matchmod.alwaysmatcher,
1230 matchmod.exactmatcher,
1233 matchmod.exactmatcher,
1231 matchmod.includematcher,
1234 matchmod.includematcher,
1232 )
1235 )
1233
1236
1234 if rustmod is None:
1237 if rustmod is None:
1235 use_rust = False
1238 use_rust = False
1236 elif self._checkcase:
1239 elif self._checkcase:
1237 # Case-insensitive filesystems are not handled yet
1240 # Case-insensitive filesystems are not handled yet
1238 use_rust = False
1241 use_rust = False
1239 elif subrepos:
1242 elif subrepos:
1240 use_rust = False
1243 use_rust = False
1241 elif sparse.enabled:
1244 elif sparse.enabled:
1242 use_rust = False
1245 use_rust = False
1243 elif not isinstance(match, allowed_matchers):
1246 elif not isinstance(match, allowed_matchers):
1244 # Some matchers have yet to be implemented
1247 # Some matchers have yet to be implemented
1245 use_rust = False
1248 use_rust = False
1246
1249
1247 if use_rust:
1250 if use_rust:
1248 try:
1251 try:
1249 return self._rust_status(
1252 return self._rust_status(
1250 match, listclean, listignored, listunknown
1253 match, listclean, listignored, listunknown
1251 )
1254 )
1252 except rustmod.FallbackError:
1255 except rustmod.FallbackError:
1253 pass
1256 pass
1254
1257
1255 def noop(f):
1258 def noop(f):
1256 pass
1259 pass
1257
1260
1258 dcontains = dmap.__contains__
1261 dcontains = dmap.__contains__
1259 dget = dmap.__getitem__
1262 dget = dmap.__getitem__
1260 ladd = lookup.append # aka "unsure"
1263 ladd = lookup.append # aka "unsure"
1261 madd = modified.append
1264 madd = modified.append
1262 aadd = added.append
1265 aadd = added.append
1263 uadd = unknown.append if listunknown else noop
1266 uadd = unknown.append if listunknown else noop
1264 iadd = ignored.append if listignored else noop
1267 iadd = ignored.append if listignored else noop
1265 radd = removed.append
1268 radd = removed.append
1266 dadd = deleted.append
1269 dadd = deleted.append
1267 cadd = clean.append if listclean else noop
1270 cadd = clean.append if listclean else noop
1268 mexact = match.exact
1271 mexact = match.exact
1269 dirignore = self._dirignore
1272 dirignore = self._dirignore
1270 checkexec = self._checkexec
1273 checkexec = self._checkexec
1271 copymap = self._map.copymap
1274 copymap = self._map.copymap
1272 lastnormaltime = self._lastnormaltime
1275 lastnormaltime = self._lastnormaltime
1273
1276
1274 # We need to do full walks when either
1277 # We need to do full walks when either
1275 # - we're listing all clean files, or
1278 # - we're listing all clean files, or
1276 # - match.traversedir does something, because match.traversedir should
1279 # - match.traversedir does something, because match.traversedir should
1277 # be called for every dir in the working dir
1280 # be called for every dir in the working dir
1278 full = listclean or match.traversedir is not None
1281 full = listclean or match.traversedir is not None
1279 for fn, st in pycompat.iteritems(
1282 for fn, st in pycompat.iteritems(
1280 self.walk(match, subrepos, listunknown, listignored, full=full)
1283 self.walk(match, subrepos, listunknown, listignored, full=full)
1281 ):
1284 ):
1282 if not dcontains(fn):
1285 if not dcontains(fn):
1283 if (listignored or mexact(fn)) and dirignore(fn):
1286 if (listignored or mexact(fn)) and dirignore(fn):
1284 if listignored:
1287 if listignored:
1285 iadd(fn)
1288 iadd(fn)
1286 else:
1289 else:
1287 uadd(fn)
1290 uadd(fn)
1288 continue
1291 continue
1289
1292
1290 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1293 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1291 # written like that for performance reasons. dmap[fn] is not a
1294 # written like that for performance reasons. dmap[fn] is not a
1292 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1295 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1293 # opcode has fast paths when the value to be unpacked is a tuple or
1296 # opcode has fast paths when the value to be unpacked is a tuple or
1294 # a list, but falls back to creating a full-fledged iterator in
1297 # a list, but falls back to creating a full-fledged iterator in
1295 # general. That is much slower than simply accessing and storing the
1298 # general. That is much slower than simply accessing and storing the
1296 # tuple members one by one.
1299 # tuple members one by one.
1297 t = dget(fn)
1300 t = dget(fn)
1298 state = t[0]
1301 state = t[0]
1299 mode = t[1]
1302 mode = t[1]
1300 size = t[2]
1303 size = t[2]
1301 time = t[3]
1304 time = t[3]
1302
1305
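A small standalone illustration of the two access patterns contrasted in the comment above; the difference only matters when the dirstate entries are C extension objects rather than plain tuples, so no particular timing outcome is implied here:

    from timeit import timeit

    entry = ('n', 0o644, 128, 1630000000)

    def by_index(t=entry):
        state, mode, size, mtime = t[0], t[1], t[2], t[3]
        return state

    def by_unpack(t=entry):
        state, mode, size, mtime = t  # UNPACK_SEQUENCE path
        return state

    print(timeit(by_index, number=100000), timeit(by_unpack, number=100000))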
1303 if not st and state in b"nma":
1306 if not st and state in b"nma":
1304 dadd(fn)
1307 dadd(fn)
1305 elif state == b'n':
1308 elif state == b'n':
1306 if (
1309 if (
1307 size >= 0
1310 size >= 0
1308 and (
1311 and (
1309 (size != st.st_size and size != st.st_size & _rangemask)
1312 (size != st.st_size and size != st.st_size & _rangemask)
1310 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1313 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1311 )
1314 )
1312 or size == FROM_P2 # other parent
1315 or size == FROM_P2 # other parent
1313 or fn in copymap
1316 or fn in copymap
1314 ):
1317 ):
1315 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1318 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1316 # issue6456: Size returned may be longer due to
1319 # issue6456: Size returned may be longer due to
1317 # encryption on EXT-4 fscrypt, undecided.
1320 # encryption on EXT-4 fscrypt, undecided.
1318 ladd(fn)
1321 ladd(fn)
1319 else:
1322 else:
1320 madd(fn)
1323 madd(fn)
1321 elif (
1324 elif (
1322 time != st[stat.ST_MTIME]
1325 time != st[stat.ST_MTIME]
1323 and time != st[stat.ST_MTIME] & _rangemask
1326 and time != st[stat.ST_MTIME] & _rangemask
1324 ):
1327 ):
1325 ladd(fn)
1328 ladd(fn)
1326 elif st[stat.ST_MTIME] == lastnormaltime:
1329 elif st[stat.ST_MTIME] == lastnormaltime:
1327 # fn may have just been marked as normal and it may have
1330 # fn may have just been marked as normal and it may have
1328 # changed in the same second without changing its size.
1331 # changed in the same second without changing its size.
1329 # This can happen if we quickly do multiple commits.
1332 # This can happen if we quickly do multiple commits.
1330 # Force lookup, so we don't miss such a racy file change.
1333 # Force lookup, so we don't miss such a racy file change.
1331 ladd(fn)
1334 ladd(fn)
1332 elif listclean:
1335 elif listclean:
1333 cadd(fn)
1336 cadd(fn)
1334 elif state == b'm':
1337 elif state == b'm':
1335 madd(fn)
1338 madd(fn)
1336 elif state == b'a':
1339 elif state == b'a':
1337 aadd(fn)
1340 aadd(fn)
1338 elif state == b'r':
1341 elif state == b'r':
1339 radd(fn)
1342 radd(fn)
1340 status = scmutil.status(
1343 status = scmutil.status(
1341 modified, added, removed, deleted, unknown, ignored, clean
1344 modified, added, removed, deleted, unknown, ignored, clean
1342 )
1345 )
1343 return (lookup, status)
1346 return (lookup, status)
1344
1347
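The size and mtime comparisons in the loop above allow for the fact that the on-disk dirstate stores those fields in 31 bits, so a recorded value may be the 0x7FFFFFFF-masked form of the real one. A minimal standalone sketch of that check (the helper name is illustrative, not part of dirstate):

    RANGEMASK = 0x7FFFFFFF  # dirstate records size/mtime in 31 bits

    def looks_unchanged(recorded, actual):
        """True if a recorded 31-bit value is compatible with the actual value."""
        return recorded == actual or recorded == (actual & RANGEMASK)

    # a value that overflows 31 bits still matches its truncated record
    assert looks_unchanged(2500000000 & RANGEMASK, 2500000000)
    assert not looks_unchanged(1000, 2000)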
1345 def matches(self, match):
1348 def matches(self, match):
1346 """
1349 """
1347 return files in the dirstate (in whatever state) filtered by match
1350 return files in the dirstate (in whatever state) filtered by match
1348 """
1351 """
1349 dmap = self._map
1352 dmap = self._map
1350 if rustmod is not None:
1353 if rustmod is not None:
1351 dmap = self._map._rustmap
1354 dmap = self._map._rustmap
1352
1355
1353 if match.always():
1356 if match.always():
1354 return dmap.keys()
1357 return dmap.keys()
1355 files = match.files()
1358 files = match.files()
1356 if match.isexact():
1359 if match.isexact():
1357 # fast path -- filter the other way around, since typically files is
1360 # fast path -- filter the other way around, since typically files is
1358 # much smaller than dmap
1361 # much smaller than dmap
1359 return [f for f in files if f in dmap]
1362 return [f for f in files if f in dmap]
1360 if match.prefix() and all(fn in dmap for fn in files):
1363 if match.prefix() and all(fn in dmap for fn in files):
1361 # fast path -- all the values are known to be files, so just return
1364 # fast path -- all the values are known to be files, so just return
1362 # that
1365 # that
1363 return list(files)
1366 return list(files)
1364 return [f for f in dmap if match(f)]
1367 return [f for f in dmap if match(f)]
1365
1368
1366 def _actualfilename(self, tr):
1369 def _actualfilename(self, tr):
1367 if tr:
1370 if tr:
1368 return self._pendingfilename
1371 return self._pendingfilename
1369 else:
1372 else:
1370 return self._filename
1373 return self._filename
1371
1374
1372 def savebackup(self, tr, backupname):
1375 def savebackup(self, tr, backupname):
1373 '''Save current dirstate into backup file'''
1376 '''Save current dirstate into backup file'''
1374 filename = self._actualfilename(tr)
1377 filename = self._actualfilename(tr)
1375 assert backupname != filename
1378 assert backupname != filename
1376
1379
1377 # use '_writedirstate' instead of 'write' to make sure changes are written out,
1380 # use '_writedirstate' instead of 'write' to make sure changes are written out,
1378 # because the latter skips writing when a transaction is running.
1381 # because the latter skips writing when a transaction is running.
1379 # The output file will be used to create a backup of the dirstate at this point.
1382 # The output file will be used to create a backup of the dirstate at this point.
1380 if self._dirty or not self._opener.exists(filename):
1383 if self._dirty or not self._opener.exists(filename):
1381 self._writedirstate(
1384 self._writedirstate(
1382 self._opener(filename, b"w", atomictemp=True, checkambig=True)
1385 self._opener(filename, b"w", atomictemp=True, checkambig=True)
1383 )
1386 )
1384
1387
1385 if tr:
1388 if tr:
1386 # ensure that subsequent tr.writepending returns True for
1389 # ensure that subsequent tr.writepending returns True for
1387 # changes written out above, even if dirstate is never
1390 # changes written out above, even if dirstate is never
1388 # changed after this
1391 # changed after this
1389 tr.addfilegenerator(
1392 tr.addfilegenerator(
1390 b'dirstate',
1393 b'dirstate',
1391 (self._filename,),
1394 (self._filename,),
1392 self._writedirstate,
1395 self._writedirstate,
1393 location=b'plain',
1396 location=b'plain',
1394 )
1397 )
1395
1398
1396 # ensure that pending file written above is unlinked at
1399 # ensure that pending file written above is unlinked at
1397 # failure, even if tr.writepending isn't invoked until the
1400 # failure, even if tr.writepending isn't invoked until the
1398 # end of this transaction
1401 # end of this transaction
1399 tr.registertmp(filename, location=b'plain')
1402 tr.registertmp(filename, location=b'plain')
1400
1403
1401 self._opener.tryunlink(backupname)
1404 self._opener.tryunlink(backupname)
1402 # hardlink backup is okay because _writedirstate is always called
1405 # hardlink backup is okay because _writedirstate is always called
1403 # with an "atomictemp=True" file.
1406 # with an "atomictemp=True" file.
1404 util.copyfile(
1407 util.copyfile(
1405 self._opener.join(filename),
1408 self._opener.join(filename),
1406 self._opener.join(backupname),
1409 self._opener.join(backupname),
1407 hardlink=True,
1410 hardlink=True,
1408 )
1411 )
1409
1412
1410 def restorebackup(self, tr, backupname):
1413 def restorebackup(self, tr, backupname):
1411 '''Restore dirstate by backup file'''
1414 '''Restore dirstate by backup file'''
1412 # this "invalidate()" prevents "wlock.release()" from writing
1415 # this "invalidate()" prevents "wlock.release()" from writing
1413 # changes of dirstate out after restoring from backup file
1416 # changes of dirstate out after restoring from backup file
1414 self.invalidate()
1417 self.invalidate()
1415 filename = self._actualfilename(tr)
1418 filename = self._actualfilename(tr)
1416 o = self._opener
1419 o = self._opener
1417 if util.samefile(o.join(backupname), o.join(filename)):
1420 if util.samefile(o.join(backupname), o.join(filename)):
1418 o.unlink(backupname)
1421 o.unlink(backupname)
1419 else:
1422 else:
1420 o.rename(backupname, filename, checkambig=True)
1423 o.rename(backupname, filename, checkambig=True)
1421
1424
1422 def clearbackup(self, tr, backupname):
1425 def clearbackup(self, tr, backupname):
1423 '''Clear backup file'''
1426 '''Clear backup file'''
1424 self._opener.unlink(backupname)
1427 self._opener.unlink(backupname)
1425
1428
1426
1429
1427 class dirstatemap(object):
1430 class dirstatemap(object):
1428 """Map encapsulating the dirstate's contents.
1431 """Map encapsulating the dirstate's contents.
1429
1432
1430 The dirstate contains the following state:
1433 The dirstate contains the following state:
1431
1434
1432 - `identity` is the identity of the dirstate file, which can be used to
1435 - `identity` is the identity of the dirstate file, which can be used to
1433 detect when changes have occurred to the dirstate file.
1436 detect when changes have occurred to the dirstate file.
1434
1437
1435 - `parents` is a pair containing the parents of the working copy. The
1438 - `parents` is a pair containing the parents of the working copy. The
1436 parents are updated by calling `setparents`.
1439 parents are updated by calling `setparents`.
1437
1440
1438 - the state map maps filenames to tuples of (state, mode, size, mtime),
1441 - the state map maps filenames to tuples of (state, mode, size, mtime),
1439 where state is a single character representing 'normal', 'added',
1442 where state is a single character representing 'normal', 'added',
1440 'removed', or 'merged'. It is read by treating the dirstate as a
1443 'removed', or 'merged'. It is read by treating the dirstate as a
1441 dict. File state is updated by calling the `addfile`, `removefile` and
1444 dict. File state is updated by calling the `addfile`, `removefile` and
1442 `dropfile` methods.
1445 `dropfile` methods.
1443
1446
1444 - `copymap` maps destination filenames to their source filename.
1447 - `copymap` maps destination filenames to their source filename.
1445
1448
1446 The dirstate also provides the following views onto the state:
1449 The dirstate also provides the following views onto the state:
1447
1450
1448 - `nonnormalset` is a set of the filenames that have state other
1451 - `nonnormalset` is a set of the filenames that have state other
1449 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1452 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1450
1453
1451 - `otherparentset` is a set of the filenames that are marked as coming
1454 - `otherparentset` is a set of the filenames that are marked as coming
1452 from the second parent when the dirstate is currently being merged.
1455 from the second parent when the dirstate is currently being merged.
1453
1456
1454 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1457 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1455 form that they appear as in the dirstate.
1458 form that they appear as in the dirstate.
1456
1459
1457 - `dirfoldmap` is a dict mapping normalized directory names to the
1460 - `dirfoldmap` is a dict mapping normalized directory names to the
1458 denormalized form that they appear as in the dirstate.
1461 denormalized form that they appear as in the dirstate.
1459 """
1462 """
1460
1463
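To make the shape of this state concrete, here is a toy example of the documented pieces for a tiny working copy; the values are purely illustrative, and the real map stores dirstatetuple objects rather than plain tuples:

    # filename -> (state, mode, size, mtime)
    state_map = {
        b'README':     (b'n', 0o100644, 120, 1630000000),  # tracked and clean
        b'newfile.py': (b'a', 0o100644,  -1,         -1),  # added
        b'oldfile.py': (b'r', 0,          0,          0),  # removed
    }
    copymap = {b'newfile.py': b'template.py'}  # destination -> source

    # the nonnormalset view: any state other than 'n', or 'n' with mtime == -1
    nonnormalset = {
        f for f, (state, mode, size, mtime) in state_map.items()
        if state != b'n' or mtime == -1
    }
    assert nonnormalset == {b'newfile.py', b'oldfile.py'}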
1461 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
1464 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
1462 self._ui = ui
1465 self._ui = ui
1463 self._opener = opener
1466 self._opener = opener
1464 self._root = root
1467 self._root = root
1465 self._filename = b'dirstate'
1468 self._filename = b'dirstate'
1466 self._nodelen = 20
1469 self._nodelen = 20
1467 self._nodeconstants = nodeconstants
1470 self._nodeconstants = nodeconstants
1468 assert (
1471 assert (
1469 not use_dirstate_v2
1472 not use_dirstate_v2
1470 ), "should have detected unsupported requirement"
1473 ), "should have detected unsupported requirement"
1471
1474
1472 self._parents = None
1475 self._parents = None
1473 self._dirtyparents = False
1476 self._dirtyparents = False
1474
1477
1475 # for consistent view between _pl() and _read() invocations
1478 # for consistent view between _pl() and _read() invocations
1476 self._pendingmode = None
1479 self._pendingmode = None
1477
1480
1478 @propertycache
1481 @propertycache
1479 def _map(self):
1482 def _map(self):
1480 self._map = {}
1483 self._map = {}
1481 self.read()
1484 self.read()
1482 return self._map
1485 return self._map
1483
1486
1484 @propertycache
1487 @propertycache
1485 def copymap(self):
1488 def copymap(self):
1486 self.copymap = {}
1489 self.copymap = {}
1487 self._map
1490 self._map
1488 return self.copymap
1491 return self.copymap
1489
1492
1490 def directories(self):
1493 def directories(self):
1491 # Rust / dirstate-v2 only
1494 # Rust / dirstate-v2 only
1492 return []
1495 return []
1493
1496
1494 def clear(self):
1497 def clear(self):
1495 self._map.clear()
1498 self._map.clear()
1496 self.copymap.clear()
1499 self.copymap.clear()
1497 self.setparents(self._nodeconstants.nullid, self._nodeconstants.nullid)
1500 self.setparents(self._nodeconstants.nullid, self._nodeconstants.nullid)
1498 util.clearcachedproperty(self, b"_dirs")
1501 util.clearcachedproperty(self, b"_dirs")
1499 util.clearcachedproperty(self, b"_alldirs")
1502 util.clearcachedproperty(self, b"_alldirs")
1500 util.clearcachedproperty(self, b"filefoldmap")
1503 util.clearcachedproperty(self, b"filefoldmap")
1501 util.clearcachedproperty(self, b"dirfoldmap")
1504 util.clearcachedproperty(self, b"dirfoldmap")
1502 util.clearcachedproperty(self, b"nonnormalset")
1505 util.clearcachedproperty(self, b"nonnormalset")
1503 util.clearcachedproperty(self, b"otherparentset")
1506 util.clearcachedproperty(self, b"otherparentset")
1504
1507
1505 def items(self):
1508 def items(self):
1506 return pycompat.iteritems(self._map)
1509 return pycompat.iteritems(self._map)
1507
1510
1508 # forward for python2,3 compat
1511 # forward for python2,3 compat
1509 iteritems = items
1512 iteritems = items
1510
1513
1511 def __len__(self):
1514 def __len__(self):
1512 return len(self._map)
1515 return len(self._map)
1513
1516
1514 def __iter__(self):
1517 def __iter__(self):
1515 return iter(self._map)
1518 return iter(self._map)
1516
1519
1517 def get(self, key, default=None):
1520 def get(self, key, default=None):
1518 return self._map.get(key, default)
1521 return self._map.get(key, default)
1519
1522
1520 def __contains__(self, key):
1523 def __contains__(self, key):
1521 return key in self._map
1524 return key in self._map
1522
1525
1523 def __getitem__(self, key):
1526 def __getitem__(self, key):
1524 return self._map[key]
1527 return self._map[key]
1525
1528
1526 def keys(self):
1529 def keys(self):
1527 return self._map.keys()
1530 return self._map.keys()
1528
1531
1529 def preload(self):
1532 def preload(self):
1530 """Loads the underlying data, if it's not already loaded"""
1533 """Loads the underlying data, if it's not already loaded"""
1531 self._map
1534 self._map
1532
1535
1533 def addfile(self, f, oldstate, state, mode, size, mtime):
1536 def addfile(self, f, oldstate, state, mode, size, mtime):
1534 """Add a tracked file to the dirstate."""
1537 """Add a tracked file to the dirstate."""
1535 if oldstate in b"?r" and "_dirs" in self.__dict__:
1538 if oldstate in b"?r" and "_dirs" in self.__dict__:
1536 self._dirs.addpath(f)
1539 self._dirs.addpath(f)
1537 if oldstate == b"?" and "_alldirs" in self.__dict__:
1540 if oldstate == b"?" and "_alldirs" in self.__dict__:
1538 self._alldirs.addpath(f)
1541 self._alldirs.addpath(f)
1539 self._map[f] = dirstatetuple(state, mode, size, mtime)
1542 self._map[f] = dirstatetuple(state, mode, size, mtime)
1540 if state != b'n' or mtime == -1:
1543 if state != b'n' or mtime == AMBIGUOUS_TIME:
1541 self.nonnormalset.add(f)
1544 self.nonnormalset.add(f)
1542 if size == FROM_P2:
1545 if size == FROM_P2:
1543 self.otherparentset.add(f)
1546 self.otherparentset.add(f)
1544
1547
1545 def removefile(self, f, oldstate, size):
1548 def removefile(self, f, oldstate, size):
1546 """
1549 """
1547 Mark a file as removed in the dirstate.
1550 Mark a file as removed in the dirstate.
1548
1551
1549 The `size` parameter is used to store sentinel values that indicate
1552 The `size` parameter is used to store sentinel values that indicate
1550 the file's previous state. In the future, we should refactor this
1553 the file's previous state. In the future, we should refactor this
1551 to be more explicit about what that state is.
1554 to be more explicit about what that state is.
1552 """
1555 """
1553 if oldstate not in b"?r" and "_dirs" in self.__dict__:
1556 if oldstate not in b"?r" and "_dirs" in self.__dict__:
1554 self._dirs.delpath(f)
1557 self._dirs.delpath(f)
1555 if oldstate == b"?" and "_alldirs" in self.__dict__:
1558 if oldstate == b"?" and "_alldirs" in self.__dict__:
1556 self._alldirs.addpath(f)
1559 self._alldirs.addpath(f)
1557 if "filefoldmap" in self.__dict__:
1560 if "filefoldmap" in self.__dict__:
1558 normed = util.normcase(f)
1561 normed = util.normcase(f)
1559 self.filefoldmap.pop(normed, None)
1562 self.filefoldmap.pop(normed, None)
1560 self._map[f] = dirstatetuple(b'r', 0, size, 0)
1563 self._map[f] = dirstatetuple(b'r', 0, size, 0)
1561 self.nonnormalset.add(f)
1564 self.nonnormalset.add(f)
1562
1565
1563 def dropfile(self, f, oldstate):
1566 def dropfile(self, f, oldstate):
1564 """
1567 """
1565 Remove a file from the dirstate. Returns True if the file was
1568 Remove a file from the dirstate. Returns True if the file was
1566 previously recorded.
1569 previously recorded.
1567 """
1570 """
1568 exists = self._map.pop(f, None) is not None
1571 exists = self._map.pop(f, None) is not None
1569 if exists:
1572 if exists:
1570 if oldstate != b"r" and "_dirs" in self.__dict__:
1573 if oldstate != b"r" and "_dirs" in self.__dict__:
1571 self._dirs.delpath(f)
1574 self._dirs.delpath(f)
1572 if "_alldirs" in self.__dict__:
1575 if "_alldirs" in self.__dict__:
1573 self._alldirs.delpath(f)
1576 self._alldirs.delpath(f)
1574 if "filefoldmap" in self.__dict__:
1577 if "filefoldmap" in self.__dict__:
1575 normed = util.normcase(f)
1578 normed = util.normcase(f)
1576 self.filefoldmap.pop(normed, None)
1579 self.filefoldmap.pop(normed, None)
1577 self.nonnormalset.discard(f)
1580 self.nonnormalset.discard(f)
1578 return exists
1581 return exists
1579
1582
1580 def clearambiguoustimes(self, files, now):
1583 def clearambiguoustimes(self, files, now):
1581 for f in files:
1584 for f in files:
1582 e = self.get(f)
1585 e = self.get(f)
1583 if e is not None and e[0] == b'n' and e[3] == now:
1586 if e is not None and e[0] == b'n' and e[3] == now:
1584 self._map[f] = dirstatetuple(e[0], e[1], e[2], -1)
1587 self._map[f] = dirstatetuple(e[0], e[1], e[2], AMBIGUOUS_TIME)
1585 self.nonnormalset.add(f)
1588 self.nonnormalset.add(f)
1586
1589
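The method above is where the new AMBIGUOUS_TIME sentinel gets written back: when a file's recorded mtime equals the timestamp at which the dirstate is being written, a later change in the same second would be invisible to a size+mtime check, so the entry is degraded and the file joins nonnormalset. A dict-based toy model of that decision (not the real dirstatetuple machinery):

    AMBIGUOUS_TIME = -1

    def clear_ambiguous(entries, files, now):
        """entries maps name -> [state, mode, size, mtime]; returns names marked."""
        marked = set()
        for f in files:
            e = entries.get(f)
            if e is not None and e[0] == 'n' and e[3] == now:
                e[3] = AMBIGUOUS_TIME  # force a content comparison next time
                marked.add(f)
        return marked

    entries = {'a.txt': ['n', 0o644, 12, 1000], 'b.txt': ['n', 0o644, 7, 999]}
    assert clear_ambiguous(entries, ['a.txt', 'b.txt'], now=1000) == {'a.txt'}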
1587 def nonnormalentries(self):
1590 def nonnormalentries(self):
1588 '''Compute the nonnormal dirstate entries from the dmap'''
1591 '''Compute the nonnormal dirstate entries from the dmap'''
1589 try:
1592 try:
1590 return parsers.nonnormalotherparententries(self._map)
1593 return parsers.nonnormalotherparententries(self._map)
1591 except AttributeError:
1594 except AttributeError:
1592 nonnorm = set()
1595 nonnorm = set()
1593 otherparent = set()
1596 otherparent = set()
1594 for fname, e in pycompat.iteritems(self._map):
1597 for fname, e in pycompat.iteritems(self._map):
1595 if e[0] != b'n' or e[3] == -1:
1598 if e[0] != b'n' or e[3] == AMBIGUOUS_TIME:
1596 nonnorm.add(fname)
1599 nonnorm.add(fname)
1597 if e[0] == b'n' and e[2] == FROM_P2:
1600 if e[0] == b'n' and e[2] == FROM_P2:
1598 otherparent.add(fname)
1601 otherparent.add(fname)
1599 return nonnorm, otherparent
1602 return nonnorm, otherparent
1600
1603
1601 @propertycache
1604 @propertycache
1602 def filefoldmap(self):
1605 def filefoldmap(self):
1603 """Returns a dictionary mapping normalized case paths to their
1606 """Returns a dictionary mapping normalized case paths to their
1604 non-normalized versions.
1607 non-normalized versions.
1605 """
1608 """
1606 try:
1609 try:
1607 makefilefoldmap = parsers.make_file_foldmap
1610 makefilefoldmap = parsers.make_file_foldmap
1608 except AttributeError:
1611 except AttributeError:
1609 pass
1612 pass
1610 else:
1613 else:
1611 return makefilefoldmap(
1614 return makefilefoldmap(
1612 self._map, util.normcasespec, util.normcasefallback
1615 self._map, util.normcasespec, util.normcasefallback
1613 )
1616 )
1614
1617
1615 f = {}
1618 f = {}
1616 normcase = util.normcase
1619 normcase = util.normcase
1617 for name, s in pycompat.iteritems(self._map):
1620 for name, s in pycompat.iteritems(self._map):
1618 if s[0] != b'r':
1621 if s[0] != b'r':
1619 f[normcase(name)] = name
1622 f[normcase(name)] = name
1620 f[b'.'] = b'.' # prevents useless util.fspath() invocation
1623 f[b'.'] = b'.' # prevents useless util.fspath() invocation
1621 return f
1624 return f
1622
1625
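A standalone illustration of the fold-map idea, using os.path.normcase as a stand-in for util.normcase (which additionally deals with filesystem-specific encoding); the paths and names here are invented for the example:

    import os.path

    tracked = ['Makefile', 'docs/Readme.TXT', 'src/Main.py']
    foldmap = {os.path.normcase(p): p for p in tracked}

    def as_recorded(path):
        """Return the spelling the path has in the map, whatever case was typed."""
        return foldmap.get(os.path.normcase(path))

    print(as_recorded('makefile'))  # 'Makefile' on case-insensitive platforms, else None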
1623 def hastrackeddir(self, d):
1626 def hastrackeddir(self, d):
1624 """
1627 """
1625 Returns True if the dirstate contains a tracked (not removed) file
1628 Returns True if the dirstate contains a tracked (not removed) file
1626 in this directory.
1629 in this directory.
1627 """
1630 """
1628 return d in self._dirs
1631 return d in self._dirs
1629
1632
1630 def hasdir(self, d):
1633 def hasdir(self, d):
1631 """
1634 """
1632 Returns True if the dirstate contains a file (tracked or removed)
1635 Returns True if the dirstate contains a file (tracked or removed)
1633 in this directory.
1636 in this directory.
1634 """
1637 """
1635 return d in self._alldirs
1638 return d in self._alldirs
1636
1639
1637 @propertycache
1640 @propertycache
1638 def _dirs(self):
1641 def _dirs(self):
1639 return pathutil.dirs(self._map, b'r')
1642 return pathutil.dirs(self._map, b'r')
1640
1643
1641 @propertycache
1644 @propertycache
1642 def _alldirs(self):
1645 def _alldirs(self):
1643 return pathutil.dirs(self._map)
1646 return pathutil.dirs(self._map)
1644
1647
1645 def _opendirstatefile(self):
1648 def _opendirstatefile(self):
1646 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1649 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1647 if self._pendingmode is not None and self._pendingmode != mode:
1650 if self._pendingmode is not None and self._pendingmode != mode:
1648 fp.close()
1651 fp.close()
1649 raise error.Abort(
1652 raise error.Abort(
1650 _(b'working directory state may be changed parallelly')
1653 _(b'working directory state may be changed parallelly')
1651 )
1654 )
1652 self._pendingmode = mode
1655 self._pendingmode = mode
1653 return fp
1656 return fp
1654
1657
1655 def parents(self):
1658 def parents(self):
1656 if not self._parents:
1659 if not self._parents:
1657 try:
1660 try:
1658 fp = self._opendirstatefile()
1661 fp = self._opendirstatefile()
1659 st = fp.read(2 * self._nodelen)
1662 st = fp.read(2 * self._nodelen)
1660 fp.close()
1663 fp.close()
1661 except IOError as err:
1664 except IOError as err:
1662 if err.errno != errno.ENOENT:
1665 if err.errno != errno.ENOENT:
1663 raise
1666 raise
1664 # File doesn't exist, so the current state is empty
1667 # File doesn't exist, so the current state is empty
1665 st = b''
1668 st = b''
1666
1669
1667 l = len(st)
1670 l = len(st)
1668 if l == self._nodelen * 2:
1671 if l == self._nodelen * 2:
1669 self._parents = (
1672 self._parents = (
1670 st[: self._nodelen],
1673 st[: self._nodelen],
1671 st[self._nodelen : 2 * self._nodelen],
1674 st[self._nodelen : 2 * self._nodelen],
1672 )
1675 )
1673 elif l == 0:
1676 elif l == 0:
1674 self._parents = (
1677 self._parents = (
1675 self._nodeconstants.nullid,
1678 self._nodeconstants.nullid,
1676 self._nodeconstants.nullid,
1679 self._nodeconstants.nullid,
1677 )
1680 )
1678 else:
1681 else:
1679 raise error.Abort(
1682 raise error.Abort(
1680 _(b'working directory state appears damaged!')
1683 _(b'working directory state appears damaged!')
1681 )
1684 )
1682
1685
1683 return self._parents
1686 return self._parents
1684
1687
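For reference, the first 2 * 20 bytes of a version-1 dirstate file are simply the two parent node ids concatenated, which is all the method above reads. A minimal standalone reader under that assumption (the default path is illustrative):

    NODE_LEN = 20

    def read_parents(path='.hg/dirstate'):
        """Return (p1, p2) as raw 20-byte node ids; nulls if the file is absent."""
        try:
            with open(path, 'rb') as fp:
                header = fp.read(2 * NODE_LEN)
        except FileNotFoundError:
            header = b''
        if len(header) == 2 * NODE_LEN:
            return header[:NODE_LEN], header[NODE_LEN:]
        if not header:
            null = b'\x00' * NODE_LEN
            return null, null
        raise ValueError('dirstate appears damaged')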
1685 def setparents(self, p1, p2):
1688 def setparents(self, p1, p2):
1686 self._parents = (p1, p2)
1689 self._parents = (p1, p2)
1687 self._dirtyparents = True
1690 self._dirtyparents = True
1688
1691
1689 def read(self):
1692 def read(self):
1690 # ignore HG_PENDING because identity is used only for writing
1693 # ignore HG_PENDING because identity is used only for writing
1691 self.identity = util.filestat.frompath(
1694 self.identity = util.filestat.frompath(
1692 self._opener.join(self._filename)
1695 self._opener.join(self._filename)
1693 )
1696 )
1694
1697
1695 try:
1698 try:
1696 fp = self._opendirstatefile()
1699 fp = self._opendirstatefile()
1697 try:
1700 try:
1698 st = fp.read()
1701 st = fp.read()
1699 finally:
1702 finally:
1700 fp.close()
1703 fp.close()
1701 except IOError as err:
1704 except IOError as err:
1702 if err.errno != errno.ENOENT:
1705 if err.errno != errno.ENOENT:
1703 raise
1706 raise
1704 return
1707 return
1705 if not st:
1708 if not st:
1706 return
1709 return
1707
1710
1708 if util.safehasattr(parsers, b'dict_new_presized'):
1711 if util.safehasattr(parsers, b'dict_new_presized'):
1709 # Make an estimate of the number of files in the dirstate based on
1712 # Make an estimate of the number of files in the dirstate based on
1710 # its size. This trades wasting some memory for avoiding costly
1713 # its size. This trades wasting some memory for avoiding costly
1711 # resizes. Each entry has a prefix of 17 bytes followed by one or
1714 # resizes. Each entry has a prefix of 17 bytes followed by one or
1712 # two path names. Studies on various large-scale real-world repositories
1715 # two path names. Studies on various large-scale real-world repositories
1713 # found 54 bytes to be a reasonable upper limit for the average path name.
1716 # found 54 bytes to be a reasonable upper limit for the average path name.
1714 # Copy entries are ignored for the sake of this estimate.
1717 # Copy entries are ignored for the sake of this estimate.
1715 self._map = parsers.dict_new_presized(len(st) // 71)
1718 self._map = parsers.dict_new_presized(len(st) // 71)
1716
1719
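In other words, the 71-byte divisor is the 17-byte fixed record prefix plus the assumed 54-byte average path, so a 710 kB dirstate would be presized for roughly ten thousand entries:

    ENTRY_PREFIX = 17   # 1-byte state + three 4-byte fields + 4-byte path length
    AVG_PATH = 54       # empirical upper bound quoted in the comment above
    estimated_entries = 710000 // (ENTRY_PREFIX + AVG_PATH)
    assert estimated_entries == 10000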
1717 # Python's garbage collector triggers a GC each time a certain number
1720 # Python's garbage collector triggers a GC each time a certain number
1718 # of container objects (the number being defined by
1721 # of container objects (the number being defined by
1719 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1722 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1720 # for each file in the dirstate. The C version then immediately marks
1723 # for each file in the dirstate. The C version then immediately marks
1721 # them as not to be tracked by the collector. However, this has no
1724 # them as not to be tracked by the collector. However, this has no
1722 # effect on when GCs are triggered, only on what objects the GC looks
1725 # effect on when GCs are triggered, only on what objects the GC looks
1723 # into. This means that O(number of files) GCs are unavoidable.
1726 # into. This means that O(number of files) GCs are unavoidable.
1724 # Depending on when in the process's lifetime the dirstate is parsed,
1727 # Depending on when in the process's lifetime the dirstate is parsed,
1725 # this can get very expensive. As a workaround, disable GC while
1728 # this can get very expensive. As a workaround, disable GC while
1726 # parsing the dirstate.
1729 # parsing the dirstate.
1727 #
1730 #
1728 # (we cannot decorate the function directly since it is in a C module)
1731 # (we cannot decorate the function directly since it is in a C module)
1729 parse_dirstate = util.nogc(parsers.parse_dirstate)
1732 parse_dirstate = util.nogc(parsers.parse_dirstate)
1730 p = parse_dirstate(self._map, self.copymap, st)
1733 p = parse_dirstate(self._map, self.copymap, st)
1731 if not self._dirtyparents:
1734 if not self._dirtyparents:
1732 self.setparents(*p)
1735 self.setparents(*p)
1733
1736
1734 # Avoid excess attribute lookups by fast pathing certain checks
1737 # Avoid excess attribute lookups by fast pathing certain checks
1735 self.__contains__ = self._map.__contains__
1738 self.__contains__ = self._map.__contains__
1736 self.__getitem__ = self._map.__getitem__
1739 self.__getitem__ = self._map.__getitem__
1737 self.get = self._map.get
1740 self.get = self._map.get
1738
1741
1739 def write(self, st, now):
1742 def write(self, st, now):
1740 st.write(
1743 st.write(
1741 parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
1744 parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
1742 )
1745 )
1743 st.close()
1746 st.close()
1744 self._dirtyparents = False
1747 self._dirtyparents = False
1745 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1748 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1746
1749
1747 @propertycache
1750 @propertycache
1748 def nonnormalset(self):
1751 def nonnormalset(self):
1749 nonnorm, otherparents = self.nonnormalentries()
1752 nonnorm, otherparents = self.nonnormalentries()
1750 self.otherparentset = otherparents
1753 self.otherparentset = otherparents
1751 return nonnorm
1754 return nonnorm
1752
1755
1753 @propertycache
1756 @propertycache
1754 def otherparentset(self):
1757 def otherparentset(self):
1755 nonnorm, otherparents = self.nonnormalentries()
1758 nonnorm, otherparents = self.nonnormalentries()
1756 self.nonnormalset = nonnorm
1759 self.nonnormalset = nonnorm
1757 return otherparents
1760 return otherparents
1758
1761
1759 def non_normal_or_other_parent_paths(self):
1762 def non_normal_or_other_parent_paths(self):
1760 return self.nonnormalset.union(self.otherparentset)
1763 return self.nonnormalset.union(self.otherparentset)
1761
1764
1762 @propertycache
1765 @propertycache
1763 def identity(self):
1766 def identity(self):
1764 self._map
1767 self._map
1765 return self.identity
1768 return self.identity
1766
1769
1767 @propertycache
1770 @propertycache
1768 def dirfoldmap(self):
1771 def dirfoldmap(self):
1769 f = {}
1772 f = {}
1770 normcase = util.normcase
1773 normcase = util.normcase
1771 for name in self._dirs:
1774 for name in self._dirs:
1772 f[normcase(name)] = name
1775 f[normcase(name)] = name
1773 return f
1776 return f
1774
1777
1775
1778
1776 if rustmod is not None:
1779 if rustmod is not None:
1777
1780
1778 class dirstatemap(object):
1781 class dirstatemap(object):
1779 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
1782 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
1780 self._use_dirstate_v2 = use_dirstate_v2
1783 self._use_dirstate_v2 = use_dirstate_v2
1781 self._nodeconstants = nodeconstants
1784 self._nodeconstants = nodeconstants
1782 self._ui = ui
1785 self._ui = ui
1783 self._opener = opener
1786 self._opener = opener
1784 self._root = root
1787 self._root = root
1785 self._filename = b'dirstate'
1788 self._filename = b'dirstate'
1786 self._nodelen = 20 # Also update Rust code when changing this!
1789 self._nodelen = 20 # Also update Rust code when changing this!
1787 self._parents = None
1790 self._parents = None
1788 self._dirtyparents = False
1791 self._dirtyparents = False
1789
1792
1790 # for consistent view between _pl() and _read() invocations
1793 # for consistent view between _pl() and _read() invocations
1791 self._pendingmode = None
1794 self._pendingmode = None
1792
1795
1793 self._use_dirstate_tree = self._ui.configbool(
1796 self._use_dirstate_tree = self._ui.configbool(
1794 b"experimental",
1797 b"experimental",
1795 b"dirstate-tree.in-memory",
1798 b"dirstate-tree.in-memory",
1796 False,
1799 False,
1797 )
1800 )
1798
1801
1799 def addfile(self, *args, **kwargs):
1802 def addfile(self, *args, **kwargs):
1800 return self._rustmap.addfile(*args, **kwargs)
1803 return self._rustmap.addfile(*args, **kwargs)
1801
1804
1802 def removefile(self, *args, **kwargs):
1805 def removefile(self, *args, **kwargs):
1803 return self._rustmap.removefile(*args, **kwargs)
1806 return self._rustmap.removefile(*args, **kwargs)
1804
1807
1805 def dropfile(self, *args, **kwargs):
1808 def dropfile(self, *args, **kwargs):
1806 return self._rustmap.dropfile(*args, **kwargs)
1809 return self._rustmap.dropfile(*args, **kwargs)
1807
1810
1808 def clearambiguoustimes(self, *args, **kwargs):
1811 def clearambiguoustimes(self, *args, **kwargs):
1809 return self._rustmap.clearambiguoustimes(*args, **kwargs)
1812 return self._rustmap.clearambiguoustimes(*args, **kwargs)
1810
1813
1811 def nonnormalentries(self):
1814 def nonnormalentries(self):
1812 return self._rustmap.nonnormalentries()
1815 return self._rustmap.nonnormalentries()
1813
1816
1814 def get(self, *args, **kwargs):
1817 def get(self, *args, **kwargs):
1815 return self._rustmap.get(*args, **kwargs)
1818 return self._rustmap.get(*args, **kwargs)
1816
1819
1817 @property
1820 @property
1818 def copymap(self):
1821 def copymap(self):
1819 return self._rustmap.copymap()
1822 return self._rustmap.copymap()
1820
1823
1821 def directories(self):
1824 def directories(self):
1822 return self._rustmap.directories()
1825 return self._rustmap.directories()
1823
1826
1824 def preload(self):
1827 def preload(self):
1825 self._rustmap
1828 self._rustmap
1826
1829
1827 def clear(self):
1830 def clear(self):
1828 self._rustmap.clear()
1831 self._rustmap.clear()
1829 self.setparents(
1832 self.setparents(
1830 self._nodeconstants.nullid, self._nodeconstants.nullid
1833 self._nodeconstants.nullid, self._nodeconstants.nullid
1831 )
1834 )
1832 util.clearcachedproperty(self, b"_dirs")
1835 util.clearcachedproperty(self, b"_dirs")
1833 util.clearcachedproperty(self, b"_alldirs")
1836 util.clearcachedproperty(self, b"_alldirs")
1834 util.clearcachedproperty(self, b"dirfoldmap")
1837 util.clearcachedproperty(self, b"dirfoldmap")
1835
1838
1836 def items(self):
1839 def items(self):
1837 return self._rustmap.items()
1840 return self._rustmap.items()
1838
1841
    def keys(self):
        return iter(self._rustmap)

    def __contains__(self, key):
        return key in self._rustmap

    def __getitem__(self, item):
        return self._rustmap[item]

    def __len__(self):
        return len(self._rustmap)

    def __iter__(self):
        return iter(self._rustmap)

    # forward for python2,3 compat
    iteritems = items

    def _opendirstatefile(self):
        fp, mode = txnutil.trypending(
            self._root, self._opener, self._filename
        )
        if self._pendingmode is not None and self._pendingmode != mode:
            fp.close()
            raise error.Abort(
                _(b'working directory state may be changed parallelly')
            )
        self._pendingmode = mode
        return fp

    def setparents(self, p1, p2):
        self._parents = (p1, p2)
        self._dirtyparents = True

    def parents(self):
        if not self._parents:
            if self._use_dirstate_v2:
                offset = len(rustmod.V2_FORMAT_MARKER)
            else:
                offset = 0
            read_len = offset + self._nodelen * 2
            try:
                fp = self._opendirstatefile()
                st = fp.read(read_len)
                fp.close()
            except IOError as err:
                if err.errno != errno.ENOENT:
                    raise
                # File doesn't exist, so the current state is empty
                st = b''

            l = len(st)
            if l == read_len:
                st = st[offset:]
                self._parents = (
                    st[: self._nodelen],
                    st[self._nodelen : 2 * self._nodelen],
                )
            elif l == 0:
                self._parents = (
                    self._nodeconstants.nullid,
                    self._nodeconstants.nullid,
                )
            else:
                raise error.Abort(
                    _(b'working directory state appears damaged!')
                )

        return self._parents

    @propertycache
    def _rustmap(self):
        """
        Fills the Dirstatemap when called.
        """
        # ignore HG_PENDING because identity is used only for writing
        self.identity = util.filestat.frompath(
            self._opener.join(self._filename)
        )

        try:
            fp = self._opendirstatefile()
            try:
                st = fp.read()
            finally:
                fp.close()
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            st = b''

        self._rustmap, parents = rustmod.DirstateMap.new(
            self._use_dirstate_tree, self._use_dirstate_v2, st
        )

        if parents and not self._dirtyparents:
            self.setparents(*parents)

        self.__contains__ = self._rustmap.__contains__
        self.__getitem__ = self._rustmap.__getitem__
        self.get = self._rustmap.get
        return self._rustmap

    def write(self, st, now):
        parents = self.parents()
        packed = self._rustmap.write(
            self._use_dirstate_v2, parents[0], parents[1], now
        )
        st.write(packed)
        st.close()
        self._dirtyparents = False

    @propertycache
    def filefoldmap(self):
        """Returns a dictionary mapping normalized case paths to their
        non-normalized versions.
        """
        return self._rustmap.filefoldmapasdict()

    def hastrackeddir(self, d):
        return self._rustmap.hastrackeddir(d)

    def hasdir(self, d):
        return self._rustmap.hasdir(d)

    @propertycache
    def identity(self):
        self._rustmap
        return self.identity

    @property
    def nonnormalset(self):
        nonnorm = self._rustmap.non_normal_entries()
        return nonnorm

    @propertycache
    def otherparentset(self):
        otherparents = self._rustmap.other_parent_entries()
        return otherparents

    def non_normal_or_other_parent_paths(self):
        return self._rustmap.non_normal_or_other_parent_paths()

    @propertycache
    def dirfoldmap(self):
        f = {}
        normcase = util.normcase
        for name, _pseudo_entry in self.directories():
            f[normcase(name)] = name
        return f
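For orientation, the parents() method above only parses the fixed-size header of the dirstate file: in the v1 format the file starts with the two parent nodes, self._nodelen bytes each (20 for SHA-1), and a missing file is treated as two null parents. The following standalone Python 3 sketch (not part of the Mercurial source; the file path, constant, and helper name are illustrative, and the dirstate-v2 marker offset handled above is ignored) mirrors that header-reading logic.

import binascii

NODE_LEN = 20  # SHA-1 node length used by the dirstate-v1 header

def read_dirstate_parents(path='.hg/dirstate'):
    # The v1 dirstate begins with p1 and p2, NODE_LEN bytes each.
    try:
        with open(path, 'rb') as fp:
            header = fp.read(2 * NODE_LEN)
    except FileNotFoundError:
        # No dirstate file: the working directory state is empty,
        # so both parents are the null node.
        return b'\0' * NODE_LEN, b'\0' * NODE_LEN
    if len(header) != 2 * NODE_LEN:
        raise ValueError('dirstate header appears truncated')
    return header[:NODE_LEN], header[NODE_LEN:]

p1, p2 = read_dirstate_parents()
print(binascii.hexlify(p1), binascii.hexlify(p2))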