dirstate: add an explicit `possibly_dirty` parameter to `_addpath`...
marmoute
r48282:91520abe default
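The change itself is small: `_addpath` grows a `possibly_dirty` keyword argument, and when it is set (and the entry does not come from the other parent) the recorded mtime is forced to AMBIGUOUS_TIME; `normallookup` now passes `possibly_dirty=True` instead of silently relying on the `mtime=AMBIGUOUS_TIME` default, so the net behaviour of that call appears unchanged and only the intent is made explicit. A minimal sketch of the resulting size/mtime selection, simplified from the hunk below (the helper name is invented for illustration, and masking with _rangemask is omitted):

    # Simplified sketch of the size/mtime decision in dirstate._addpath after
    # this change; not the actual method, and _rangemask masking is left out.
    AMBIGUOUS_TIME = -1  # mirrors the module-level constants in dirstate.py
    NONNORMAL = -1
    FROM_P2 = -2

    def pick_size_and_mtime(size, mtime, from_p2=False, possibly_dirty=False):
        if from_p2:
            # entry comes from the other parent: always recorded as dirty
            return FROM_P2, AMBIGUOUS_TIME
        if possibly_dirty:
            # caller states the on-disk content may not match: force a later lookup
            return size, AMBIGUOUS_TIME
        assert size != FROM_P2
        return size, mtime

    # normallookup() now reads
    #     self._addpath(f, b'n', 0, possibly_dirty=True)
    # rather than self._addpath(f, b'n', 0), which only marked the entry
    # ambiguous through the default mtime value.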
@@ -1,2008 +1,2011 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .pycompat import delattr
17 from .pycompat import delattr
18
18
19 from hgdemandimport import tracing
19 from hgdemandimport import tracing
20
20
21 from . import (
21 from . import (
22 encoding,
22 encoding,
23 error,
23 error,
24 match as matchmod,
24 match as matchmod,
25 pathutil,
25 pathutil,
26 policy,
26 policy,
27 pycompat,
27 pycompat,
28 scmutil,
28 scmutil,
29 sparse,
29 sparse,
30 txnutil,
30 txnutil,
31 util,
31 util,
32 )
32 )
33
33
34 from .interfaces import (
34 from .interfaces import (
35 dirstate as intdirstate,
35 dirstate as intdirstate,
36 util as interfaceutil,
36 util as interfaceutil,
37 )
37 )
38
38
39 parsers = policy.importmod('parsers')
39 parsers = policy.importmod('parsers')
40 rustmod = policy.importrust('dirstate')
40 rustmod = policy.importrust('dirstate')
41
41
42 SUPPORTS_DIRSTATE_V2 = rustmod is not None
42 SUPPORTS_DIRSTATE_V2 = rustmod is not None
43
43
44 propertycache = util.propertycache
44 propertycache = util.propertycache
45 filecache = scmutil.filecache
45 filecache = scmutil.filecache
46 _rangemask = 0x7FFFFFFF
46 _rangemask = 0x7FFFFFFF
47
47
48 dirstatetuple = parsers.dirstatetuple
48 dirstatetuple = parsers.dirstatetuple
49
49
50
50
51 # a special value used internally for `size` if the file comes from the other parent
51 # a special value used internally for `size` if the file comes from the other parent
52 FROM_P2 = -2
52 FROM_P2 = -2
53
53
54 # a special value used internally for `size` if the file is modified/merged/added
54 # a special value used internally for `size` if the file is modified/merged/added
55 NONNORMAL = -1
55 NONNORMAL = -1
56
56
57 # a special value used internally for `time` if the time is ambiguous
57 # a special value used internally for `time` if the time is ambiguous
58 AMBIGUOUS_TIME = -1
58 AMBIGUOUS_TIME = -1
59
59
60
60
61 class repocache(filecache):
61 class repocache(filecache):
62 """filecache for files in .hg/"""
62 """filecache for files in .hg/"""
63
63
64 def join(self, obj, fname):
64 def join(self, obj, fname):
65 return obj._opener.join(fname)
65 return obj._opener.join(fname)
66
66
67
67
68 class rootcache(filecache):
68 class rootcache(filecache):
69 """filecache for files in the repository root"""
69 """filecache for files in the repository root"""
70
70
71 def join(self, obj, fname):
71 def join(self, obj, fname):
72 return obj._join(fname)
72 return obj._join(fname)
73
73
74
74
75 def _getfsnow(vfs):
75 def _getfsnow(vfs):
76 '''Get "now" timestamp on filesystem'''
76 '''Get "now" timestamp on filesystem'''
77 tmpfd, tmpname = vfs.mkstemp()
77 tmpfd, tmpname = vfs.mkstemp()
78 try:
78 try:
79 return os.fstat(tmpfd)[stat.ST_MTIME]
79 return os.fstat(tmpfd)[stat.ST_MTIME]
80 finally:
80 finally:
81 os.close(tmpfd)
81 os.close(tmpfd)
82 vfs.unlink(tmpname)
82 vfs.unlink(tmpname)
83
83
84
84
85 @interfaceutil.implementer(intdirstate.idirstate)
85 @interfaceutil.implementer(intdirstate.idirstate)
86 class dirstate(object):
86 class dirstate(object):
87 def __init__(
87 def __init__(
88 self,
88 self,
89 opener,
89 opener,
90 ui,
90 ui,
91 root,
91 root,
92 validate,
92 validate,
93 sparsematchfn,
93 sparsematchfn,
94 nodeconstants,
94 nodeconstants,
95 use_dirstate_v2,
95 use_dirstate_v2,
96 ):
96 ):
97 """Create a new dirstate object.
97 """Create a new dirstate object.
98
98
99 opener is an open()-like callable that can be used to open the
99 opener is an open()-like callable that can be used to open the
100 dirstate file; root is the root of the directory tracked by
100 dirstate file; root is the root of the directory tracked by
101 the dirstate.
101 the dirstate.
102 """
102 """
103 self._use_dirstate_v2 = use_dirstate_v2
103 self._use_dirstate_v2 = use_dirstate_v2
104 self._nodeconstants = nodeconstants
104 self._nodeconstants = nodeconstants
105 self._opener = opener
105 self._opener = opener
106 self._validate = validate
106 self._validate = validate
107 self._root = root
107 self._root = root
108 self._sparsematchfn = sparsematchfn
108 self._sparsematchfn = sparsematchfn
109 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
109 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
110 # UNC path pointing to root share (issue4557)
110 # UNC path pointing to root share (issue4557)
111 self._rootdir = pathutil.normasprefix(root)
111 self._rootdir = pathutil.normasprefix(root)
112 self._dirty = False
112 self._dirty = False
113 self._lastnormaltime = 0
113 self._lastnormaltime = 0
114 self._ui = ui
114 self._ui = ui
115 self._filecache = {}
115 self._filecache = {}
116 self._parentwriters = 0
116 self._parentwriters = 0
117 self._filename = b'dirstate'
117 self._filename = b'dirstate'
118 self._pendingfilename = b'%s.pending' % self._filename
118 self._pendingfilename = b'%s.pending' % self._filename
119 self._plchangecallbacks = {}
119 self._plchangecallbacks = {}
120 self._origpl = None
120 self._origpl = None
121 self._updatedfiles = set()
121 self._updatedfiles = set()
122 self._mapcls = dirstatemap
122 self._mapcls = dirstatemap
123 # Access and cache cwd early, so we don't access it for the first time
123 # Access and cache cwd early, so we don't access it for the first time
124 # after a working-copy update caused it to not exist (accessing it then
124 # after a working-copy update caused it to not exist (accessing it then
125 # raises an exception).
125 # raises an exception).
126 self._cwd
126 self._cwd
127
127
128 def prefetch_parents(self):
128 def prefetch_parents(self):
129 """make sure the parents are loaded
129 """make sure the parents are loaded
130
130
131 Used to avoid a race condition.
131 Used to avoid a race condition.
132 """
132 """
133 self._pl
133 self._pl
134
134
135 @contextlib.contextmanager
135 @contextlib.contextmanager
136 def parentchange(self):
136 def parentchange(self):
137 """Context manager for handling dirstate parents.
137 """Context manager for handling dirstate parents.
138
138
139 If an exception occurs in the scope of the context manager,
139 If an exception occurs in the scope of the context manager,
140 the incoherent dirstate won't be written when wlock is
140 the incoherent dirstate won't be written when wlock is
141 released.
141 released.
142 """
142 """
143 self._parentwriters += 1
143 self._parentwriters += 1
144 yield
144 yield
145 # Typically we want the "undo" step of a context manager in a
145 # Typically we want the "undo" step of a context manager in a
146 # finally block so it happens even when an exception
146 # finally block so it happens even when an exception
147 # occurs. In this case, however, we only want to decrement
147 # occurs. In this case, however, we only want to decrement
148 # parentwriters if the code in the with statement exits
148 # parentwriters if the code in the with statement exits
149 # normally, so we don't have a try/finally here on purpose.
149 # normally, so we don't have a try/finally here on purpose.
150 self._parentwriters -= 1
150 self._parentwriters -= 1
151
151
152 def pendingparentchange(self):
152 def pendingparentchange(self):
153 """Returns true if the dirstate is in the middle of a set of changes
153 """Returns true if the dirstate is in the middle of a set of changes
154 that modify the dirstate parent.
154 that modify the dirstate parent.
155 """
155 """
156 return self._parentwriters > 0
156 return self._parentwriters > 0
157
157
158 @propertycache
158 @propertycache
159 def _map(self):
159 def _map(self):
160 """Return the dirstate contents (see documentation for dirstatemap)."""
160 """Return the dirstate contents (see documentation for dirstatemap)."""
161 self._map = self._mapcls(
161 self._map = self._mapcls(
162 self._ui,
162 self._ui,
163 self._opener,
163 self._opener,
164 self._root,
164 self._root,
165 self._nodeconstants,
165 self._nodeconstants,
166 self._use_dirstate_v2,
166 self._use_dirstate_v2,
167 )
167 )
168 return self._map
168 return self._map
169
169
170 @property
170 @property
171 def _sparsematcher(self):
171 def _sparsematcher(self):
172 """The matcher for the sparse checkout.
172 """The matcher for the sparse checkout.
173
173
174 The working directory may not include every file from a manifest. The
174 The working directory may not include every file from a manifest. The
175 matcher obtained by this property will match a path if it is to be
175 matcher obtained by this property will match a path if it is to be
176 included in the working directory.
176 included in the working directory.
177 """
177 """
178 # TODO there is potential to cache this property. For now, the matcher
178 # TODO there is potential to cache this property. For now, the matcher
179 # is resolved on every access. (But the called function does use a
179 # is resolved on every access. (But the called function does use a
180 # cache to keep the lookup fast.)
180 # cache to keep the lookup fast.)
181 return self._sparsematchfn()
181 return self._sparsematchfn()
182
182
183 @repocache(b'branch')
183 @repocache(b'branch')
184 def _branch(self):
184 def _branch(self):
185 try:
185 try:
186 return self._opener.read(b"branch").strip() or b"default"
186 return self._opener.read(b"branch").strip() or b"default"
187 except IOError as inst:
187 except IOError as inst:
188 if inst.errno != errno.ENOENT:
188 if inst.errno != errno.ENOENT:
189 raise
189 raise
190 return b"default"
190 return b"default"
191
191
192 @property
192 @property
193 def _pl(self):
193 def _pl(self):
194 return self._map.parents()
194 return self._map.parents()
195
195
196 def hasdir(self, d):
196 def hasdir(self, d):
197 return self._map.hastrackeddir(d)
197 return self._map.hastrackeddir(d)
198
198
199 @rootcache(b'.hgignore')
199 @rootcache(b'.hgignore')
200 def _ignore(self):
200 def _ignore(self):
201 files = self._ignorefiles()
201 files = self._ignorefiles()
202 if not files:
202 if not files:
203 return matchmod.never()
203 return matchmod.never()
204
204
205 pats = [b'include:%s' % f for f in files]
205 pats = [b'include:%s' % f for f in files]
206 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
206 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
207
207
208 @propertycache
208 @propertycache
209 def _slash(self):
209 def _slash(self):
210 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
210 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
211
211
212 @propertycache
212 @propertycache
213 def _checklink(self):
213 def _checklink(self):
214 return util.checklink(self._root)
214 return util.checklink(self._root)
215
215
216 @propertycache
216 @propertycache
217 def _checkexec(self):
217 def _checkexec(self):
218 return bool(util.checkexec(self._root))
218 return bool(util.checkexec(self._root))
219
219
220 @propertycache
220 @propertycache
221 def _checkcase(self):
221 def _checkcase(self):
222 return not util.fscasesensitive(self._join(b'.hg'))
222 return not util.fscasesensitive(self._join(b'.hg'))
223
223
224 def _join(self, f):
224 def _join(self, f):
225 # much faster than os.path.join()
225 # much faster than os.path.join()
226 # it's safe because f is always a relative path
226 # it's safe because f is always a relative path
227 return self._rootdir + f
227 return self._rootdir + f
228
228
229 def flagfunc(self, buildfallback):
229 def flagfunc(self, buildfallback):
230 if self._checklink and self._checkexec:
230 if self._checklink and self._checkexec:
231
231
232 def f(x):
232 def f(x):
233 try:
233 try:
234 st = os.lstat(self._join(x))
234 st = os.lstat(self._join(x))
235 if util.statislink(st):
235 if util.statislink(st):
236 return b'l'
236 return b'l'
237 if util.statisexec(st):
237 if util.statisexec(st):
238 return b'x'
238 return b'x'
239 except OSError:
239 except OSError:
240 pass
240 pass
241 return b''
241 return b''
242
242
243 return f
243 return f
244
244
245 fallback = buildfallback()
245 fallback = buildfallback()
246 if self._checklink:
246 if self._checklink:
247
247
248 def f(x):
248 def f(x):
249 if os.path.islink(self._join(x)):
249 if os.path.islink(self._join(x)):
250 return b'l'
250 return b'l'
251 if b'x' in fallback(x):
251 if b'x' in fallback(x):
252 return b'x'
252 return b'x'
253 return b''
253 return b''
254
254
255 return f
255 return f
256 if self._checkexec:
256 if self._checkexec:
257
257
258 def f(x):
258 def f(x):
259 if b'l' in fallback(x):
259 if b'l' in fallback(x):
260 return b'l'
260 return b'l'
261 if util.isexec(self._join(x)):
261 if util.isexec(self._join(x)):
262 return b'x'
262 return b'x'
263 return b''
263 return b''
264
264
265 return f
265 return f
266 else:
266 else:
267 return fallback
267 return fallback
268
268
269 @propertycache
269 @propertycache
270 def _cwd(self):
270 def _cwd(self):
271 # internal config: ui.forcecwd
271 # internal config: ui.forcecwd
272 forcecwd = self._ui.config(b'ui', b'forcecwd')
272 forcecwd = self._ui.config(b'ui', b'forcecwd')
273 if forcecwd:
273 if forcecwd:
274 return forcecwd
274 return forcecwd
275 return encoding.getcwd()
275 return encoding.getcwd()
276
276
277 def getcwd(self):
277 def getcwd(self):
278 """Return the path from which a canonical path is calculated.
278 """Return the path from which a canonical path is calculated.
279
279
280 This path should be used to resolve file patterns or to convert
280 This path should be used to resolve file patterns or to convert
281 canonical paths back to file paths for display. It shouldn't be
281 canonical paths back to file paths for display. It shouldn't be
282 used to get real file paths. Use vfs functions instead.
282 used to get real file paths. Use vfs functions instead.
283 """
283 """
284 cwd = self._cwd
284 cwd = self._cwd
285 if cwd == self._root:
285 if cwd == self._root:
286 return b''
286 return b''
287 # self._root ends with a path separator if self._root is '/' or 'C:\'
287 # self._root ends with a path separator if self._root is '/' or 'C:\'
288 rootsep = self._root
288 rootsep = self._root
289 if not util.endswithsep(rootsep):
289 if not util.endswithsep(rootsep):
290 rootsep += pycompat.ossep
290 rootsep += pycompat.ossep
291 if cwd.startswith(rootsep):
291 if cwd.startswith(rootsep):
292 return cwd[len(rootsep) :]
292 return cwd[len(rootsep) :]
293 else:
293 else:
294 # we're outside the repo. return an absolute path.
294 # we're outside the repo. return an absolute path.
295 return cwd
295 return cwd
296
296
297 def pathto(self, f, cwd=None):
297 def pathto(self, f, cwd=None):
298 if cwd is None:
298 if cwd is None:
299 cwd = self.getcwd()
299 cwd = self.getcwd()
300 path = util.pathto(self._root, cwd, f)
300 path = util.pathto(self._root, cwd, f)
301 if self._slash:
301 if self._slash:
302 return util.pconvert(path)
302 return util.pconvert(path)
303 return path
303 return path
304
304
305 def __getitem__(self, key):
305 def __getitem__(self, key):
306 """Return the current state of key (a filename) in the dirstate.
306 """Return the current state of key (a filename) in the dirstate.
307
307
308 States are:
308 States are:
309 n normal
309 n normal
310 m needs merging
310 m needs merging
311 r marked for removal
311 r marked for removal
312 a marked for addition
312 a marked for addition
313 ? not tracked
313 ? not tracked
314 """
314 """
315 return self._map.get(key, (b"?",))[0]
315 return self._map.get(key, (b"?",))[0]
316
316
317 def __contains__(self, key):
317 def __contains__(self, key):
318 return key in self._map
318 return key in self._map
319
319
320 def __iter__(self):
320 def __iter__(self):
321 return iter(sorted(self._map))
321 return iter(sorted(self._map))
322
322
323 def items(self):
323 def items(self):
324 return pycompat.iteritems(self._map)
324 return pycompat.iteritems(self._map)
325
325
326 iteritems = items
326 iteritems = items
327
327
328 def directories(self):
328 def directories(self):
329 return self._map.directories()
329 return self._map.directories()
330
330
331 def parents(self):
331 def parents(self):
332 return [self._validate(p) for p in self._pl]
332 return [self._validate(p) for p in self._pl]
333
333
334 def p1(self):
334 def p1(self):
335 return self._validate(self._pl[0])
335 return self._validate(self._pl[0])
336
336
337 def p2(self):
337 def p2(self):
338 return self._validate(self._pl[1])
338 return self._validate(self._pl[1])
339
339
340 def branch(self):
340 def branch(self):
341 return encoding.tolocal(self._branch)
341 return encoding.tolocal(self._branch)
342
342
343 def setparents(self, p1, p2=None):
343 def setparents(self, p1, p2=None):
344 """Set dirstate parents to p1 and p2.
344 """Set dirstate parents to p1 and p2.
345
345
346 When moving from two parents to one, 'm' merged entries are
346 When moving from two parents to one, 'm' merged entries are
347 adjusted to normal and previous copy records are discarded and
347 adjusted to normal and previous copy records are discarded and
348 returned by the call.
348 returned by the call.
349
349
350 See localrepo.setparents()
350 See localrepo.setparents()
351 """
351 """
352 if p2 is None:
352 if p2 is None:
353 p2 = self._nodeconstants.nullid
353 p2 = self._nodeconstants.nullid
354 if self._parentwriters == 0:
354 if self._parentwriters == 0:
355 raise ValueError(
355 raise ValueError(
356 b"cannot set dirstate parent outside of "
356 b"cannot set dirstate parent outside of "
357 b"dirstate.parentchange context manager"
357 b"dirstate.parentchange context manager"
358 )
358 )
359
359
360 self._dirty = True
360 self._dirty = True
361 oldp2 = self._pl[1]
361 oldp2 = self._pl[1]
362 if self._origpl is None:
362 if self._origpl is None:
363 self._origpl = self._pl
363 self._origpl = self._pl
364 self._map.setparents(p1, p2)
364 self._map.setparents(p1, p2)
365 copies = {}
365 copies = {}
366 if (
366 if (
367 oldp2 != self._nodeconstants.nullid
367 oldp2 != self._nodeconstants.nullid
368 and p2 == self._nodeconstants.nullid
368 and p2 == self._nodeconstants.nullid
369 ):
369 ):
370 candidatefiles = self._map.non_normal_or_other_parent_paths()
370 candidatefiles = self._map.non_normal_or_other_parent_paths()
371
371
372 for f in candidatefiles:
372 for f in candidatefiles:
373 s = self._map.get(f)
373 s = self._map.get(f)
374 if s is None:
374 if s is None:
375 continue
375 continue
376
376
377 # Discard 'm' markers when moving away from a merge state
377 # Discard 'm' markers when moving away from a merge state
378 if s[0] == b'm':
378 if s[0] == b'm':
379 source = self._map.copymap.get(f)
379 source = self._map.copymap.get(f)
380 if source:
380 if source:
381 copies[f] = source
381 copies[f] = source
382 self.normallookup(f)
382 self.normallookup(f)
383 # Also fix up otherparent markers
383 # Also fix up otherparent markers
384 elif s[0] == b'n' and s[2] == FROM_P2:
384 elif s[0] == b'n' and s[2] == FROM_P2:
385 source = self._map.copymap.get(f)
385 source = self._map.copymap.get(f)
386 if source:
386 if source:
387 copies[f] = source
387 copies[f] = source
388 self.add(f)
388 self.add(f)
389 return copies
389 return copies
390
390
391 def setbranch(self, branch):
391 def setbranch(self, branch):
392 self.__class__._branch.set(self, encoding.fromlocal(branch))
392 self.__class__._branch.set(self, encoding.fromlocal(branch))
393 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
393 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
394 try:
394 try:
395 f.write(self._branch + b'\n')
395 f.write(self._branch + b'\n')
396 f.close()
396 f.close()
397
397
398 # make sure filecache has the correct stat info for _branch after
398 # make sure filecache has the correct stat info for _branch after
399 # replacing the underlying file
399 # replacing the underlying file
400 ce = self._filecache[b'_branch']
400 ce = self._filecache[b'_branch']
401 if ce:
401 if ce:
402 ce.refresh()
402 ce.refresh()
403 except: # re-raises
403 except: # re-raises
404 f.discard()
404 f.discard()
405 raise
405 raise
406
406
407 def invalidate(self):
407 def invalidate(self):
408 """Causes the next access to reread the dirstate.
408 """Causes the next access to reread the dirstate.
409
409
410 This is different from localrepo.invalidatedirstate() because it always
410 This is different from localrepo.invalidatedirstate() because it always
411 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
411 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
412 check whether the dirstate has changed before rereading it."""
412 check whether the dirstate has changed before rereading it."""
413
413
414 for a in ("_map", "_branch", "_ignore"):
414 for a in ("_map", "_branch", "_ignore"):
415 if a in self.__dict__:
415 if a in self.__dict__:
416 delattr(self, a)
416 delattr(self, a)
417 self._lastnormaltime = 0
417 self._lastnormaltime = 0
418 self._dirty = False
418 self._dirty = False
419 self._updatedfiles.clear()
419 self._updatedfiles.clear()
420 self._parentwriters = 0
420 self._parentwriters = 0
421 self._origpl = None
421 self._origpl = None
422
422
423 def copy(self, source, dest):
423 def copy(self, source, dest):
424 """Mark dest as a copy of source. Unmark dest if source is None."""
424 """Mark dest as a copy of source. Unmark dest if source is None."""
425 if source == dest:
425 if source == dest:
426 return
426 return
427 self._dirty = True
427 self._dirty = True
428 if source is not None:
428 if source is not None:
429 self._map.copymap[dest] = source
429 self._map.copymap[dest] = source
430 self._updatedfiles.add(source)
430 self._updatedfiles.add(source)
431 self._updatedfiles.add(dest)
431 self._updatedfiles.add(dest)
432 elif self._map.copymap.pop(dest, None):
432 elif self._map.copymap.pop(dest, None):
433 self._updatedfiles.add(dest)
433 self._updatedfiles.add(dest)
434
434
435 def copied(self, file):
435 def copied(self, file):
436 return self._map.copymap.get(file, None)
436 return self._map.copymap.get(file, None)
437
437
438 def copies(self):
438 def copies(self):
439 return self._map.copymap
439 return self._map.copymap
440
440
441 def _addpath(
441 def _addpath(
442 self,
442 self,
443 f,
443 f,
444 state,
444 state,
445 mode,
445 mode,
446 size=NONNORMAL,
446 size=NONNORMAL,
447 mtime=AMBIGUOUS_TIME,
447 mtime=AMBIGUOUS_TIME,
448 from_p2=False,
448 from_p2=False,
449 possibly_dirty=False,
449 ):
450 ):
450 oldstate = self[f]
451 oldstate = self[f]
451 if state == b'a' or oldstate == b'r':
452 if state == b'a' or oldstate == b'r':
452 scmutil.checkfilename(f)
453 scmutil.checkfilename(f)
453 if self._map.hastrackeddir(f):
454 if self._map.hastrackeddir(f):
454 msg = _(b'directory %r already in dirstate')
455 msg = _(b'directory %r already in dirstate')
455 msg %= pycompat.bytestr(f)
456 msg %= pycompat.bytestr(f)
456 raise error.Abort(msg)
457 raise error.Abort(msg)
457 # shadows
458 # shadows
458 for d in pathutil.finddirs(f):
459 for d in pathutil.finddirs(f):
459 if self._map.hastrackeddir(d):
460 if self._map.hastrackeddir(d):
460 break
461 break
461 entry = self._map.get(d)
462 entry = self._map.get(d)
462 if entry is not None and entry[0] != b'r':
463 if entry is not None and entry[0] != b'r':
463 msg = _(b'file %r in dirstate clashes with %r')
464 msg = _(b'file %r in dirstate clashes with %r')
464 msg %= (pycompat.bytestr(d), pycompat.bytestr(f))
465 msg %= (pycompat.bytestr(d), pycompat.bytestr(f))
465 raise error.Abort(msg)
466 raise error.Abort(msg)
466 if from_p2:
467 if from_p2:
467 size = FROM_P2
468 size = FROM_P2
468 mtime = AMBIGUOUS_TIME
469 mtime = AMBIGUOUS_TIME
470 elif possibly_dirty:
471 mtime = AMBIGUOUS_TIME
469 else:
472 else:
470 assert size != FROM_P2
473 assert size != FROM_P2
471 if size != NONNORMAL:
474 if size != NONNORMAL:
472 size = size & _rangemask
475 size = size & _rangemask
473 if mtime != AMBIGUOUS_TIME:
476 if mtime != AMBIGUOUS_TIME:
474 mtime = mtime & _rangemask
477 mtime = mtime & _rangemask
475 self._dirty = True
478 self._dirty = True
476 self._updatedfiles.add(f)
479 self._updatedfiles.add(f)
477 self._map.addfile(f, oldstate, state, mode, size, mtime)
480 self._map.addfile(f, oldstate, state, mode, size, mtime)
478
481
479 def normal(self, f, parentfiledata=None):
482 def normal(self, f, parentfiledata=None):
480 """Mark a file normal and clean.
483 """Mark a file normal and clean.
481
484
482 parentfiledata: (mode, size, mtime) of the clean file
485 parentfiledata: (mode, size, mtime) of the clean file
483
486
484 parentfiledata should be computed from memory (for mode,
487 parentfiledata should be computed from memory (for mode,
485 size), at or as close as possible to the point where we
488 size), at or as close as possible to the point where we
486 determined the file was clean, to limit the risk of the
489 determined the file was clean, to limit the risk of the
487 file having been changed by an external process between the
490 file having been changed by an external process between the
488 moment where the file was determined to be clean and now."""
491 moment where the file was determined to be clean and now."""
489 if parentfiledata:
492 if parentfiledata:
490 (mode, size, mtime) = parentfiledata
493 (mode, size, mtime) = parentfiledata
491 else:
494 else:
492 s = os.lstat(self._join(f))
495 s = os.lstat(self._join(f))
493 mode = s.st_mode
496 mode = s.st_mode
494 size = s.st_size
497 size = s.st_size
495 mtime = s[stat.ST_MTIME]
498 mtime = s[stat.ST_MTIME]
496 self._addpath(f, b'n', mode, size, mtime)
499 self._addpath(f, b'n', mode, size, mtime)
497 self._map.copymap.pop(f, None)
500 self._map.copymap.pop(f, None)
498 if f in self._map.nonnormalset:
501 if f in self._map.nonnormalset:
499 self._map.nonnormalset.remove(f)
502 self._map.nonnormalset.remove(f)
500 if mtime > self._lastnormaltime:
503 if mtime > self._lastnormaltime:
501 # Remember the most recent modification timeslot for status(),
504 # Remember the most recent modification timeslot for status(),
502 # to make sure we won't miss future size-preserving file content
505 # to make sure we won't miss future size-preserving file content
503 # modifications that happen within the same timeslot.
506 # modifications that happen within the same timeslot.
504 self._lastnormaltime = mtime
507 self._lastnormaltime = mtime
505
508
506 def normallookup(self, f):
509 def normallookup(self, f):
507 '''Mark a file normal, but possibly dirty.'''
510 '''Mark a file normal, but possibly dirty.'''
508 if self._pl[1] != self._nodeconstants.nullid:
511 if self._pl[1] != self._nodeconstants.nullid:
509 # if there is a merge going on and the file was either
512 # if there is a merge going on and the file was either
510 # in state 'm' (-1) or coming from other parent (-2) before
513 # in state 'm' (-1) or coming from other parent (-2) before
511 # being removed, restore that state.
514 # being removed, restore that state.
512 entry = self._map.get(f)
515 entry = self._map.get(f)
513 if entry is not None:
516 if entry is not None:
514 if entry[0] == b'r' and entry[2] in (NONNORMAL, FROM_P2):
517 if entry[0] == b'r' and entry[2] in (NONNORMAL, FROM_P2):
515 source = self._map.copymap.get(f)
518 source = self._map.copymap.get(f)
516 if entry[2] == NONNORMAL:
519 if entry[2] == NONNORMAL:
517 self.merge(f)
520 self.merge(f)
518 elif entry[2] == FROM_P2:
521 elif entry[2] == FROM_P2:
519 self.otherparent(f)
522 self.otherparent(f)
520 if source:
523 if source:
521 self.copy(source, f)
524 self.copy(source, f)
522 return
525 return
523 if entry[0] == b'm' or entry[0] == b'n' and entry[2] == FROM_P2:
526 if entry[0] == b'm' or entry[0] == b'n' and entry[2] == FROM_P2:
524 return
527 return
525 self._addpath(f, b'n', 0)
528 self._addpath(f, b'n', 0, possibly_dirty=True)
526 self._map.copymap.pop(f, None)
529 self._map.copymap.pop(f, None)
527
530
528 def otherparent(self, f):
531 def otherparent(self, f):
529 '''Mark as coming from the other parent, always dirty.'''
532 '''Mark as coming from the other parent, always dirty.'''
530 if self._pl[1] == self._nodeconstants.nullid:
533 if self._pl[1] == self._nodeconstants.nullid:
531 msg = _(b"setting %r to other parent only allowed in merges") % f
534 msg = _(b"setting %r to other parent only allowed in merges") % f
532 raise error.Abort(msg)
535 raise error.Abort(msg)
533 if f in self and self[f] == b'n':
536 if f in self and self[f] == b'n':
534 # merge-like
537 # merge-like
535 self._addpath(f, b'm', 0, from_p2=True)
538 self._addpath(f, b'm', 0, from_p2=True)
536 else:
539 else:
537 # add-like
540 # add-like
538 self._addpath(f, b'n', 0, from_p2=True)
541 self._addpath(f, b'n', 0, from_p2=True)
539 self._map.copymap.pop(f, None)
542 self._map.copymap.pop(f, None)
540
543
541 def add(self, f):
544 def add(self, f):
542 '''Mark a file added.'''
545 '''Mark a file added.'''
543 self._addpath(f, b'a', 0)
546 self._addpath(f, b'a', 0)
544 self._map.copymap.pop(f, None)
547 self._map.copymap.pop(f, None)
545
548
546 def remove(self, f):
549 def remove(self, f):
547 '''Mark a file removed.'''
550 '''Mark a file removed.'''
548 self._dirty = True
551 self._dirty = True
549 oldstate = self[f]
552 oldstate = self[f]
550 size = 0
553 size = 0
551 if self._pl[1] != self._nodeconstants.nullid:
554 if self._pl[1] != self._nodeconstants.nullid:
552 entry = self._map.get(f)
555 entry = self._map.get(f)
553 if entry is not None:
556 if entry is not None:
554 # backup the previous state
557 # backup the previous state
555 if entry[0] == b'm': # merge
558 if entry[0] == b'm': # merge
556 size = NONNORMAL
559 size = NONNORMAL
557 elif entry[0] == b'n' and entry[2] == FROM_P2: # other parent
560 elif entry[0] == b'n' and entry[2] == FROM_P2: # other parent
558 size = FROM_P2
561 size = FROM_P2
559 self._map.otherparentset.add(f)
562 self._map.otherparentset.add(f)
560 self._updatedfiles.add(f)
563 self._updatedfiles.add(f)
561 self._map.removefile(f, oldstate, size)
564 self._map.removefile(f, oldstate, size)
562 if size == 0:
565 if size == 0:
563 self._map.copymap.pop(f, None)
566 self._map.copymap.pop(f, None)
564
567
565 def merge(self, f):
568 def merge(self, f):
566 '''Mark a file merged.'''
569 '''Mark a file merged.'''
567 if self._pl[1] == self._nodeconstants.nullid:
570 if self._pl[1] == self._nodeconstants.nullid:
568 return self.normallookup(f)
571 return self.normallookup(f)
569 return self.otherparent(f)
572 return self.otherparent(f)
570
573
571 def drop(self, f):
574 def drop(self, f):
572 '''Drop a file from the dirstate'''
575 '''Drop a file from the dirstate'''
573 oldstate = self[f]
576 oldstate = self[f]
574 if self._map.dropfile(f, oldstate):
577 if self._map.dropfile(f, oldstate):
575 self._dirty = True
578 self._dirty = True
576 self._updatedfiles.add(f)
579 self._updatedfiles.add(f)
577 self._map.copymap.pop(f, None)
580 self._map.copymap.pop(f, None)
578
581
579 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
582 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
580 if exists is None:
583 if exists is None:
581 exists = os.path.lexists(os.path.join(self._root, path))
584 exists = os.path.lexists(os.path.join(self._root, path))
582 if not exists:
585 if not exists:
583 # Maybe a path component exists
586 # Maybe a path component exists
584 if not ignoremissing and b'/' in path:
587 if not ignoremissing and b'/' in path:
585 d, f = path.rsplit(b'/', 1)
588 d, f = path.rsplit(b'/', 1)
586 d = self._normalize(d, False, ignoremissing, None)
589 d = self._normalize(d, False, ignoremissing, None)
587 folded = d + b"/" + f
590 folded = d + b"/" + f
588 else:
591 else:
589 # No path components, preserve original case
592 # No path components, preserve original case
590 folded = path
593 folded = path
591 else:
594 else:
592 # recursively normalize leading directory components
595 # recursively normalize leading directory components
593 # against dirstate
596 # against dirstate
594 if b'/' in normed:
597 if b'/' in normed:
595 d, f = normed.rsplit(b'/', 1)
598 d, f = normed.rsplit(b'/', 1)
596 d = self._normalize(d, False, ignoremissing, True)
599 d = self._normalize(d, False, ignoremissing, True)
597 r = self._root + b"/" + d
600 r = self._root + b"/" + d
598 folded = d + b"/" + util.fspath(f, r)
601 folded = d + b"/" + util.fspath(f, r)
599 else:
602 else:
600 folded = util.fspath(normed, self._root)
603 folded = util.fspath(normed, self._root)
601 storemap[normed] = folded
604 storemap[normed] = folded
602
605
603 return folded
606 return folded
604
607
605 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
608 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
606 normed = util.normcase(path)
609 normed = util.normcase(path)
607 folded = self._map.filefoldmap.get(normed, None)
610 folded = self._map.filefoldmap.get(normed, None)
608 if folded is None:
611 if folded is None:
609 if isknown:
612 if isknown:
610 folded = path
613 folded = path
611 else:
614 else:
612 folded = self._discoverpath(
615 folded = self._discoverpath(
613 path, normed, ignoremissing, exists, self._map.filefoldmap
616 path, normed, ignoremissing, exists, self._map.filefoldmap
614 )
617 )
615 return folded
618 return folded
616
619
617 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
620 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
618 normed = util.normcase(path)
621 normed = util.normcase(path)
619 folded = self._map.filefoldmap.get(normed, None)
622 folded = self._map.filefoldmap.get(normed, None)
620 if folded is None:
623 if folded is None:
621 folded = self._map.dirfoldmap.get(normed, None)
624 folded = self._map.dirfoldmap.get(normed, None)
622 if folded is None:
625 if folded is None:
623 if isknown:
626 if isknown:
624 folded = path
627 folded = path
625 else:
628 else:
626 # store discovered result in dirfoldmap so that future
629 # store discovered result in dirfoldmap so that future
627 # normalizefile calls don't start matching directories
630 # normalizefile calls don't start matching directories
628 folded = self._discoverpath(
631 folded = self._discoverpath(
629 path, normed, ignoremissing, exists, self._map.dirfoldmap
632 path, normed, ignoremissing, exists, self._map.dirfoldmap
630 )
633 )
631 return folded
634 return folded
632
635
633 def normalize(self, path, isknown=False, ignoremissing=False):
636 def normalize(self, path, isknown=False, ignoremissing=False):
634 """
637 """
635 normalize the case of a pathname when on a casefolding filesystem
638 normalize the case of a pathname when on a casefolding filesystem
636
639
637 isknown specifies whether the filename came from walking the
640 isknown specifies whether the filename came from walking the
638 disk, to avoid extra filesystem access.
641 disk, to avoid extra filesystem access.
639
642
640 If ignoremissing is True, missing paths are returned
643 If ignoremissing is True, missing paths are returned
641 unchanged. Otherwise, we try harder to normalize possibly
644 unchanged. Otherwise, we try harder to normalize possibly
642 existing path components.
645 existing path components.
643
646
644 The normalized case is determined based on the following precedence:
647 The normalized case is determined based on the following precedence:
645
648
646 - version of name already stored in the dirstate
649 - version of name already stored in the dirstate
647 - version of name stored on disk
650 - version of name stored on disk
648 - version provided via command arguments
651 - version provided via command arguments
649 """
652 """
650
653
651 if self._checkcase:
654 if self._checkcase:
652 return self._normalize(path, isknown, ignoremissing)
655 return self._normalize(path, isknown, ignoremissing)
653 return path
656 return path
654
657
655 def clear(self):
658 def clear(self):
656 self._map.clear()
659 self._map.clear()
657 self._lastnormaltime = 0
660 self._lastnormaltime = 0
658 self._updatedfiles.clear()
661 self._updatedfiles.clear()
659 self._dirty = True
662 self._dirty = True
660
663
661 def rebuild(self, parent, allfiles, changedfiles=None):
664 def rebuild(self, parent, allfiles, changedfiles=None):
662 if changedfiles is None:
665 if changedfiles is None:
663 # Rebuild entire dirstate
666 # Rebuild entire dirstate
664 to_lookup = allfiles
667 to_lookup = allfiles
665 to_drop = []
668 to_drop = []
666 lastnormaltime = self._lastnormaltime
669 lastnormaltime = self._lastnormaltime
667 self.clear()
670 self.clear()
668 self._lastnormaltime = lastnormaltime
671 self._lastnormaltime = lastnormaltime
669 elif len(changedfiles) < 10:
672 elif len(changedfiles) < 10:
670 # Avoid turning allfiles into a set, which can be expensive if it's
673 # Avoid turning allfiles into a set, which can be expensive if it's
671 # large.
674 # large.
672 to_lookup = []
675 to_lookup = []
673 to_drop = []
676 to_drop = []
674 for f in changedfiles:
677 for f in changedfiles:
675 if f in allfiles:
678 if f in allfiles:
676 to_lookup.append(f)
679 to_lookup.append(f)
677 else:
680 else:
678 to_drop.append(f)
681 to_drop.append(f)
679 else:
682 else:
680 changedfilesset = set(changedfiles)
683 changedfilesset = set(changedfiles)
681 to_lookup = changedfilesset & set(allfiles)
684 to_lookup = changedfilesset & set(allfiles)
682 to_drop = changedfilesset - to_lookup
685 to_drop = changedfilesset - to_lookup
683
686
684 if self._origpl is None:
687 if self._origpl is None:
685 self._origpl = self._pl
688 self._origpl = self._pl
686 self._map.setparents(parent, self._nodeconstants.nullid)
689 self._map.setparents(parent, self._nodeconstants.nullid)
687
690
688 for f in to_lookup:
691 for f in to_lookup:
689 self.normallookup(f)
692 self.normallookup(f)
690 for f in to_drop:
693 for f in to_drop:
691 self.drop(f)
694 self.drop(f)
692
695
693 self._dirty = True
696 self._dirty = True
694
697
695 def identity(self):
698 def identity(self):
696 """Return identity of dirstate itself to detect changing in storage
699 """Return identity of dirstate itself to detect changing in storage
697
700
698 If identity of previous dirstate is equal to this, writing
701 If identity of previous dirstate is equal to this, writing
699 changes based on the former dirstate out can keep consistency.
702 changes based on the former dirstate out can keep consistency.
700 """
703 """
701 return self._map.identity
704 return self._map.identity
702
705
703 def write(self, tr):
706 def write(self, tr):
704 if not self._dirty:
707 if not self._dirty:
705 return
708 return
706
709
707 filename = self._filename
710 filename = self._filename
708 if tr:
711 if tr:
709 # 'dirstate.write()' is not only for writing in-memory
712 # 'dirstate.write()' is not only for writing in-memory
710 # changes out, but also for dropping ambiguous timestamps.
713 # changes out, but also for dropping ambiguous timestamps.
711 # delayed writing re-raises the "ambiguous timestamp issue".
714 # delayed writing re-raises the "ambiguous timestamp issue".
712 # See also the wiki page below for detail:
715 # See also the wiki page below for detail:
713 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
716 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
714
717
715 # emulate dropping timestamp in 'parsers.pack_dirstate'
718 # emulate dropping timestamp in 'parsers.pack_dirstate'
716 now = _getfsnow(self._opener)
719 now = _getfsnow(self._opener)
717 self._map.clearambiguoustimes(self._updatedfiles, now)
720 self._map.clearambiguoustimes(self._updatedfiles, now)
718
721
719 # emulate that all 'dirstate.normal' results are written out
722 # emulate that all 'dirstate.normal' results are written out
720 self._lastnormaltime = 0
723 self._lastnormaltime = 0
721 self._updatedfiles.clear()
724 self._updatedfiles.clear()
722
725
723 # delay writing in-memory changes out
726 # delay writing in-memory changes out
724 tr.addfilegenerator(
727 tr.addfilegenerator(
725 b'dirstate',
728 b'dirstate',
726 (self._filename,),
729 (self._filename,),
727 self._writedirstate,
730 self._writedirstate,
728 location=b'plain',
731 location=b'plain',
729 )
732 )
730 return
733 return
731
734
732 st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
735 st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
733 self._writedirstate(st)
736 self._writedirstate(st)
734
737
735 def addparentchangecallback(self, category, callback):
738 def addparentchangecallback(self, category, callback):
736 """add a callback to be called when the wd parents are changed
739 """add a callback to be called when the wd parents are changed
737
740
738 Callback will be called with the following arguments:
741 Callback will be called with the following arguments:
739 dirstate, (oldp1, oldp2), (newp1, newp2)
742 dirstate, (oldp1, oldp2), (newp1, newp2)
740
743
741 Category is a unique identifier to allow overwriting an old callback
744 Category is a unique identifier to allow overwriting an old callback
742 with a newer callback.
745 with a newer callback.
743 """
746 """
744 self._plchangecallbacks[category] = callback
747 self._plchangecallbacks[category] = callback
745
748
746 def _writedirstate(self, st):
749 def _writedirstate(self, st):
747 # notify callbacks about parents change
750 # notify callbacks about parents change
748 if self._origpl is not None and self._origpl != self._pl:
751 if self._origpl is not None and self._origpl != self._pl:
749 for c, callback in sorted(
752 for c, callback in sorted(
750 pycompat.iteritems(self._plchangecallbacks)
753 pycompat.iteritems(self._plchangecallbacks)
751 ):
754 ):
752 callback(self, self._origpl, self._pl)
755 callback(self, self._origpl, self._pl)
753 self._origpl = None
756 self._origpl = None
754 # use the modification time of the newly created temporary file as the
757 # use the modification time of the newly created temporary file as the
755 # filesystem's notion of 'now'
758 # filesystem's notion of 'now'
756 now = util.fstat(st)[stat.ST_MTIME] & _rangemask
759 now = util.fstat(st)[stat.ST_MTIME] & _rangemask
757
760
758 # a large enough 'delaywrite' prevents 'pack_dirstate' from dropping the
761 # a large enough 'delaywrite' prevents 'pack_dirstate' from dropping the
759 # timestamp of each entry in dirstate, because of 'now > mtime'
762 # timestamp of each entry in dirstate, because of 'now > mtime'
760 delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
763 delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
761 if delaywrite > 0:
764 if delaywrite > 0:
762 # do we have any files to delay for?
765 # do we have any files to delay for?
763 for f, e in pycompat.iteritems(self._map):
766 for f, e in pycompat.iteritems(self._map):
764 if e[0] == b'n' and e[3] == now:
767 if e[0] == b'n' and e[3] == now:
765 import time # to avoid useless import
768 import time # to avoid useless import
766
769
767 # rather than sleep n seconds, sleep until the next
770 # rather than sleep n seconds, sleep until the next
768 # multiple of n seconds
771 # multiple of n seconds
769 clock = time.time()
772 clock = time.time()
770 start = int(clock) - (int(clock) % delaywrite)
773 start = int(clock) - (int(clock) % delaywrite)
771 end = start + delaywrite
774 end = start + delaywrite
772 time.sleep(end - clock)
775 time.sleep(end - clock)
773 now = end # trust our estimate that the end is near now
776 now = end # trust our estimate that the end is near now
774 break
777 break
775
778
776 self._map.write(st, now)
779 self._map.write(st, now)
777 self._lastnormaltime = 0
780 self._lastnormaltime = 0
778 self._dirty = False
781 self._dirty = False
779
782
780 def _dirignore(self, f):
783 def _dirignore(self, f):
781 if self._ignore(f):
784 if self._ignore(f):
782 return True
785 return True
783 for p in pathutil.finddirs(f):
786 for p in pathutil.finddirs(f):
784 if self._ignore(p):
787 if self._ignore(p):
785 return True
788 return True
786 return False
789 return False
787
790
788 def _ignorefiles(self):
791 def _ignorefiles(self):
789 files = []
792 files = []
790 if os.path.exists(self._join(b'.hgignore')):
793 if os.path.exists(self._join(b'.hgignore')):
791 files.append(self._join(b'.hgignore'))
794 files.append(self._join(b'.hgignore'))
792 for name, path in self._ui.configitems(b"ui"):
795 for name, path in self._ui.configitems(b"ui"):
793 if name == b'ignore' or name.startswith(b'ignore.'):
796 if name == b'ignore' or name.startswith(b'ignore.'):
794 # we need to use os.path.join here rather than self._join
797 # we need to use os.path.join here rather than self._join
795 # because path is arbitrary and user-specified
798 # because path is arbitrary and user-specified
796 files.append(os.path.join(self._rootdir, util.expandpath(path)))
799 files.append(os.path.join(self._rootdir, util.expandpath(path)))
797 return files
800 return files
798
801
799 def _ignorefileandline(self, f):
802 def _ignorefileandline(self, f):
800 files = collections.deque(self._ignorefiles())
803 files = collections.deque(self._ignorefiles())
801 visited = set()
804 visited = set()
802 while files:
805 while files:
803 i = files.popleft()
806 i = files.popleft()
804 patterns = matchmod.readpatternfile(
807 patterns = matchmod.readpatternfile(
805 i, self._ui.warn, sourceinfo=True
808 i, self._ui.warn, sourceinfo=True
806 )
809 )
807 for pattern, lineno, line in patterns:
810 for pattern, lineno, line in patterns:
808 kind, p = matchmod._patsplit(pattern, b'glob')
811 kind, p = matchmod._patsplit(pattern, b'glob')
809 if kind == b"subinclude":
812 if kind == b"subinclude":
810 if p not in visited:
813 if p not in visited:
811 files.append(p)
814 files.append(p)
812 continue
815 continue
813 m = matchmod.match(
816 m = matchmod.match(
814 self._root, b'', [], [pattern], warn=self._ui.warn
817 self._root, b'', [], [pattern], warn=self._ui.warn
815 )
818 )
816 if m(f):
819 if m(f):
817 return (i, lineno, line)
820 return (i, lineno, line)
818 visited.add(i)
821 visited.add(i)
819 return (None, -1, b"")
822 return (None, -1, b"")
820
823
821 def _walkexplicit(self, match, subrepos):
824 def _walkexplicit(self, match, subrepos):
822 """Get stat data about the files explicitly specified by match.
825 """Get stat data about the files explicitly specified by match.
823
826
824 Return a triple (results, dirsfound, dirsnotfound).
827 Return a triple (results, dirsfound, dirsnotfound).
825 - results is a mapping from filename to stat result. It also contains
828 - results is a mapping from filename to stat result. It also contains
826 listings mapping subrepos and .hg to None.
829 listings mapping subrepos and .hg to None.
827 - dirsfound is a list of files found to be directories.
830 - dirsfound is a list of files found to be directories.
828 - dirsnotfound is a list of files that the dirstate thinks are
831 - dirsnotfound is a list of files that the dirstate thinks are
829 directories and that were not found."""
832 directories and that were not found."""
830
833
831 def badtype(mode):
834 def badtype(mode):
832 kind = _(b'unknown')
835 kind = _(b'unknown')
833 if stat.S_ISCHR(mode):
836 if stat.S_ISCHR(mode):
834 kind = _(b'character device')
837 kind = _(b'character device')
835 elif stat.S_ISBLK(mode):
838 elif stat.S_ISBLK(mode):
836 kind = _(b'block device')
839 kind = _(b'block device')
837 elif stat.S_ISFIFO(mode):
840 elif stat.S_ISFIFO(mode):
838 kind = _(b'fifo')
841 kind = _(b'fifo')
839 elif stat.S_ISSOCK(mode):
842 elif stat.S_ISSOCK(mode):
840 kind = _(b'socket')
843 kind = _(b'socket')
841 elif stat.S_ISDIR(mode):
844 elif stat.S_ISDIR(mode):
842 kind = _(b'directory')
845 kind = _(b'directory')
843 return _(b'unsupported file type (type is %s)') % kind
846 return _(b'unsupported file type (type is %s)') % kind
844
847
845 badfn = match.bad
848 badfn = match.bad
846 dmap = self._map
849 dmap = self._map
847 lstat = os.lstat
850 lstat = os.lstat
848 getkind = stat.S_IFMT
851 getkind = stat.S_IFMT
849 dirkind = stat.S_IFDIR
852 dirkind = stat.S_IFDIR
850 regkind = stat.S_IFREG
853 regkind = stat.S_IFREG
851 lnkkind = stat.S_IFLNK
854 lnkkind = stat.S_IFLNK
852 join = self._join
855 join = self._join
853 dirsfound = []
856 dirsfound = []
854 foundadd = dirsfound.append
857 foundadd = dirsfound.append
855 dirsnotfound = []
858 dirsnotfound = []
856 notfoundadd = dirsnotfound.append
859 notfoundadd = dirsnotfound.append
857
860
858 if not match.isexact() and self._checkcase:
861 if not match.isexact() and self._checkcase:
859 normalize = self._normalize
862 normalize = self._normalize
860 else:
863 else:
861 normalize = None
864 normalize = None
862
865
863 files = sorted(match.files())
866 files = sorted(match.files())
864 subrepos.sort()
867 subrepos.sort()
865 i, j = 0, 0
868 i, j = 0, 0
866 while i < len(files) and j < len(subrepos):
869 while i < len(files) and j < len(subrepos):
867 subpath = subrepos[j] + b"/"
870 subpath = subrepos[j] + b"/"
868 if files[i] < subpath:
871 if files[i] < subpath:
869 i += 1
872 i += 1
870 continue
873 continue
871 while i < len(files) and files[i].startswith(subpath):
874 while i < len(files) and files[i].startswith(subpath):
872 del files[i]
875 del files[i]
873 j += 1
876 j += 1
874
877
875 if not files or b'' in files:
878 if not files or b'' in files:
876 files = [b'']
879 files = [b'']
877 # constructing the foldmap is expensive, so don't do it for the
880 # constructing the foldmap is expensive, so don't do it for the
878 # common case where files is ['']
881 # common case where files is ['']
879 normalize = None
882 normalize = None
880 results = dict.fromkeys(subrepos)
883 results = dict.fromkeys(subrepos)
881 results[b'.hg'] = None
884 results[b'.hg'] = None
882
885
883 for ff in files:
886 for ff in files:
884 if normalize:
887 if normalize:
885 nf = normalize(ff, False, True)
888 nf = normalize(ff, False, True)
886 else:
889 else:
887 nf = ff
890 nf = ff
888 if nf in results:
891 if nf in results:
889 continue
892 continue
890
893
891 try:
894 try:
892 st = lstat(join(nf))
895 st = lstat(join(nf))
893 kind = getkind(st.st_mode)
896 kind = getkind(st.st_mode)
894 if kind == dirkind:
897 if kind == dirkind:
895 if nf in dmap:
898 if nf in dmap:
896 # file replaced by dir on disk but still in dirstate
899 # file replaced by dir on disk but still in dirstate
897 results[nf] = None
900 results[nf] = None
898 foundadd((nf, ff))
901 foundadd((nf, ff))
899 elif kind == regkind or kind == lnkkind:
902 elif kind == regkind or kind == lnkkind:
900 results[nf] = st
903 results[nf] = st
901 else:
904 else:
902 badfn(ff, badtype(kind))
905 badfn(ff, badtype(kind))
903 if nf in dmap:
906 if nf in dmap:
904 results[nf] = None
907 results[nf] = None
905 except OSError as inst: # nf not found on disk - it is dirstate only
908 except OSError as inst: # nf not found on disk - it is dirstate only
906 if nf in dmap: # does it exactly match a missing file?
909 if nf in dmap: # does it exactly match a missing file?
907 results[nf] = None
910 results[nf] = None
908 else: # does it match a missing directory?
911 else: # does it match a missing directory?
909 if self._map.hasdir(nf):
912 if self._map.hasdir(nf):
910 notfoundadd(nf)
913 notfoundadd(nf)
911 else:
914 else:
912 badfn(ff, encoding.strtolocal(inst.strerror))
915 badfn(ff, encoding.strtolocal(inst.strerror))
913
916
914 # match.files() may contain explicitly-specified paths that shouldn't
917 # match.files() may contain explicitly-specified paths that shouldn't
915 # be taken; drop them from the list of files found. dirsfound/notfound
918 # be taken; drop them from the list of files found. dirsfound/notfound
916 # aren't filtered here because they will be tested later.
919 # aren't filtered here because they will be tested later.
917 if match.anypats():
920 if match.anypats():
918 for f in list(results):
921 for f in list(results):
919 if f == b'.hg' or f in subrepos:
922 if f == b'.hg' or f in subrepos:
920 # keep sentinel to disable further out-of-repo walks
923 # keep sentinel to disable further out-of-repo walks
921 continue
924 continue
922 if not match(f):
925 if not match(f):
923 del results[f]
926 del results[f]
924
927
925 # Case insensitive filesystems cannot rely on lstat() failing to detect
928 # Case insensitive filesystems cannot rely on lstat() failing to detect
926 # a case-only rename. Prune the stat object for any file that does not
929 # a case-only rename. Prune the stat object for any file that does not
927 # match the case in the filesystem, if there are multiple files that
930 # match the case in the filesystem, if there are multiple files that
928 # normalize to the same path.
931 # normalize to the same path.
929 if match.isexact() and self._checkcase:
932 if match.isexact() and self._checkcase:
930 normed = {}
933 normed = {}
931
934
932 for f, st in pycompat.iteritems(results):
935 for f, st in pycompat.iteritems(results):
933 if st is None:
936 if st is None:
934 continue
937 continue
935
938
936 nc = util.normcase(f)
939 nc = util.normcase(f)
937 paths = normed.get(nc)
940 paths = normed.get(nc)
938
941
939 if paths is None:
942 if paths is None:
940 paths = set()
943 paths = set()
941 normed[nc] = paths
944 normed[nc] = paths
942
945
943 paths.add(f)
946 paths.add(f)
944
947
945 for norm, paths in pycompat.iteritems(normed):
948 for norm, paths in pycompat.iteritems(normed):
946 if len(paths) > 1:
949 if len(paths) > 1:
947 for path in paths:
950 for path in paths:
948 folded = self._discoverpath(
951 folded = self._discoverpath(
949 path, norm, True, None, self._map.dirfoldmap
952 path, norm, True, None, self._map.dirfoldmap
950 )
953 )
951 if path != folded:
954 if path != folded:
952 results[path] = None
955 results[path] = None
953
956
954 return results, dirsfound, dirsnotfound
957 return results, dirsfound, dirsnotfound
955
958
956 def walk(self, match, subrepos, unknown, ignored, full=True):
959 def walk(self, match, subrepos, unknown, ignored, full=True):
957 """
960 """
958 Walk recursively through the directory tree, finding all files
961 Walk recursively through the directory tree, finding all files
959 matched by match.
962 matched by match.
960
963
961 If full is False, maybe skip some known-clean files.
964 If full is False, maybe skip some known-clean files.
962
965
963 Return a dict mapping filename to stat-like object (either
966 Return a dict mapping filename to stat-like object (either
964 mercurial.osutil.stat instance or return value of os.stat()).
967 mercurial.osutil.stat instance or return value of os.stat()).
965
968
966 """
969 """
        # full is a flag that extensions that hook into walk can use -- this
        # implementation doesn't use it at all. This satisfies the contract
        # because we only guarantee a "maybe".

        if ignored:
            ignore = util.never
            dirignore = util.never
        elif unknown:
            ignore = self._ignore
            dirignore = self._dirignore
        else:
            # if not unknown and not ignored, drop dir recursion and step 2
            ignore = util.always
            dirignore = util.always

        matchfn = match.matchfn
        matchalways = match.always()
        matchtdir = match.traversedir
        dmap = self._map
        listdir = util.listdir
        lstat = os.lstat
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join

        exact = skipstep3 = False
        if match.isexact():  # match.exact
            exact = True
            dirignore = util.always  # skip step 2
        elif match.prefix():  # match.match, no patterns
            skipstep3 = True

        if not exact and self._checkcase:
            normalize = self._normalize
            normalizefile = self._normalizefile
            skipstep3 = False
        else:
            normalize = self._normalize
            normalizefile = None

        # step 1: find all explicit files
        results, work, dirsnotfound = self._walkexplicit(match, subrepos)
        if matchtdir:
            for d in work:
                matchtdir(d[0])
            for d in dirsnotfound:
                matchtdir(d)

        skipstep3 = skipstep3 and not (work or dirsnotfound)
        work = [d for d in work if not dirignore(d[0])]

        # step 2: visit subdirectories
        def traverse(work, alreadynormed):
            wadd = work.append
            while work:
                tracing.counter('dirstate.walk work', len(work))
                nd = work.pop()
                visitentries = match.visitchildrenset(nd)
                if not visitentries:
                    continue
                if visitentries == b'this' or visitentries == b'all':
                    visitentries = None
                skip = None
                if nd != b'':
                    skip = b'.hg'
                try:
                    with tracing.log('dirstate.walk.traverse listdir %s', nd):
                        entries = listdir(join(nd), stat=True, skip=skip)
                except OSError as inst:
                    if inst.errno in (errno.EACCES, errno.ENOENT):
                        match.bad(
                            self.pathto(nd), encoding.strtolocal(inst.strerror)
                        )
                        continue
                    raise
                for f, kind, st in entries:
                    # Some matchers may return files in the visitentries set,
                    # instead of 'this', if the matcher explicitly mentions them
                    # and is not an exactmatcher. This is acceptable; we do not
                    # make any hard assumptions about file-or-directory below
                    # based on the presence of `f` in visitentries. If
                    # visitchildrenset returned a set, we can always skip the
                    # entries *not* in the set it provided regardless of whether
                    # they're actually a file or a directory.
                    if visitentries and f not in visitentries:
                        continue
                    if normalizefile:
                        # even though f might be a directory, we're only
                        # interested in comparing it to files currently in the
                        # dmap -- therefore normalizefile is enough
                        nf = normalizefile(
                            nd and (nd + b"/" + f) or f, True, True
                        )
                    else:
                        nf = nd and (nd + b"/" + f) or f
                    if nf not in results:
                        if kind == dirkind:
                            if not ignore(nf):
                                if matchtdir:
                                    matchtdir(nf)
                                wadd(nf)
                            if nf in dmap and (matchalways or matchfn(nf)):
                                results[nf] = None
                        elif kind == regkind or kind == lnkkind:
                            if nf in dmap:
                                if matchalways or matchfn(nf):
                                    results[nf] = st
                            elif (matchalways or matchfn(nf)) and not ignore(
                                nf
                            ):
                                # unknown file -- normalize if necessary
                                if not alreadynormed:
                                    nf = normalize(nf, False, True)
                                results[nf] = st
                        elif nf in dmap and (matchalways or matchfn(nf)):
                            results[nf] = None

        for nd, d in work:
            # alreadynormed means that processwork doesn't have to do any
            # expensive directory normalization
            alreadynormed = not normalize or nd == d
            traverse([d], alreadynormed)

        for s in subrepos:
            del results[s]
        del results[b'.hg']

        # step 3: visit remaining files from dmap
        if not skipstep3 and not exact:
            # If a dmap file is not in results yet, it was either
            # a) not matching matchfn b) ignored, c) missing, or d) under a
            # symlink directory.
            if not results and matchalways:
                visit = [f for f in dmap]
            else:
                visit = [f for f in dmap if f not in results and matchfn(f)]
            visit.sort()

            if unknown:
                # unknown == True means we walked all dirs under the roots
                # that weren't ignored, and everything that matched was stat'ed
                # and is already in results.
                # The rest must thus be ignored or under a symlink.
                audit_path = pathutil.pathauditor(self._root, cached=True)

                for nf in iter(visit):
                    # If a stat for the same file was already added with a
                    # different case, don't add one for this, since that would
                    # make it appear as if the file exists under both names
                    # on disk.
                    if (
                        normalizefile
                        and normalizefile(nf, True, True) in results
                    ):
                        results[nf] = None
                    # Report ignored items in the dmap as long as they are not
                    # under a symlink directory.
                    elif audit_path.check(nf):
                        try:
                            results[nf] = lstat(join(nf))
                            # file was just ignored, no links, and exists
                        except OSError:
                            # file doesn't exist
                            results[nf] = None
                    else:
                        # It's either missing or under a symlink directory
                        # which we in this case report as missing
                        results[nf] = None
            else:
                # We may not have walked the full directory tree above,
                # so stat and check everything we missed.
                iv = iter(visit)
                for st in util.statfiles([join(i) for i in visit]):
                    results[next(iv)] = st
        return results

    def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
        # Force Rayon (Rust parallelism library) to respect the number of
        # workers. This is a temporary workaround until Rust code knows
        # how to read the config file.
        numcpus = self._ui.configint(b"worker", b"numcpus")
        if numcpus is not None:
            encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)

        workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
        if not workers_enabled:
            encoding.environ[b"RAYON_NUM_THREADS"] = b"1"

        (
            lookup,
            modified,
            added,
            removed,
            deleted,
            clean,
            ignored,
            unknown,
            warnings,
            bad,
            traversed,
            dirty,
        ) = rustmod.status(
            self._map._rustmap,
            matcher,
            self._rootdir,
            self._ignorefiles(),
            self._checkexec,
            self._lastnormaltime,
            bool(list_clean),
            bool(list_ignored),
            bool(list_unknown),
            bool(matcher.traversedir),
        )

        self._dirty |= dirty

        if matcher.traversedir:
            for dir in traversed:
                matcher.traversedir(dir)

        if self._ui.warn:
            for item in warnings:
                if isinstance(item, tuple):
                    file_path, syntax = item
                    msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
                        file_path,
                        syntax,
                    )
                    self._ui.warn(msg)
                else:
                    msg = _(b"skipping unreadable pattern file '%s': %s\n")
                    self._ui.warn(
                        msg
                        % (
                            pathutil.canonpath(
                                self._rootdir, self._rootdir, item
                            ),
                            b"No such file or directory",
                        )
                    )

        for (fn, message) in bad:
            matcher.bad(fn, encoding.strtolocal(message))

        status = scmutil.status(
            modified=modified,
            added=added,
            removed=removed,
            deleted=deleted,
            unknown=unknown,
            ignored=ignored,
            clean=clean,
        )
        return (lookup, status)

    def status(self, match, subrepos, ignored, clean, unknown):
        """Determine the status of the working copy relative to the
        dirstate and return a pair of (unsure, status), where status is of type
        scmutil.status and:

          unsure:
            files that might have been modified since the dirstate was
            written, but need to be read to be sure (size is the same
            but mtime differs)
          status.modified:
            files that have definitely been modified since the dirstate
            was written (different size or mode)
          status.clean:
            files that have definitely not been modified since the
            dirstate was written
        """
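        # For illustration only (hypothetical caller, `repo` is assumed): a
        # caller typically unpacks the result as
        #     unsure, st = repo.dirstate.status(match, [], False, False, True)
        # and then reads st.modified, st.added, st.removed, etc.; files listed
        # in `unsure` still need a content comparison to be classified.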
        listignored, listclean, listunknown = ignored, clean, unknown
        lookup, modified, added, unknown, ignored = [], [], [], [], []
        removed, deleted, clean = [], [], []

        dmap = self._map
        dmap.preload()

        use_rust = True

        allowed_matchers = (
            matchmod.alwaysmatcher,
            matchmod.exactmatcher,
            matchmod.includematcher,
        )

        if rustmod is None:
            use_rust = False
        elif self._checkcase:
            # Case-insensitive filesystems are not handled yet
            use_rust = False
        elif subrepos:
            use_rust = False
        elif sparse.enabled:
            use_rust = False
        elif not isinstance(match, allowed_matchers):
            # Some matchers have yet to be implemented
            use_rust = False

        if use_rust:
            try:
                return self._rust_status(
                    match, listclean, listignored, listunknown
                )
            except rustmod.FallbackError:
                pass

        def noop(f):
            pass

        dcontains = dmap.__contains__
        dget = dmap.__getitem__
        ladd = lookup.append  # aka "unsure"
        madd = modified.append
        aadd = added.append
        uadd = unknown.append if listunknown else noop
        iadd = ignored.append if listignored else noop
        radd = removed.append
        dadd = deleted.append
        cadd = clean.append if listclean else noop
        mexact = match.exact
        dirignore = self._dirignore
        checkexec = self._checkexec
        copymap = self._map.copymap
        lastnormaltime = self._lastnormaltime

        # We need to do full walks when either
        # - we're listing all clean files, or
        # - match.traversedir does something, because match.traversedir should
        #   be called for every dir in the working dir
        full = listclean or match.traversedir is not None
        for fn, st in pycompat.iteritems(
            self.walk(match, subrepos, listunknown, listignored, full=full)
        ):
            if not dcontains(fn):
                if (listignored or mexact(fn)) and dirignore(fn):
                    if listignored:
                        iadd(fn)
                else:
                    uadd(fn)
                continue

            # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
            # written like that for performance reasons. dmap[fn] is not a
            # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
            # opcode has fast paths when the value to be unpacked is a tuple or
            # a list, but falls back to creating a full-fledged iterator in
            # general. That is much slower than simply accessing and storing the
            # tuple members one by one.
            t = dget(fn)
            state = t[0]
            mode = t[1]
            size = t[2]
            time = t[3]

            if not st and state in b"nma":
                dadd(fn)
            elif state == b'n':
                if (
                    size >= 0
                    and (
                        (size != st.st_size and size != st.st_size & _rangemask)
                        or ((mode ^ st.st_mode) & 0o100 and checkexec)
                    )
                    or size == FROM_P2  # other parent
                    or fn in copymap
                ):
                    if stat.S_ISLNK(st.st_mode) and size != st.st_size:
                        # issue6456: Size returned may be longer due to
                        # encryption on EXT-4 fscrypt, undecided.
                        ladd(fn)
                    else:
                        madd(fn)
                elif (
                    time != st[stat.ST_MTIME]
                    and time != st[stat.ST_MTIME] & _rangemask
                ):
                    ladd(fn)
                elif st[stat.ST_MTIME] == lastnormaltime:
                    # fn may have just been marked as normal and it may have
                    # changed in the same second without changing its size.
                    # This can happen if we quickly do multiple commits.
                    # Force lookup, so we don't miss such a racy file change.
                    ladd(fn)
                elif listclean:
                    cadd(fn)
            elif state == b'm':
                madd(fn)
            elif state == b'a':
                aadd(fn)
            elif state == b'r':
                radd(fn)
        status = scmutil.status(
            modified, added, removed, deleted, unknown, ignored, clean
        )
        return (lookup, status)

    def matches(self, match):
        """
        return files in the dirstate (in whatever state) filtered by match
        """
        dmap = self._map
        if rustmod is not None:
            dmap = self._map._rustmap

        if match.always():
            return dmap.keys()
        files = match.files()
        if match.isexact():
            # fast path -- filter the other way around, since typically files is
            # much smaller than dmap
            return [f for f in files if f in dmap]
        if match.prefix() and all(fn in dmap for fn in files):
            # fast path -- all the values are known to be files, so just return
            # that
            return list(files)
        return [f for f in dmap if match(f)]

    def _actualfilename(self, tr):
        if tr:
            return self._pendingfilename
        else:
            return self._filename

    def savebackup(self, tr, backupname):
        '''Save current dirstate into backup file'''
        filename = self._actualfilename(tr)
        assert backupname != filename

        # use '_writedirstate' instead of 'write' to write changes certainly,
        # because the latter omits writing out if transaction is running.
        # output file will be used to create backup of dirstate at this point.
        if self._dirty or not self._opener.exists(filename):
            self._writedirstate(
                self._opener(filename, b"w", atomictemp=True, checkambig=True)
            )

        if tr:
            # ensure that subsequent tr.writepending returns True for
            # changes written out above, even if dirstate is never
            # changed after this
            tr.addfilegenerator(
                b'dirstate',
                (self._filename,),
                self._writedirstate,
                location=b'plain',
            )

            # ensure that pending file written above is unlinked at
            # failure, even if tr.writepending isn't invoked until the
            # end of this transaction
            tr.registertmp(filename, location=b'plain')

        self._opener.tryunlink(backupname)
        # hardlink backup is okay because _writedirstate is always called
        # with an "atomictemp=True" file.
        util.copyfile(
            self._opener.join(filename),
            self._opener.join(backupname),
            hardlink=True,
        )

    def restorebackup(self, tr, backupname):
        '''Restore dirstate by backup file'''
        # this "invalidate()" prevents "wlock.release()" from writing
        # changes of dirstate out after restoring from backup file
        self.invalidate()
        filename = self._actualfilename(tr)
        o = self._opener
        if util.samefile(o.join(backupname), o.join(filename)):
            o.unlink(backupname)
        else:
            o.rename(backupname, filename, checkambig=True)

    def clearbackup(self, tr, backupname):
        '''Clear backup file'''
        self._opener.unlink(backupname)


class dirstatemap(object):
    """Map encapsulating the dirstate's contents.

    The dirstate contains the following state:

    - `identity` is the identity of the dirstate file, which can be used to
      detect when changes have occurred to the dirstate file.

    - `parents` is a pair containing the parents of the working copy. The
      parents are updated by calling `setparents`.

    - the state map maps filenames to tuples of (state, mode, size, mtime),
      where state is a single character representing 'normal', 'added',
      'removed', or 'merged'. It is read by treating the dirstate as a
      dict. File state is updated by calling the `addfile`, `removefile` and
      `dropfile` methods.

    - `copymap` maps destination filenames to their source filename.

    The dirstate also provides the following views onto the state:

    - `nonnormalset` is a set of the filenames that have state other
      than 'normal', or are normal but have an mtime of -1 ('normallookup').

    - `otherparentset` is a set of the filenames that are marked as coming
      from the second parent when the dirstate is currently being merged.

    - `filefoldmap` is a dict mapping normalized filenames to the denormalized
      form that they appear as in the dirstate.

    - `dirfoldmap` is a dict mapping normalized directory names to the
      denormalized form that they appear as in the dirstate.
    """
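    # For illustration only (hypothetical entry): a clean tracked file could
    # be stored roughly as
    #     self._map[b'foo/bar.txt'] == (b'n', 0o644, 1024, 1630000000)
    # i.e. (state, mode, size, mtime); special values such as FROM_P2 (in the
    # size slot) and AMBIGUOUS_TIME (in the mtime slot) flag files coming from
    # the other merge parent or with ambiguous timestamps.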

    def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
        self._ui = ui
        self._opener = opener
        self._root = root
        self._filename = b'dirstate'
        self._nodelen = 20
        self._nodeconstants = nodeconstants
        assert (
            not use_dirstate_v2
        ), "should have detected unsupported requirement"

        self._parents = None
        self._dirtyparents = False

        # for consistent view between _pl() and _read() invocations
        self._pendingmode = None

    @propertycache
    def _map(self):
        self._map = {}
        self.read()
        return self._map

    @propertycache
    def copymap(self):
        self.copymap = {}
        self._map
        return self.copymap

    def directories(self):
        # Rust / dirstate-v2 only
        return []

    def clear(self):
        self._map.clear()
        self.copymap.clear()
        self.setparents(self._nodeconstants.nullid, self._nodeconstants.nullid)
        util.clearcachedproperty(self, b"_dirs")
        util.clearcachedproperty(self, b"_alldirs")
        util.clearcachedproperty(self, b"filefoldmap")
        util.clearcachedproperty(self, b"dirfoldmap")
        util.clearcachedproperty(self, b"nonnormalset")
        util.clearcachedproperty(self, b"otherparentset")

    def items(self):
        return pycompat.iteritems(self._map)

    # forward for python2,3 compat
    iteritems = items

    def __len__(self):
        return len(self._map)

    def __iter__(self):
        return iter(self._map)

    def get(self, key, default=None):
        return self._map.get(key, default)

    def __contains__(self, key):
        return key in self._map

    def __getitem__(self, key):
        return self._map[key]

    def keys(self):
        return self._map.keys()

    def preload(self):
        """Loads the underlying data, if it's not already loaded"""
        self._map

    def addfile(self, f, oldstate, state, mode, size, mtime):
        """Add a tracked file to the dirstate."""
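        # For illustration only (hypothetical arguments): marking a previously
        # unknown file as added could look roughly like
        #     map.addfile(b'foo.txt', b'?', b'a', 0, -1, -1)
        # where oldstate b'?' means the file was not tracked before; since the
        # state is not b'n', the entry also lands in `nonnormalset` below.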
        if oldstate in b"?r" and "_dirs" in self.__dict__:
            self._dirs.addpath(f)
        if oldstate == b"?" and "_alldirs" in self.__dict__:
            self._alldirs.addpath(f)
        self._map[f] = dirstatetuple(state, mode, size, mtime)
        if state != b'n' or mtime == AMBIGUOUS_TIME:
            self.nonnormalset.add(f)
        if size == FROM_P2:
            self.otherparentset.add(f)

    def removefile(self, f, oldstate, size):
        """
        Mark a file as removed in the dirstate.

        The `size` parameter is used to store sentinel values that indicate
        the file's previous state. In the future, we should refactor this
        to be more explicit about what that state is.
        """
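        # For illustration only: one such sentinel used in this module is
        # FROM_P2, which records that the removed file came from the second
        # parent of an in-progress merge.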
        if oldstate not in b"?r" and "_dirs" in self.__dict__:
            self._dirs.delpath(f)
        if oldstate == b"?" and "_alldirs" in self.__dict__:
            self._alldirs.addpath(f)
        if "filefoldmap" in self.__dict__:
            normed = util.normcase(f)
            self.filefoldmap.pop(normed, None)
        self._map[f] = dirstatetuple(b'r', 0, size, 0)
        self.nonnormalset.add(f)

    def dropfile(self, f, oldstate):
        """
        Remove a file from the dirstate. Returns True if the file was
        previously recorded.
        """
        exists = self._map.pop(f, None) is not None
        if exists:
            if oldstate != b"r" and "_dirs" in self.__dict__:
                self._dirs.delpath(f)
            if "_alldirs" in self.__dict__:
                self._alldirs.delpath(f)
        if "filefoldmap" in self.__dict__:
            normed = util.normcase(f)
            self.filefoldmap.pop(normed, None)
        self.nonnormalset.discard(f)
        return exists

    def clearambiguoustimes(self, files, now):
        for f in files:
            e = self.get(f)
            if e is not None and e[0] == b'n' and e[3] == now:
                self._map[f] = dirstatetuple(e[0], e[1], e[2], AMBIGUOUS_TIME)
                self.nonnormalset.add(f)

    def nonnormalentries(self):
        '''Compute the nonnormal dirstate entries from the dmap'''
        try:
            return parsers.nonnormalotherparententries(self._map)
        except AttributeError:
            nonnorm = set()
            otherparent = set()
            for fname, e in pycompat.iteritems(self._map):
                if e[0] != b'n' or e[3] == AMBIGUOUS_TIME:
                    nonnorm.add(fname)
                if e[0] == b'n' and e[2] == FROM_P2:
                    otherparent.add(fname)
            return nonnorm, otherparent

    @propertycache
    def filefoldmap(self):
        """Returns a dictionary mapping normalized case paths to their
        non-normalized versions.
        """
        try:
            makefilefoldmap = parsers.make_file_foldmap
        except AttributeError:
            pass
        else:
            return makefilefoldmap(
                self._map, util.normcasespec, util.normcasefallback
            )

        f = {}
        normcase = util.normcase
        for name, s in pycompat.iteritems(self._map):
            if s[0] != b'r':
                f[normcase(name)] = name
        f[b'.'] = b'.'  # prevents useless util.fspath() invocation
        return f

    def hastrackeddir(self, d):
        """
        Returns True if the dirstate contains a tracked (not removed) file
        in this directory.
        """
        return d in self._dirs

    def hasdir(self, d):
        """
        Returns True if the dirstate contains a file (tracked or removed)
        in this directory.
        """
        return d in self._alldirs

    @propertycache
    def _dirs(self):
        return pathutil.dirs(self._map, b'r')

    @propertycache
    def _alldirs(self):
        return pathutil.dirs(self._map)

    def _opendirstatefile(self):
        fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
        if self._pendingmode is not None and self._pendingmode != mode:
            fp.close()
            raise error.Abort(
                _(b'working directory state may be changed parallelly')
            )
        self._pendingmode = mode
        return fp

    def parents(self):
        if not self._parents:
            try:
                fp = self._opendirstatefile()
                st = fp.read(2 * self._nodelen)
                fp.close()
            except IOError as err:
                if err.errno != errno.ENOENT:
                    raise
                # File doesn't exist, so the current state is empty
                st = b''

            l = len(st)
            if l == self._nodelen * 2:
                self._parents = (
                    st[: self._nodelen],
                    st[self._nodelen : 2 * self._nodelen],
                )
            elif l == 0:
                self._parents = (
                    self._nodeconstants.nullid,
                    self._nodeconstants.nullid,
                )
            else:
                raise error.Abort(
                    _(b'working directory state appears damaged!')
                )

        return self._parents

    def setparents(self, p1, p2):
        self._parents = (p1, p2)
        self._dirtyparents = True

    def read(self):
        # ignore HG_PENDING because identity is used only for writing
        self.identity = util.filestat.frompath(
            self._opener.join(self._filename)
        )

        try:
            fp = self._opendirstatefile()
            try:
                st = fp.read()
            finally:
                fp.close()
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            return
        if not st:
            return

        if util.safehasattr(parsers, b'dict_new_presized'):
            # Make an estimate of the number of files in the dirstate based on
            # its size. This trades wasting some memory for avoiding costly
            # resizes. Each entry has a prefix of 17 bytes followed by one or
            # two path names. Studies on various large-scale real-world repositories
            # found 54 bytes a reasonable upper limit for the average path names.
            # Copy entries are ignored for the sake of this estimate.
            self._map = parsers.dict_new_presized(len(st) // 71)

        # Python's garbage collector triggers a GC each time a certain number
        # of container objects (the number being defined by
        # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
        # for each file in the dirstate. The C version then immediately marks
        # them as not to be tracked by the collector. However, this has no
        # effect on when GCs are triggered, only on what objects the GC looks
        # into. This means that O(number of files) GCs are unavoidable.
        # Depending on when in the process's lifetime the dirstate is parsed,
        # this can get very expensive. As a workaround, disable GC while
        # parsing the dirstate.
        #
        # (we cannot decorate the function directly since it is in a C module)
        parse_dirstate = util.nogc(parsers.parse_dirstate)
        p = parse_dirstate(self._map, self.copymap, st)
        if not self._dirtyparents:
            self.setparents(*p)

        # Avoid excess attribute lookups by fast pathing certain checks
        self.__contains__ = self._map.__contains__
        self.__getitem__ = self._map.__getitem__
        self.get = self._map.get

    def write(self, st, now):
        st.write(
            parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
        )
        st.close()
        self._dirtyparents = False
        self.nonnormalset, self.otherparentset = self.nonnormalentries()

    @propertycache
    def nonnormalset(self):
        nonnorm, otherparents = self.nonnormalentries()
        self.otherparentset = otherparents
        return nonnorm

    @propertycache
    def otherparentset(self):
        nonnorm, otherparents = self.nonnormalentries()
        self.nonnormalset = nonnorm
        return otherparents

    def non_normal_or_other_parent_paths(self):
        return self.nonnormalset.union(self.otherparentset)

    @propertycache
    def identity(self):
        self._map
        return self.identity

    @propertycache
    def dirfoldmap(self):
        f = {}
        normcase = util.normcase
        for name in self._dirs:
            f[normcase(name)] = name
        return f


if rustmod is not None:

    class dirstatemap(object):
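        # This class shadows the pure-Python dirstatemap above when the Rust
        # extension is available; most methods delegate to the underlying
        # rustmod.DirstateMap instance held in the `_rustmap` property.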
1799 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
1802 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
1800 self._use_dirstate_v2 = use_dirstate_v2
1803 self._use_dirstate_v2 = use_dirstate_v2
1801 self._nodeconstants = nodeconstants
1804 self._nodeconstants = nodeconstants
1802 self._ui = ui
1805 self._ui = ui
1803 self._opener = opener
1806 self._opener = opener
1804 self._root = root
1807 self._root = root
1805 self._filename = b'dirstate'
1808 self._filename = b'dirstate'
1806 self._nodelen = 20 # Also update Rust code when changing this!
1809 self._nodelen = 20 # Also update Rust code when changing this!
1807 self._parents = None
1810 self._parents = None
1808 self._dirtyparents = False
1811 self._dirtyparents = False
1809
1812
1810 # for consistent view between _pl() and _read() invocations
1813 # for consistent view between _pl() and _read() invocations
1811 self._pendingmode = None
1814 self._pendingmode = None
1812
1815
1813 self._use_dirstate_tree = self._ui.configbool(
1816 self._use_dirstate_tree = self._ui.configbool(
1814 b"experimental",
1817 b"experimental",
1815 b"dirstate-tree.in-memory",
1818 b"dirstate-tree.in-memory",
1816 False,
1819 False,
1817 )
1820 )
1818
1821
1819 def addfile(self, *args, **kwargs):
1822 def addfile(self, *args, **kwargs):
1820 return self._rustmap.addfile(*args, **kwargs)
1823 return self._rustmap.addfile(*args, **kwargs)
1821
1824
1822 def removefile(self, *args, **kwargs):
1825 def removefile(self, *args, **kwargs):
1823 return self._rustmap.removefile(*args, **kwargs)
1826 return self._rustmap.removefile(*args, **kwargs)
1824
1827
1825 def dropfile(self, *args, **kwargs):
1828 def dropfile(self, *args, **kwargs):
1826 return self._rustmap.dropfile(*args, **kwargs)
1829 return self._rustmap.dropfile(*args, **kwargs)
1827
1830
1828 def clearambiguoustimes(self, *args, **kwargs):
1831 def clearambiguoustimes(self, *args, **kwargs):
1829 return self._rustmap.clearambiguoustimes(*args, **kwargs)
1832 return self._rustmap.clearambiguoustimes(*args, **kwargs)
1830
1833
1831 def nonnormalentries(self):
1834 def nonnormalentries(self):
1832 return self._rustmap.nonnormalentries()
1835 return self._rustmap.nonnormalentries()
1833
1836
1834 def get(self, *args, **kwargs):
1837 def get(self, *args, **kwargs):
1835 return self._rustmap.get(*args, **kwargs)
1838 return self._rustmap.get(*args, **kwargs)
1836
1839
1837 @property
1840 @property
1838 def copymap(self):
1841 def copymap(self):
1839 return self._rustmap.copymap()
1842 return self._rustmap.copymap()
1840
1843
1841 def directories(self):
1844 def directories(self):
1842 return self._rustmap.directories()
1845 return self._rustmap.directories()
1843
1846
1844 def preload(self):
1847 def preload(self):
1845 self._rustmap
1848 self._rustmap
1846
1849
1847 def clear(self):
1850 def clear(self):
1848 self._rustmap.clear()
1851 self._rustmap.clear()
1849 self.setparents(
1852 self.setparents(
1850 self._nodeconstants.nullid, self._nodeconstants.nullid
1853 self._nodeconstants.nullid, self._nodeconstants.nullid
1851 )
1854 )
1852 util.clearcachedproperty(self, b"_dirs")
1855 util.clearcachedproperty(self, b"_dirs")
1853 util.clearcachedproperty(self, b"_alldirs")
1856 util.clearcachedproperty(self, b"_alldirs")
1854 util.clearcachedproperty(self, b"dirfoldmap")
1857 util.clearcachedproperty(self, b"dirfoldmap")
1855
1858
1856 def items(self):
1859 def items(self):
1857 return self._rustmap.items()
1860 return self._rustmap.items()
1858
1861
1859 def keys(self):
1862 def keys(self):
1860 return iter(self._rustmap)
1863 return iter(self._rustmap)
1861
1864
1862 def __contains__(self, key):
1865 def __contains__(self, key):
1863 return key in self._rustmap
1866 return key in self._rustmap
1864
1867
1865 def __getitem__(self, item):
1868 def __getitem__(self, item):
1866 return self._rustmap[item]
1869 return self._rustmap[item]
1867
1870
1868 def __len__(self):
1871 def __len__(self):
1869 return len(self._rustmap)
1872 return len(self._rustmap)
1870
1873
1871 def __iter__(self):
1874 def __iter__(self):
1872 return iter(self._rustmap)
1875 return iter(self._rustmap)
1873
1876
1874 # forward for python2,3 compat
1877 # forward for python2,3 compat
1875 iteritems = items
1878 iteritems = items
1876
1879
1877 def _opendirstatefile(self):
1880 def _opendirstatefile(self):
1878 fp, mode = txnutil.trypending(
1881 fp, mode = txnutil.trypending(
1879 self._root, self._opener, self._filename
1882 self._root, self._opener, self._filename
1880 )
1883 )
1881 if self._pendingmode is not None and self._pendingmode != mode:
1884 if self._pendingmode is not None and self._pendingmode != mode:
1882 fp.close()
1885 fp.close()
1883 raise error.Abort(
1886 raise error.Abort(
1884 _(b'working directory state may be changed parallelly')
1887 _(b'working directory state may be changed parallelly')
1885 )
1888 )
1886 self._pendingmode = mode
1889 self._pendingmode = mode
1887 return fp
1890 return fp
1888
1891
1889 def setparents(self, p1, p2):
1892 def setparents(self, p1, p2):
1890 self._parents = (p1, p2)
1893 self._parents = (p1, p2)
1891 self._dirtyparents = True
1894 self._dirtyparents = True
1892
1895
    def parents(self):
        if not self._parents:
            if self._use_dirstate_v2:
                offset = len(rustmod.V2_FORMAT_MARKER)
            else:
                offset = 0
            read_len = offset + self._nodelen * 2
            try:
                fp = self._opendirstatefile()
                st = fp.read(read_len)
                fp.close()
            except IOError as err:
                if err.errno != errno.ENOENT:
                    raise
                # File doesn't exist, so the current state is empty
                st = b''

            l = len(st)
            if l == read_len:
                st = st[offset:]
                self._parents = (
                    st[: self._nodelen],
                    st[self._nodelen : 2 * self._nodelen],
                )
            elif l == 0:
                self._parents = (
                    self._nodeconstants.nullid,
                    self._nodeconstants.nullid,
                )
            else:
                raise error.Abort(
                    _(b'working directory state appears damaged!')
                )

        return self._parents

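    # Illustration only: the header parsed above is an optional format
    # marker followed by the two parent nodes laid out back to back.  A
    # rough sketch of the same parsing, assuming 20-byte sha1 nodes and a
    # hypothetical ``data`` buffer holding the start of the dirstate file:
    #
    #     offset = len(rustmod.V2_FORMAT_MARKER) if use_v2 else 0
    #     p1 = data[offset : offset + 20]
    #     p2 = data[offset + 20 : offset + 40]
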
    @propertycache
    def _rustmap(self):
        """
        Fills the Dirstatemap when called.
        """
        # ignore HG_PENDING because identity is used only for writing
        self.identity = util.filestat.frompath(
            self._opener.join(self._filename)
        )

        try:
            fp = self._opendirstatefile()
            try:
                st = fp.read()
            finally:
                fp.close()
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            st = b''

        self._rustmap, parents = rustmod.DirstateMap.new(
            self._use_dirstate_tree, self._use_dirstate_v2, st
        )

        if parents and not self._dirtyparents:
            self.setparents(*parents)

        self.__contains__ = self._rustmap.__contains__
        self.__getitem__ = self._rustmap.__getitem__
        self.get = self._rustmap.get
        return self._rustmap

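    # Illustration only: @propertycache behaves like a memoizing property;
    # the first access runs the function and stores the result on the
    # instance, so later accesses never re-read the file.  A rough generic
    # equivalent (not the actual Mercurial implementation) would be:
    #
    #     class cachedprop(object):
    #         def __init__(self, func):
    #             self.func, self.name = func, func.__name__
    #         def __get__(self, obj, objtype=None):
    #             value = self.func(obj)
    #             obj.__dict__[self.name] = value
    #             return value
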
    def write(self, st, now):
        parents = self.parents()
        packed = self._rustmap.write(
            self._use_dirstate_v2, parents[0], parents[1], now
        )
        st.write(packed)
        st.close()
        self._dirtyparents = False

    @propertycache
    def filefoldmap(self):
        """Returns a dictionary mapping normalized case paths to their
        non-normalized versions.
        """
        return self._rustmap.filefoldmapasdict()

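    # Illustration only: on a case-insensitive filesystem the fold map lets
    # a lookup by any spelling resolve to the spelling actually tracked.
    # For a repository tracking b'README.txt' the map might (hypothetically)
    # contain:
    #
    #     filefoldmap == {b'readme.txt': b'README.txt'}
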
    def hastrackeddir(self, d):
        return self._rustmap.hastrackeddir(d)

    def hasdir(self, d):
        return self._rustmap.hasdir(d)

    @propertycache
    def identity(self):
        # loading _rustmap assigns self.identity as a side effect
        self._rustmap
        return self.identity

    @property
    def nonnormalset(self):
        nonnorm = self._rustmap.non_normal_entries()
        return nonnorm

    @propertycache
    def otherparentset(self):
        otherparents = self._rustmap.other_parent_entries()
        return otherparents

    def non_normal_or_other_parent_paths(self):
        return self._rustmap.non_normal_or_other_parent_paths()

    @propertycache
    def dirfoldmap(self):
        f = {}
        normcase = util.normcase
        for name, _pseudo_entry in self.directories():
            f[normcase(name)] = name
        return f
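
Both fold maps serve the same purpose: map a case-normalized path to the spelling recorded in the dirstate, so that lookups on case-insensitive filesystems resolve to the tracked name. A minimal, self-contained sketch of that idea follows; the helper names are hypothetical and not part of dirstate.py, and plain lowercasing stands in for util.normcase:

def _normcase(path):
    # stand-in for util.normcase on a case-insensitive platform
    return path.lower()


def build_foldmap(paths):
    """Map case-normalized paths to the spelling actually tracked."""
    return {_normcase(p): p for p in paths}


foldmap = build_foldmap([b'README.txt', b'Docs/Guide.rst'])
# a user-supplied spelling is normalized before the lookup
assert foldmap[_normcase(b'ReadMe.TXT')] == b'README.txt'
assert foldmap[_normcase(b'docs/guide.rst')] == b'Docs/Guide.rst'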