dirstate: no longer pass `oldstate` to the `dropfile`...
Author: marmoute
Changeset: r48324:6025353c (branch: default)
@@ -1,1437 +1,1435 @@
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .pycompat import delattr
17 from .pycompat import delattr
18
18
19 from hgdemandimport import tracing
19 from hgdemandimport import tracing
20
20
21 from . import (
21 from . import (
22 dirstatemap,
22 dirstatemap,
23 encoding,
23 encoding,
24 error,
24 error,
25 match as matchmod,
25 match as matchmod,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 sparse,
30 sparse,
31 util,
31 util,
32 )
32 )
33
33
34 from .interfaces import (
34 from .interfaces import (
35 dirstate as intdirstate,
35 dirstate as intdirstate,
36 util as interfaceutil,
36 util as interfaceutil,
37 )
37 )
38
38
39 parsers = policy.importmod('parsers')
39 parsers = policy.importmod('parsers')
40 rustmod = policy.importrust('dirstate')
40 rustmod = policy.importrust('dirstate')
41
41
42 SUPPORTS_DIRSTATE_V2 = rustmod is not None
42 SUPPORTS_DIRSTATE_V2 = rustmod is not None
43
43
44 propertycache = util.propertycache
44 propertycache = util.propertycache
45 filecache = scmutil.filecache
45 filecache = scmutil.filecache
46 _rangemask = dirstatemap.rangemask
46 _rangemask = dirstatemap.rangemask
47
47
48 dirstatetuple = parsers.dirstatetuple
48 dirstatetuple = parsers.dirstatetuple
49
49
50
50
51 class repocache(filecache):
51 class repocache(filecache):
52 """filecache for files in .hg/"""
52 """filecache for files in .hg/"""
53
53
54 def join(self, obj, fname):
54 def join(self, obj, fname):
55 return obj._opener.join(fname)
55 return obj._opener.join(fname)
56
56
57
57
58 class rootcache(filecache):
58 class rootcache(filecache):
59 """filecache for files in the repository root"""
59 """filecache for files in the repository root"""
60
60
61 def join(self, obj, fname):
61 def join(self, obj, fname):
62 return obj._join(fname)
62 return obj._join(fname)
63
63
64
64
65 def _getfsnow(vfs):
65 def _getfsnow(vfs):
66 '''Get "now" timestamp on filesystem'''
66 '''Get "now" timestamp on filesystem'''
67 tmpfd, tmpname = vfs.mkstemp()
67 tmpfd, tmpname = vfs.mkstemp()
68 try:
68 try:
69 return os.fstat(tmpfd)[stat.ST_MTIME]
69 return os.fstat(tmpfd)[stat.ST_MTIME]
70 finally:
70 finally:
71 os.close(tmpfd)
71 os.close(tmpfd)
72 vfs.unlink(tmpname)
72 vfs.unlink(tmpname)
73
73
74
74
75 @interfaceutil.implementer(intdirstate.idirstate)
75 @interfaceutil.implementer(intdirstate.idirstate)
76 class dirstate(object):
76 class dirstate(object):
77 def __init__(
77 def __init__(
78 self,
78 self,
79 opener,
79 opener,
80 ui,
80 ui,
81 root,
81 root,
82 validate,
82 validate,
83 sparsematchfn,
83 sparsematchfn,
84 nodeconstants,
84 nodeconstants,
85 use_dirstate_v2,
85 use_dirstate_v2,
86 ):
86 ):
87 """Create a new dirstate object.
87 """Create a new dirstate object.
88
88
89 opener is an open()-like callable that can be used to open the
89 opener is an open()-like callable that can be used to open the
90 dirstate file; root is the root of the directory tracked by
90 dirstate file; root is the root of the directory tracked by
91 the dirstate.
91 the dirstate.
92 """
92 """
93 self._use_dirstate_v2 = use_dirstate_v2
93 self._use_dirstate_v2 = use_dirstate_v2
94 self._nodeconstants = nodeconstants
94 self._nodeconstants = nodeconstants
95 self._opener = opener
95 self._opener = opener
96 self._validate = validate
96 self._validate = validate
97 self._root = root
97 self._root = root
98 self._sparsematchfn = sparsematchfn
98 self._sparsematchfn = sparsematchfn
99 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
99 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
100 # UNC path pointing to root share (issue4557)
100 # UNC path pointing to root share (issue4557)
101 self._rootdir = pathutil.normasprefix(root)
101 self._rootdir = pathutil.normasprefix(root)
102 self._dirty = False
102 self._dirty = False
103 self._lastnormaltime = 0
103 self._lastnormaltime = 0
104 self._ui = ui
104 self._ui = ui
105 self._filecache = {}
105 self._filecache = {}
106 self._parentwriters = 0
106 self._parentwriters = 0
107 self._filename = b'dirstate'
107 self._filename = b'dirstate'
108 self._pendingfilename = b'%s.pending' % self._filename
108 self._pendingfilename = b'%s.pending' % self._filename
109 self._plchangecallbacks = {}
109 self._plchangecallbacks = {}
110 self._origpl = None
110 self._origpl = None
111 self._updatedfiles = set()
111 self._updatedfiles = set()
112 self._mapcls = dirstatemap.dirstatemap
112 self._mapcls = dirstatemap.dirstatemap
113 # Access and cache cwd early, so we don't access it for the first time
113 # Access and cache cwd early, so we don't access it for the first time
114 # after a working-copy update caused it to not exist (accessing it then
114 # after a working-copy update caused it to not exist (accessing it then
115 # raises an exception).
115 # raises an exception).
116 self._cwd
116 self._cwd
117
117
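# Illustrative note (not part of upstream dirstate.py): a dirstate is not
# normally constructed by hand; callers obtain the instance owned by a local
# repository, roughly (assuming an existing `repo` object):
#
#     ds = repo.dirstate       # the repository's cached dirstate
#     ds.prefetch_parents()    # optionally force-load the parents early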
118 def prefetch_parents(self):
118 def prefetch_parents(self):
119 """make sure the parents are loaded
119 """make sure the parents are loaded
120
120
121 Used to avoid a race condition.
121 Used to avoid a race condition.
122 """
122 """
123 self._pl
123 self._pl
124
124
125 @contextlib.contextmanager
125 @contextlib.contextmanager
126 def parentchange(self):
126 def parentchange(self):
127 """Context manager for handling dirstate parents.
127 """Context manager for handling dirstate parents.
128
128
129 If an exception occurs in the scope of the context manager,
129 If an exception occurs in the scope of the context manager,
130 the incoherent dirstate won't be written when wlock is
130 the incoherent dirstate won't be written when wlock is
131 released.
131 released.
132 """
132 """
133 self._parentwriters += 1
133 self._parentwriters += 1
134 yield
134 yield
135 # Typically we want the "undo" step of a context manager in a
135 # Typically we want the "undo" step of a context manager in a
136 # finally block so it happens even when an exception
136 # finally block so it happens even when an exception
137 # occurs. In this case, however, we only want to decrement
137 # occurs. In this case, however, we only want to decrement
138 # parentwriters if the code in the with statement exits
138 # parentwriters if the code in the with statement exits
139 # normally, so we don't have a try/finally here on purpose.
139 # normally, so we don't have a try/finally here on purpose.
140 self._parentwriters -= 1
140 self._parentwriters -= 1
141
141
142 def pendingparentchange(self):
142 def pendingparentchange(self):
143 """Returns true if the dirstate is in the middle of a set of changes
143 """Returns true if the dirstate is in the middle of a set of changes
144 that modify the dirstate parent.
144 that modify the dirstate parent.
145 """
145 """
146 return self._parentwriters > 0
146 return self._parentwriters > 0
147
147
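# Illustrative sketch (not part of upstream dirstate.py): setparents() below
# refuses to run outside of this context manager, so parent updates are
# wrapped like this (assuming `ds` is a dirstate and p1/p2 are binary nodes):
#
#     with ds.parentchange():
#         ds.setparents(p1, p2)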
148 @propertycache
148 @propertycache
149 def _map(self):
149 def _map(self):
150 """Return the dirstate contents (see documentation for dirstatemap)."""
150 """Return the dirstate contents (see documentation for dirstatemap)."""
151 self._map = self._mapcls(
151 self._map = self._mapcls(
152 self._ui,
152 self._ui,
153 self._opener,
153 self._opener,
154 self._root,
154 self._root,
155 self._nodeconstants,
155 self._nodeconstants,
156 self._use_dirstate_v2,
156 self._use_dirstate_v2,
157 )
157 )
158 return self._map
158 return self._map
159
159
160 @property
160 @property
161 def _sparsematcher(self):
161 def _sparsematcher(self):
162 """The matcher for the sparse checkout.
162 """The matcher for the sparse checkout.
163
163
164 The working directory may not include every file from a manifest. The
164 The working directory may not include every file from a manifest. The
165 matcher obtained by this property will match a path if it is to be
165 matcher obtained by this property will match a path if it is to be
166 included in the working directory.
166 included in the working directory.
167 """
167 """
168 # TODO there is potential to cache this property. For now, the matcher
168 # TODO there is potential to cache this property. For now, the matcher
169 # is resolved on every access. (But the called function does use a
169 # is resolved on every access. (But the called function does use a
170 # cache to keep the lookup fast.)
170 # cache to keep the lookup fast.)
171 return self._sparsematchfn()
171 return self._sparsematchfn()
172
172
173 @repocache(b'branch')
173 @repocache(b'branch')
174 def _branch(self):
174 def _branch(self):
175 try:
175 try:
176 return self._opener.read(b"branch").strip() or b"default"
176 return self._opener.read(b"branch").strip() or b"default"
177 except IOError as inst:
177 except IOError as inst:
178 if inst.errno != errno.ENOENT:
178 if inst.errno != errno.ENOENT:
179 raise
179 raise
180 return b"default"
180 return b"default"
181
181
182 @property
182 @property
183 def _pl(self):
183 def _pl(self):
184 return self._map.parents()
184 return self._map.parents()
185
185
186 def hasdir(self, d):
186 def hasdir(self, d):
187 return self._map.hastrackeddir(d)
187 return self._map.hastrackeddir(d)
188
188
189 @rootcache(b'.hgignore')
189 @rootcache(b'.hgignore')
190 def _ignore(self):
190 def _ignore(self):
191 files = self._ignorefiles()
191 files = self._ignorefiles()
192 if not files:
192 if not files:
193 return matchmod.never()
193 return matchmod.never()
194
194
195 pats = [b'include:%s' % f for f in files]
195 pats = [b'include:%s' % f for f in files]
196 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
196 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
197
197
198 @propertycache
198 @propertycache
199 def _slash(self):
199 def _slash(self):
200 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
200 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
201
201
202 @propertycache
202 @propertycache
203 def _checklink(self):
203 def _checklink(self):
204 return util.checklink(self._root)
204 return util.checklink(self._root)
205
205
206 @propertycache
206 @propertycache
207 def _checkexec(self):
207 def _checkexec(self):
208 return bool(util.checkexec(self._root))
208 return bool(util.checkexec(self._root))
209
209
210 @propertycache
210 @propertycache
211 def _checkcase(self):
211 def _checkcase(self):
212 return not util.fscasesensitive(self._join(b'.hg'))
212 return not util.fscasesensitive(self._join(b'.hg'))
213
213
214 def _join(self, f):
214 def _join(self, f):
215 # much faster than os.path.join()
215 # much faster than os.path.join()
216 # it's safe because f is always a relative path
216 # it's safe because f is always a relative path
217 return self._rootdir + f
217 return self._rootdir + f
218
218
219 def flagfunc(self, buildfallback):
219 def flagfunc(self, buildfallback):
220 if self._checklink and self._checkexec:
220 if self._checklink and self._checkexec:
221
221
222 def f(x):
222 def f(x):
223 try:
223 try:
224 st = os.lstat(self._join(x))
224 st = os.lstat(self._join(x))
225 if util.statislink(st):
225 if util.statislink(st):
226 return b'l'
226 return b'l'
227 if util.statisexec(st):
227 if util.statisexec(st):
228 return b'x'
228 return b'x'
229 except OSError:
229 except OSError:
230 pass
230 pass
231 return b''
231 return b''
232
232
233 return f
233 return f
234
234
235 fallback = buildfallback()
235 fallback = buildfallback()
236 if self._checklink:
236 if self._checklink:
237
237
238 def f(x):
238 def f(x):
239 if os.path.islink(self._join(x)):
239 if os.path.islink(self._join(x)):
240 return b'l'
240 return b'l'
241 if b'x' in fallback(x):
241 if b'x' in fallback(x):
242 return b'x'
242 return b'x'
243 return b''
243 return b''
244
244
245 return f
245 return f
246 if self._checkexec:
246 if self._checkexec:
247
247
248 def f(x):
248 def f(x):
249 if b'l' in fallback(x):
249 if b'l' in fallback(x):
250 return b'l'
250 return b'l'
251 if util.isexec(self._join(x)):
251 if util.isexec(self._join(x)):
252 return b'x'
252 return b'x'
253 return b''
253 return b''
254
254
255 return f
255 return f
256 else:
256 else:
257 return fallback
257 return fallback
258
258
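# Illustrative sketch (not part of upstream dirstate.py): flagfunc() returns a
# callable mapping a working-directory path to b'l' (symlink), b'x'
# (executable) or b''; `buildfallback` is only invoked when the filesystem
# cannot answer directly.  Assumed usage:
#
#     flags = ds.flagfunc(buildfallback=lambda: (lambda path: b''))
#     flags(b'script.sh')    # -> b'x' when the exec bit is set and supported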
259 @propertycache
259 @propertycache
260 def _cwd(self):
260 def _cwd(self):
261 # internal config: ui.forcecwd
261 # internal config: ui.forcecwd
262 forcecwd = self._ui.config(b'ui', b'forcecwd')
262 forcecwd = self._ui.config(b'ui', b'forcecwd')
263 if forcecwd:
263 if forcecwd:
264 return forcecwd
264 return forcecwd
265 return encoding.getcwd()
265 return encoding.getcwd()
266
266
267 def getcwd(self):
267 def getcwd(self):
268 """Return the path from which a canonical path is calculated.
268 """Return the path from which a canonical path is calculated.
269
269
270 This path should be used to resolve file patterns or to convert
270 This path should be used to resolve file patterns or to convert
271 canonical paths back to file paths for display. It shouldn't be
271 canonical paths back to file paths for display. It shouldn't be
272 used to get real file paths. Use vfs functions instead.
272 used to get real file paths. Use vfs functions instead.
273 """
273 """
274 cwd = self._cwd
274 cwd = self._cwd
275 if cwd == self._root:
275 if cwd == self._root:
276 return b''
276 return b''
277 # self._root ends with a path separator if self._root is '/' or 'C:\'
277 # self._root ends with a path separator if self._root is '/' or 'C:\'
278 rootsep = self._root
278 rootsep = self._root
279 if not util.endswithsep(rootsep):
279 if not util.endswithsep(rootsep):
280 rootsep += pycompat.ossep
280 rootsep += pycompat.ossep
281 if cwd.startswith(rootsep):
281 if cwd.startswith(rootsep):
282 return cwd[len(rootsep) :]
282 return cwd[len(rootsep) :]
283 else:
283 else:
284 # we're outside the repo. return an absolute path.
284 # we're outside the repo. return an absolute path.
285 return cwd
285 return cwd
286
286
287 def pathto(self, f, cwd=None):
287 def pathto(self, f, cwd=None):
288 if cwd is None:
288 if cwd is None:
289 cwd = self.getcwd()
289 cwd = self.getcwd()
290 path = util.pathto(self._root, cwd, f)
290 path = util.pathto(self._root, cwd, f)
291 if self._slash:
291 if self._slash:
292 return util.pconvert(path)
292 return util.pconvert(path)
293 return path
293 return path
294
294
295 def __getitem__(self, key):
295 def __getitem__(self, key):
296 """Return the current state of key (a filename) in the dirstate.
296 """Return the current state of key (a filename) in the dirstate.
297
297
298 States are:
298 States are:
299 n normal
299 n normal
300 m needs merging
300 m needs merging
301 r marked for removal
301 r marked for removal
302 a marked for addition
302 a marked for addition
303 ? not tracked
303 ? not tracked
304
304
305 XXX The "state" is a bit obscure to be in the "public" API. We should
305 XXX The "state" is a bit obscure to be in the "public" API. We should
306 consider migrating all users of this to go through the dirstate entry
306 consider migrating all users of this to go through the dirstate entry
307 instead.
307 instead.
308 """
308 """
309 entry = self._map.get(key)
309 entry = self._map.get(key)
310 if entry is not None:
310 if entry is not None:
311 return entry.state
311 return entry.state
312 return b'?'
312 return b'?'
313
313
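# Illustrative sketch (not part of upstream dirstate.py): the mapping
# interface exposes the one-letter codes documented above, e.g.:
#
#     ds[b'foo.c']      # -> b'n', b'm', b'r', b'a' or b'?'
#     b'foo.c' in ds    # -> True when the dirstate has an entry for the file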
314 def __contains__(self, key):
314 def __contains__(self, key):
315 return key in self._map
315 return key in self._map
316
316
317 def __iter__(self):
317 def __iter__(self):
318 return iter(sorted(self._map))
318 return iter(sorted(self._map))
319
319
320 def items(self):
320 def items(self):
321 return pycompat.iteritems(self._map)
321 return pycompat.iteritems(self._map)
322
322
323 iteritems = items
323 iteritems = items
324
324
325 def directories(self):
325 def directories(self):
326 return self._map.directories()
326 return self._map.directories()
327
327
328 def parents(self):
328 def parents(self):
329 return [self._validate(p) for p in self._pl]
329 return [self._validate(p) for p in self._pl]
330
330
331 def p1(self):
331 def p1(self):
332 return self._validate(self._pl[0])
332 return self._validate(self._pl[0])
333
333
334 def p2(self):
334 def p2(self):
335 return self._validate(self._pl[1])
335 return self._validate(self._pl[1])
336
336
337 @property
337 @property
338 def in_merge(self):
338 def in_merge(self):
339 """True if a merge is in progress"""
339 """True if a merge is in progress"""
340 return self._pl[1] != self._nodeconstants.nullid
340 return self._pl[1] != self._nodeconstants.nullid
341
341
342 def branch(self):
342 def branch(self):
343 return encoding.tolocal(self._branch)
343 return encoding.tolocal(self._branch)
344
344
345 def setparents(self, p1, p2=None):
345 def setparents(self, p1, p2=None):
346 """Set dirstate parents to p1 and p2.
346 """Set dirstate parents to p1 and p2.
347
347
348 When moving from two parents to one, "merged" entries are
348 When moving from two parents to one, "merged" entries are
349 adjusted to normal and previous copy records discarded and
349 adjusted to normal and previous copy records discarded and
350 returned by the call.
350 returned by the call.
351
351
352 See localrepo.setparents()
352 See localrepo.setparents()
353 """
353 """
354 if p2 is None:
354 if p2 is None:
355 p2 = self._nodeconstants.nullid
355 p2 = self._nodeconstants.nullid
356 if self._parentwriters == 0:
356 if self._parentwriters == 0:
357 raise ValueError(
357 raise ValueError(
358 b"cannot set dirstate parent outside of "
358 b"cannot set dirstate parent outside of "
359 b"dirstate.parentchange context manager"
359 b"dirstate.parentchange context manager"
360 )
360 )
361
361
362 self._dirty = True
362 self._dirty = True
363 oldp2 = self._pl[1]
363 oldp2 = self._pl[1]
364 if self._origpl is None:
364 if self._origpl is None:
365 self._origpl = self._pl
365 self._origpl = self._pl
366 self._map.setparents(p1, p2)
366 self._map.setparents(p1, p2)
367 copies = {}
367 copies = {}
368 if (
368 if (
369 oldp2 != self._nodeconstants.nullid
369 oldp2 != self._nodeconstants.nullid
370 and p2 == self._nodeconstants.nullid
370 and p2 == self._nodeconstants.nullid
371 ):
371 ):
372 candidatefiles = self._map.non_normal_or_other_parent_paths()
372 candidatefiles = self._map.non_normal_or_other_parent_paths()
373
373
374 for f in candidatefiles:
374 for f in candidatefiles:
375 s = self._map.get(f)
375 s = self._map.get(f)
376 if s is None:
376 if s is None:
377 continue
377 continue
378
378
379 # Discard "merged" markers when moving away from a merge state
379 # Discard "merged" markers when moving away from a merge state
380 if s.merged:
380 if s.merged:
381 source = self._map.copymap.get(f)
381 source = self._map.copymap.get(f)
382 if source:
382 if source:
383 copies[f] = source
383 copies[f] = source
384 self.normallookup(f)
384 self.normallookup(f)
385 # Also fix up otherparent markers
385 # Also fix up otherparent markers
386 elif s.from_p2:
386 elif s.from_p2:
387 source = self._map.copymap.get(f)
387 source = self._map.copymap.get(f)
388 if source:
388 if source:
389 copies[f] = source
389 copies[f] = source
390 self.add(f)
390 self.add(f)
391 return copies
391 return copies
392
392
393 def setbranch(self, branch):
393 def setbranch(self, branch):
394 self.__class__._branch.set(self, encoding.fromlocal(branch))
394 self.__class__._branch.set(self, encoding.fromlocal(branch))
395 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
395 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
396 try:
396 try:
397 f.write(self._branch + b'\n')
397 f.write(self._branch + b'\n')
398 f.close()
398 f.close()
399
399
400 # make sure filecache has the correct stat info for _branch after
400 # make sure filecache has the correct stat info for _branch after
401 # replacing the underlying file
401 # replacing the underlying file
402 ce = self._filecache[b'_branch']
402 ce = self._filecache[b'_branch']
403 if ce:
403 if ce:
404 ce.refresh()
404 ce.refresh()
405 except: # re-raises
405 except: # re-raises
406 f.discard()
406 f.discard()
407 raise
407 raise
408
408
409 def invalidate(self):
409 def invalidate(self):
410 """Causes the next access to reread the dirstate.
410 """Causes the next access to reread the dirstate.
411
411
412 This is different from localrepo.invalidatedirstate() because it always
412 This is different from localrepo.invalidatedirstate() because it always
413 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
413 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
414 check whether the dirstate has changed before rereading it."""
414 check whether the dirstate has changed before rereading it."""
415
415
416 for a in ("_map", "_branch", "_ignore"):
416 for a in ("_map", "_branch", "_ignore"):
417 if a in self.__dict__:
417 if a in self.__dict__:
418 delattr(self, a)
418 delattr(self, a)
419 self._lastnormaltime = 0
419 self._lastnormaltime = 0
420 self._dirty = False
420 self._dirty = False
421 self._updatedfiles.clear()
421 self._updatedfiles.clear()
422 self._parentwriters = 0
422 self._parentwriters = 0
423 self._origpl = None
423 self._origpl = None
424
424
425 def copy(self, source, dest):
425 def copy(self, source, dest):
426 """Mark dest as a copy of source. Unmark dest if source is None."""
426 """Mark dest as a copy of source. Unmark dest if source is None."""
427 if source == dest:
427 if source == dest:
428 return
428 return
429 self._dirty = True
429 self._dirty = True
430 if source is not None:
430 if source is not None:
431 self._map.copymap[dest] = source
431 self._map.copymap[dest] = source
432 self._updatedfiles.add(source)
432 self._updatedfiles.add(source)
433 self._updatedfiles.add(dest)
433 self._updatedfiles.add(dest)
434 elif self._map.copymap.pop(dest, None):
434 elif self._map.copymap.pop(dest, None):
435 self._updatedfiles.add(dest)
435 self._updatedfiles.add(dest)
436
436
437 def copied(self, file):
437 def copied(self, file):
438 return self._map.copymap.get(file, None)
438 return self._map.copymap.get(file, None)
439
439
440 def copies(self):
440 def copies(self):
441 return self._map.copymap
441 return self._map.copymap
442
442
443 def _addpath(
443 def _addpath(
444 self,
444 self,
445 f,
445 f,
446 mode=0,
446 mode=0,
447 size=None,
447 size=None,
448 mtime=None,
448 mtime=None,
449 added=False,
449 added=False,
450 merged=False,
450 merged=False,
451 from_p2=False,
451 from_p2=False,
452 possibly_dirty=False,
452 possibly_dirty=False,
453 ):
453 ):
454 entry = self._map.get(f)
454 entry = self._map.get(f)
455 if added or entry is not None and entry.removed:
455 if added or entry is not None and entry.removed:
456 scmutil.checkfilename(f)
456 scmutil.checkfilename(f)
457 if self._map.hastrackeddir(f):
457 if self._map.hastrackeddir(f):
458 msg = _(b'directory %r already in dirstate')
458 msg = _(b'directory %r already in dirstate')
459 msg %= pycompat.bytestr(f)
459 msg %= pycompat.bytestr(f)
460 raise error.Abort(msg)
460 raise error.Abort(msg)
461 # shadows
461 # shadows
462 for d in pathutil.finddirs(f):
462 for d in pathutil.finddirs(f):
463 if self._map.hastrackeddir(d):
463 if self._map.hastrackeddir(d):
464 break
464 break
465 entry = self._map.get(d)
465 entry = self._map.get(d)
466 if entry is not None and not entry.removed:
466 if entry is not None and not entry.removed:
467 msg = _(b'file %r in dirstate clashes with %r')
467 msg = _(b'file %r in dirstate clashes with %r')
468 msg %= (pycompat.bytestr(d), pycompat.bytestr(f))
468 msg %= (pycompat.bytestr(d), pycompat.bytestr(f))
469 raise error.Abort(msg)
469 raise error.Abort(msg)
470 self._dirty = True
470 self._dirty = True
471 self._updatedfiles.add(f)
471 self._updatedfiles.add(f)
472 self._map.addfile(
472 self._map.addfile(
473 f,
473 f,
474 mode=mode,
474 mode=mode,
475 size=size,
475 size=size,
476 mtime=mtime,
476 mtime=mtime,
477 added=added,
477 added=added,
478 merged=merged,
478 merged=merged,
479 from_p2=from_p2,
479 from_p2=from_p2,
480 possibly_dirty=possibly_dirty,
480 possibly_dirty=possibly_dirty,
481 )
481 )
482
482
483 def normal(self, f, parentfiledata=None):
483 def normal(self, f, parentfiledata=None):
484 """Mark a file normal and clean.
484 """Mark a file normal and clean.
485
485
486 parentfiledata: (mode, size, mtime) of the clean file
486 parentfiledata: (mode, size, mtime) of the clean file
487
487
488 parentfiledata should be computed from memory (for mode,
488 parentfiledata should be computed from memory (for mode,
489 size), at or as close as possible to the point where we
489 size), at or as close as possible to the point where we
490 determined the file was clean, to limit the risk of the
490 determined the file was clean, to limit the risk of the
491 file having been changed by an external process between the
491 file having been changed by an external process between the
492 moment where the file was determined to be clean and now."""
492 moment where the file was determined to be clean and now."""
493 if parentfiledata:
493 if parentfiledata:
494 (mode, size, mtime) = parentfiledata
494 (mode, size, mtime) = parentfiledata
495 else:
495 else:
496 s = os.lstat(self._join(f))
496 s = os.lstat(self._join(f))
497 mode = s.st_mode
497 mode = s.st_mode
498 size = s.st_size
498 size = s.st_size
499 mtime = s[stat.ST_MTIME]
499 mtime = s[stat.ST_MTIME]
500 self._addpath(f, mode=mode, size=size, mtime=mtime)
500 self._addpath(f, mode=mode, size=size, mtime=mtime)
501 self._map.copymap.pop(f, None)
501 self._map.copymap.pop(f, None)
502 if f in self._map.nonnormalset:
502 if f in self._map.nonnormalset:
503 self._map.nonnormalset.remove(f)
503 self._map.nonnormalset.remove(f)
504 if mtime > self._lastnormaltime:
504 if mtime > self._lastnormaltime:
505 # Remember the most recent modification timeslot for status(),
505 # Remember the most recent modification timeslot for status(),
506 # to make sure we won't miss future size-preserving file content
506 # to make sure we won't miss future size-preserving file content
507 # modifications that happen within the same timeslot.
507 # modifications that happen within the same timeslot.
508 self._lastnormaltime = mtime
508 self._lastnormaltime = mtime
509
509
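# Illustrative sketch (not part of upstream dirstate.py): a caller that
# already holds a fresh lstat() result can pass it as `parentfiledata` so
# normal() does not have to stat the file again (hypothetical path, using the
# private _join helper shown above):
#
#     st = os.lstat(ds._join(b'foo.c'))
#     ds.normal(b'foo.c', (st.st_mode, st.st_size, st[stat.ST_MTIME]))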
510 def normallookup(self, f):
510 def normallookup(self, f):
511 '''Mark a file normal, but possibly dirty.'''
511 '''Mark a file normal, but possibly dirty.'''
512 if self.in_merge:
512 if self.in_merge:
513 # if there is a merge going on and the file was either
513 # if there is a merge going on and the file was either
514 # "merged" or coming from other parent (-2) before
514 # "merged" or coming from other parent (-2) before
515 # being removed, restore that state.
515 # being removed, restore that state.
516 entry = self._map.get(f)
516 entry = self._map.get(f)
517 if entry is not None:
517 if entry is not None:
518 # XXX this should probably be dealt with at a lower level
518 # XXX this should probably be dealt with at a lower level
519 # (see `merged_removed` and `from_p2_removed`)
519 # (see `merged_removed` and `from_p2_removed`)
520 if entry.merged_removed or entry.from_p2_removed:
520 if entry.merged_removed or entry.from_p2_removed:
521 source = self._map.copymap.get(f)
521 source = self._map.copymap.get(f)
522 if entry.merged_removed:
522 if entry.merged_removed:
523 self.merge(f)
523 self.merge(f)
524 elif entry.from_p2_removed:
524 elif entry.from_p2_removed:
525 self.otherparent(f)
525 self.otherparent(f)
526 if source is not None:
526 if source is not None:
527 self.copy(source, f)
527 self.copy(source, f)
528 return
528 return
529 elif entry.merged or entry.from_p2:
529 elif entry.merged or entry.from_p2:
530 return
530 return
531 self._addpath(f, possibly_dirty=True)
531 self._addpath(f, possibly_dirty=True)
532 self._map.copymap.pop(f, None)
532 self._map.copymap.pop(f, None)
533
533
534 def otherparent(self, f):
534 def otherparent(self, f):
535 '''Mark as coming from the other parent, always dirty.'''
535 '''Mark as coming from the other parent, always dirty.'''
536 if not self.in_merge:
536 if not self.in_merge:
537 msg = _(b"setting %r to other parent only allowed in merges") % f
537 msg = _(b"setting %r to other parent only allowed in merges") % f
538 raise error.Abort(msg)
538 raise error.Abort(msg)
539 if f in self and self[f] == b'n':
539 if f in self and self[f] == b'n':
540 # merge-like
540 # merge-like
541 self._addpath(f, merged=True)
541 self._addpath(f, merged=True)
542 else:
542 else:
543 # add-like
543 # add-like
544 self._addpath(f, from_p2=True)
544 self._addpath(f, from_p2=True)
545 self._map.copymap.pop(f, None)
545 self._map.copymap.pop(f, None)
546
546
547 def add(self, f):
547 def add(self, f):
548 '''Mark a file added.'''
548 '''Mark a file added.'''
549 self._addpath(f, added=True)
549 self._addpath(f, added=True)
550 self._map.copymap.pop(f, None)
550 self._map.copymap.pop(f, None)
551
551
552 def remove(self, f):
552 def remove(self, f):
553 '''Mark a file removed.'''
553 '''Mark a file removed.'''
554 self._dirty = True
554 self._dirty = True
555 self._updatedfiles.add(f)
555 self._updatedfiles.add(f)
556 self._map.removefile(f, in_merge=self.in_merge)
556 self._map.removefile(f, in_merge=self.in_merge)
557
557
558 def merge(self, f):
558 def merge(self, f):
559 '''Mark a file merged.'''
559 '''Mark a file merged.'''
560 if not self.in_merge:
560 if not self.in_merge:
561 return self.normallookup(f)
561 return self.normallookup(f)
562 return self.otherparent(f)
562 return self.otherparent(f)
563
563
564 def drop(self, f):
564 def drop(self, f):
565 '''Drop a file from the dirstate'''
565 '''Drop a file from the dirstate'''
566 - oldstate = self[f]
567 - if self._map.dropfile(f, oldstate):
566 + if self._map.dropfile(f):
568 self._dirty = True
567 self._dirty = True
569 self._updatedfiles.add(f)
568 self._updatedfiles.add(f)
570 self._map.copymap.pop(f, None)
569 self._map.copymap.pop(f, None)
571
570
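# Editorial note on this changeset (not part of upstream dirstate.py):
# dropfile() is now called with the filename only; drop() no longer looks up
# and forwards the old state, so the method reduces to the new code above:
#
#     if self._map.dropfile(f):
#         self._dirty = True
#         self._updatedfiles.add(f)
#         self._map.copymap.pop(f, None)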
572 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
571 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
573 if exists is None:
572 if exists is None:
574 exists = os.path.lexists(os.path.join(self._root, path))
573 exists = os.path.lexists(os.path.join(self._root, path))
575 if not exists:
574 if not exists:
576 # Maybe a path component exists
575 # Maybe a path component exists
577 if not ignoremissing and b'/' in path:
576 if not ignoremissing and b'/' in path:
578 d, f = path.rsplit(b'/', 1)
577 d, f = path.rsplit(b'/', 1)
579 d = self._normalize(d, False, ignoremissing, None)
578 d = self._normalize(d, False, ignoremissing, None)
580 folded = d + b"/" + f
579 folded = d + b"/" + f
581 else:
580 else:
582 # No path components, preserve original case
581 # No path components, preserve original case
583 folded = path
582 folded = path
584 else:
583 else:
585 # recursively normalize leading directory components
584 # recursively normalize leading directory components
586 # against dirstate
585 # against dirstate
587 if b'/' in normed:
586 if b'/' in normed:
588 d, f = normed.rsplit(b'/', 1)
587 d, f = normed.rsplit(b'/', 1)
589 d = self._normalize(d, False, ignoremissing, True)
588 d = self._normalize(d, False, ignoremissing, True)
590 r = self._root + b"/" + d
589 r = self._root + b"/" + d
591 folded = d + b"/" + util.fspath(f, r)
590 folded = d + b"/" + util.fspath(f, r)
592 else:
591 else:
593 folded = util.fspath(normed, self._root)
592 folded = util.fspath(normed, self._root)
594 storemap[normed] = folded
593 storemap[normed] = folded
595
594
596 return folded
595 return folded
597
596
598 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
597 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
599 normed = util.normcase(path)
598 normed = util.normcase(path)
600 folded = self._map.filefoldmap.get(normed, None)
599 folded = self._map.filefoldmap.get(normed, None)
601 if folded is None:
600 if folded is None:
602 if isknown:
601 if isknown:
603 folded = path
602 folded = path
604 else:
603 else:
605 folded = self._discoverpath(
604 folded = self._discoverpath(
606 path, normed, ignoremissing, exists, self._map.filefoldmap
605 path, normed, ignoremissing, exists, self._map.filefoldmap
607 )
606 )
608 return folded
607 return folded
609
608
610 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
609 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
611 normed = util.normcase(path)
610 normed = util.normcase(path)
612 folded = self._map.filefoldmap.get(normed, None)
611 folded = self._map.filefoldmap.get(normed, None)
613 if folded is None:
612 if folded is None:
614 folded = self._map.dirfoldmap.get(normed, None)
613 folded = self._map.dirfoldmap.get(normed, None)
615 if folded is None:
614 if folded is None:
616 if isknown:
615 if isknown:
617 folded = path
616 folded = path
618 else:
617 else:
619 # store discovered result in dirfoldmap so that future
618 # store discovered result in dirfoldmap so that future
620 # normalizefile calls don't start matching directories
619 # normalizefile calls don't start matching directories
621 folded = self._discoverpath(
620 folded = self._discoverpath(
622 path, normed, ignoremissing, exists, self._map.dirfoldmap
621 path, normed, ignoremissing, exists, self._map.dirfoldmap
623 )
622 )
624 return folded
623 return folded
625
624
626 def normalize(self, path, isknown=False, ignoremissing=False):
625 def normalize(self, path, isknown=False, ignoremissing=False):
627 """
626 """
628 normalize the case of a pathname when on a casefolding filesystem
627 normalize the case of a pathname when on a casefolding filesystem
629
628
630 isknown specifies whether the filename came from walking the
629 isknown specifies whether the filename came from walking the
631 disk, to avoid extra filesystem access.
630 disk, to avoid extra filesystem access.
632
631
633 If ignoremissing is True, missing paths are returned
632 If ignoremissing is True, missing paths are returned
634 unchanged. Otherwise, we try harder to normalize possibly
633 unchanged. Otherwise, we try harder to normalize possibly
635 existing path components.
634 existing path components.
636
635
637 The normalized case is determined based on the following precedence:
636 The normalized case is determined based on the following precedence:
638
637
639 - version of name already stored in the dirstate
638 - version of name already stored in the dirstate
640 - version of name stored on disk
639 - version of name stored on disk
641 - version provided via command arguments
640 - version provided via command arguments
642 """
641 """
643
642
644 if self._checkcase:
643 if self._checkcase:
645 return self._normalize(path, isknown, ignoremissing)
644 return self._normalize(path, isknown, ignoremissing)
646 return path
645 return path
647
646
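# Illustrative sketch (not part of upstream dirstate.py): on a case-insensitive
# filesystem, normalize() folds a user-typed spelling back to the case already
# recorded in the dirstate or on disk, e.g.:
#
#     ds.normalize(b'SRC/Main.C')   # might return b'src/main.c' if that is
#                                   # the spelling the dirstate already holds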
648 def clear(self):
647 def clear(self):
649 self._map.clear()
648 self._map.clear()
650 self._lastnormaltime = 0
649 self._lastnormaltime = 0
651 self._updatedfiles.clear()
650 self._updatedfiles.clear()
652 self._dirty = True
651 self._dirty = True
653
652
654 def rebuild(self, parent, allfiles, changedfiles=None):
653 def rebuild(self, parent, allfiles, changedfiles=None):
655 if changedfiles is None:
654 if changedfiles is None:
656 # Rebuild entire dirstate
655 # Rebuild entire dirstate
657 to_lookup = allfiles
656 to_lookup = allfiles
658 to_drop = []
657 to_drop = []
659 lastnormaltime = self._lastnormaltime
658 lastnormaltime = self._lastnormaltime
660 self.clear()
659 self.clear()
661 self._lastnormaltime = lastnormaltime
660 self._lastnormaltime = lastnormaltime
662 elif len(changedfiles) < 10:
661 elif len(changedfiles) < 10:
663 # Avoid turning allfiles into a set, which can be expensive if it's
662 # Avoid turning allfiles into a set, which can be expensive if it's
664 # large.
663 # large.
665 to_lookup = []
664 to_lookup = []
666 to_drop = []
665 to_drop = []
667 for f in changedfiles:
666 for f in changedfiles:
668 if f in allfiles:
667 if f in allfiles:
669 to_lookup.append(f)
668 to_lookup.append(f)
670 else:
669 else:
671 to_drop.append(f)
670 to_drop.append(f)
672 else:
671 else:
673 changedfilesset = set(changedfiles)
672 changedfilesset = set(changedfiles)
674 to_lookup = changedfilesset & set(allfiles)
673 to_lookup = changedfilesset & set(allfiles)
675 to_drop = changedfilesset - to_lookup
674 to_drop = changedfilesset - to_lookup
676
675
677 if self._origpl is None:
676 if self._origpl is None:
678 self._origpl = self._pl
677 self._origpl = self._pl
679 self._map.setparents(parent, self._nodeconstants.nullid)
678 self._map.setparents(parent, self._nodeconstants.nullid)
680
679
681 for f in to_lookup:
680 for f in to_lookup:
682 self.normallookup(f)
681 self.normallookup(f)
683 for f in to_drop:
682 for f in to_drop:
684 self.drop(f)
683 self.drop(f)
685
684
686 self._dirty = True
685 self._dirty = True
687
686
688 def identity(self):
687 def identity(self):
689 """Return identity of dirstate itself to detect changing in storage
688 """Return identity of dirstate itself to detect changing in storage
690
689
691 If identity of previous dirstate is equal to this, writing
690 If identity of previous dirstate is equal to this, writing
692 changes based on the former dirstate out can keep consistency.
691 changes based on the former dirstate out can keep consistency.
693 """
692 """
694 return self._map.identity
693 return self._map.identity
695
694
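# Illustrative sketch (not part of upstream dirstate.py): callers can snapshot
# identity() before computing changes and only write if the on-disk dirstate
# was not replaced underneath them (assumed transaction `tr`):
#
#     old_identity = ds.identity()
#     # ... compute updates ...
#     if ds.identity() == old_identity:
#         ds.write(tr)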
696 def write(self, tr):
695 def write(self, tr):
697 if not self._dirty:
696 if not self._dirty:
698 return
697 return
699
698
700 filename = self._filename
699 filename = self._filename
701 if tr:
700 if tr:
702 # 'dirstate.write()' is not only for writing in-memory
701 # 'dirstate.write()' is not only for writing in-memory
703 # changes out, but also for dropping ambiguous timestamps;
702 # changes out, but also for dropping ambiguous timestamps;
704 # delayed writing re-raises the "ambiguous timestamp issue".
703 # delayed writing re-raises the "ambiguous timestamp issue".
705 # See also the wiki page below for detail:
704 # See also the wiki page below for detail:
706 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
705 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
707
706
708 # emulate dropping timestamp in 'parsers.pack_dirstate'
707 # emulate dropping timestamp in 'parsers.pack_dirstate'
709 now = _getfsnow(self._opener)
708 now = _getfsnow(self._opener)
710 self._map.clearambiguoustimes(self._updatedfiles, now)
709 self._map.clearambiguoustimes(self._updatedfiles, now)
711
710
712 # emulate that all 'dirstate.normal' results are written out
711 # emulate that all 'dirstate.normal' results are written out
713 self._lastnormaltime = 0
712 self._lastnormaltime = 0
714 self._updatedfiles.clear()
713 self._updatedfiles.clear()
715
714
716 # delay writing in-memory changes out
715 # delay writing in-memory changes out
717 tr.addfilegenerator(
716 tr.addfilegenerator(
718 b'dirstate',
717 b'dirstate',
719 (self._filename,),
718 (self._filename,),
720 self._writedirstate,
719 self._writedirstate,
721 location=b'plain',
720 location=b'plain',
722 )
721 )
723 return
722 return
724
723
725 st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
724 st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
726 self._writedirstate(st)
725 self._writedirstate(st)
727
726
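# Illustrative sketch (not part of upstream dirstate.py): with an open
# transaction the actual write is deferred through the transaction's file
# generator; without one it happens immediately:
#
#     ds.write(tr)      # deferred until the transaction writes b'dirstate'
#     ds.write(None)    # written to .hg/dirstate right away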
728 def addparentchangecallback(self, category, callback):
727 def addparentchangecallback(self, category, callback):
729 """add a callback to be called when the wd parents are changed
728 """add a callback to be called when the wd parents are changed
730
729
731 Callback will be called with the following arguments:
730 Callback will be called with the following arguments:
732 dirstate, (oldp1, oldp2), (newp1, newp2)
731 dirstate, (oldp1, oldp2), (newp1, newp2)
733
732
734 Category is a unique identifier to allow overwriting an old callback
733 Category is a unique identifier to allow overwriting an old callback
735 with a newer callback.
734 with a newer callback.
736 """
735 """
737 self._plchangecallbacks[category] = callback
736 self._plchangecallbacks[category] = callback
738
737
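# Illustrative sketch (not part of upstream dirstate.py): an extension could
# register a callback matching the documented signature (hypothetical names):
#
#     def _onparentschange(ds, oldparents, newparents):
#         ds._ui.debug(b'dirstate parents changed\n')
#
#     ds.addparentchangecallback(b'myextension', _onparentschange)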
739 def _writedirstate(self, st):
738 def _writedirstate(self, st):
740 # notify callbacks about parents change
739 # notify callbacks about parents change
741 if self._origpl is not None and self._origpl != self._pl:
740 if self._origpl is not None and self._origpl != self._pl:
742 for c, callback in sorted(
741 for c, callback in sorted(
743 pycompat.iteritems(self._plchangecallbacks)
742 pycompat.iteritems(self._plchangecallbacks)
744 ):
743 ):
745 callback(self, self._origpl, self._pl)
744 callback(self, self._origpl, self._pl)
746 self._origpl = None
745 self._origpl = None
747 # use the modification time of the newly created temporary file as the
746 # use the modification time of the newly created temporary file as the
748 # filesystem's notion of 'now'
747 # filesystem's notion of 'now'
749 now = util.fstat(st)[stat.ST_MTIME] & _rangemask
748 now = util.fstat(st)[stat.ST_MTIME] & _rangemask
750
749
751 # a large enough 'delaywrite' prevents 'pack_dirstate' from dropping
750 # a large enough 'delaywrite' prevents 'pack_dirstate' from dropping
752 # the timestamp of each entry in the dirstate, because of 'now > mtime'
751 # the timestamp of each entry in the dirstate, because of 'now > mtime'
753 delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
752 delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
754 if delaywrite > 0:
753 if delaywrite > 0:
755 # do we have any files to delay for?
754 # do we have any files to delay for?
756 for f, e in pycompat.iteritems(self._map):
755 for f, e in pycompat.iteritems(self._map):
757 if e.need_delay(now):
756 if e.need_delay(now):
758 import time # to avoid useless import
757 import time # to avoid useless import
759
758
760 # rather than sleep n seconds, sleep until the next
759 # rather than sleep n seconds, sleep until the next
761 # multiple of n seconds
760 # multiple of n seconds
762 clock = time.time()
761 clock = time.time()
763 start = int(clock) - (int(clock) % delaywrite)
762 start = int(clock) - (int(clock) % delaywrite)
764 end = start + delaywrite
763 end = start + delaywrite
765 time.sleep(end - clock)
764 time.sleep(end - clock)
766 now = end # trust our estimate that the end is near now
765 now = end # trust our estimate that the end is near now
767 break
766 break
768
767
769 self._map.write(st, now)
768 self._map.write(st, now)
770 self._lastnormaltime = 0
769 self._lastnormaltime = 0
771 self._dirty = False
770 self._dirty = False
772
771
773 def _dirignore(self, f):
772 def _dirignore(self, f):
774 if self._ignore(f):
773 if self._ignore(f):
775 return True
774 return True
776 for p in pathutil.finddirs(f):
775 for p in pathutil.finddirs(f):
777 if self._ignore(p):
776 if self._ignore(p):
778 return True
777 return True
779 return False
778 return False
780
779
781 def _ignorefiles(self):
780 def _ignorefiles(self):
782 files = []
781 files = []
783 if os.path.exists(self._join(b'.hgignore')):
782 if os.path.exists(self._join(b'.hgignore')):
784 files.append(self._join(b'.hgignore'))
783 files.append(self._join(b'.hgignore'))
785 for name, path in self._ui.configitems(b"ui"):
784 for name, path in self._ui.configitems(b"ui"):
786 if name == b'ignore' or name.startswith(b'ignore.'):
785 if name == b'ignore' or name.startswith(b'ignore.'):
787 # we need to use os.path.join here rather than self._join
786 # we need to use os.path.join here rather than self._join
788 # because path is arbitrary and user-specified
787 # because path is arbitrary and user-specified
789 files.append(os.path.join(self._rootdir, util.expandpath(path)))
788 files.append(os.path.join(self._rootdir, util.expandpath(path)))
790 return files
789 return files
791
790
792 def _ignorefileandline(self, f):
791 def _ignorefileandline(self, f):
793 files = collections.deque(self._ignorefiles())
792 files = collections.deque(self._ignorefiles())
794 visited = set()
793 visited = set()
795 while files:
794 while files:
796 i = files.popleft()
795 i = files.popleft()
797 patterns = matchmod.readpatternfile(
796 patterns = matchmod.readpatternfile(
798 i, self._ui.warn, sourceinfo=True
797 i, self._ui.warn, sourceinfo=True
799 )
798 )
800 for pattern, lineno, line in patterns:
799 for pattern, lineno, line in patterns:
801 kind, p = matchmod._patsplit(pattern, b'glob')
800 kind, p = matchmod._patsplit(pattern, b'glob')
802 if kind == b"subinclude":
801 if kind == b"subinclude":
803 if p not in visited:
802 if p not in visited:
804 files.append(p)
803 files.append(p)
805 continue
804 continue
806 m = matchmod.match(
805 m = matchmod.match(
807 self._root, b'', [], [pattern], warn=self._ui.warn
806 self._root, b'', [], [pattern], warn=self._ui.warn
808 )
807 )
809 if m(f):
808 if m(f):
810 return (i, lineno, line)
809 return (i, lineno, line)
811 visited.add(i)
810 visited.add(i)
812 return (None, -1, b"")
811 return (None, -1, b"")
813
812
814 def _walkexplicit(self, match, subrepos):
813 def _walkexplicit(self, match, subrepos):
815 """Get stat data about the files explicitly specified by match.
814 """Get stat data about the files explicitly specified by match.
816
815
817 Return a triple (results, dirsfound, dirsnotfound).
816 Return a triple (results, dirsfound, dirsnotfound).
818 - results is a mapping from filename to stat result. It also contains
817 - results is a mapping from filename to stat result. It also contains
819 listings mapping subrepos and .hg to None.
818 listings mapping subrepos and .hg to None.
820 - dirsfound is a list of files found to be directories.
819 - dirsfound is a list of files found to be directories.
821 - dirsnotfound is a list of files that the dirstate thinks are
820 - dirsnotfound is a list of files that the dirstate thinks are
822 directories and that were not found."""
821 directories and that were not found."""
823
822
824 def badtype(mode):
823 def badtype(mode):
825 kind = _(b'unknown')
824 kind = _(b'unknown')
826 if stat.S_ISCHR(mode):
825 if stat.S_ISCHR(mode):
827 kind = _(b'character device')
826 kind = _(b'character device')
828 elif stat.S_ISBLK(mode):
827 elif stat.S_ISBLK(mode):
829 kind = _(b'block device')
828 kind = _(b'block device')
830 elif stat.S_ISFIFO(mode):
829 elif stat.S_ISFIFO(mode):
831 kind = _(b'fifo')
830 kind = _(b'fifo')
832 elif stat.S_ISSOCK(mode):
831 elif stat.S_ISSOCK(mode):
833 kind = _(b'socket')
832 kind = _(b'socket')
834 elif stat.S_ISDIR(mode):
833 elif stat.S_ISDIR(mode):
835 kind = _(b'directory')
834 kind = _(b'directory')
836 return _(b'unsupported file type (type is %s)') % kind
835 return _(b'unsupported file type (type is %s)') % kind
837
836
838 badfn = match.bad
837 badfn = match.bad
839 dmap = self._map
838 dmap = self._map
840 lstat = os.lstat
839 lstat = os.lstat
841 getkind = stat.S_IFMT
840 getkind = stat.S_IFMT
842 dirkind = stat.S_IFDIR
841 dirkind = stat.S_IFDIR
843 regkind = stat.S_IFREG
842 regkind = stat.S_IFREG
844 lnkkind = stat.S_IFLNK
843 lnkkind = stat.S_IFLNK
845 join = self._join
844 join = self._join
846 dirsfound = []
845 dirsfound = []
847 foundadd = dirsfound.append
846 foundadd = dirsfound.append
848 dirsnotfound = []
847 dirsnotfound = []
849 notfoundadd = dirsnotfound.append
848 notfoundadd = dirsnotfound.append
850
849
851 if not match.isexact() and self._checkcase:
850 if not match.isexact() and self._checkcase:
852 normalize = self._normalize
851 normalize = self._normalize
853 else:
852 else:
854 normalize = None
853 normalize = None
855
854
856 files = sorted(match.files())
855 files = sorted(match.files())
857 subrepos.sort()
856 subrepos.sort()
858 i, j = 0, 0
857 i, j = 0, 0
859 while i < len(files) and j < len(subrepos):
858 while i < len(files) and j < len(subrepos):
860 subpath = subrepos[j] + b"/"
859 subpath = subrepos[j] + b"/"
861 if files[i] < subpath:
860 if files[i] < subpath:
862 i += 1
861 i += 1
863 continue
862 continue
864 while i < len(files) and files[i].startswith(subpath):
863 while i < len(files) and files[i].startswith(subpath):
865 del files[i]
864 del files[i]
866 j += 1
865 j += 1
867
866
868 if not files or b'' in files:
867 if not files or b'' in files:
869 files = [b'']
868 files = [b'']
870 # constructing the foldmap is expensive, so don't do it for the
869 # constructing the foldmap is expensive, so don't do it for the
871 # common case where files is ['']
870 # common case where files is ['']
872 normalize = None
871 normalize = None
873 results = dict.fromkeys(subrepos)
872 results = dict.fromkeys(subrepos)
874 results[b'.hg'] = None
873 results[b'.hg'] = None
875
874
876 for ff in files:
875 for ff in files:
877 if normalize:
876 if normalize:
878 nf = normalize(ff, False, True)
877 nf = normalize(ff, False, True)
879 else:
878 else:
880 nf = ff
879 nf = ff
881 if nf in results:
880 if nf in results:
882 continue
881 continue
883
882
884 try:
883 try:
885 st = lstat(join(nf))
884 st = lstat(join(nf))
886 kind = getkind(st.st_mode)
885 kind = getkind(st.st_mode)
887 if kind == dirkind:
886 if kind == dirkind:
888 if nf in dmap:
887 if nf in dmap:
889 # file replaced by dir on disk but still in dirstate
888 # file replaced by dir on disk but still in dirstate
890 results[nf] = None
889 results[nf] = None
891 foundadd((nf, ff))
890 foundadd((nf, ff))
892 elif kind == regkind or kind == lnkkind:
891 elif kind == regkind or kind == lnkkind:
893 results[nf] = st
892 results[nf] = st
894 else:
893 else:
895 badfn(ff, badtype(kind))
894 badfn(ff, badtype(kind))
896 if nf in dmap:
895 if nf in dmap:
897 results[nf] = None
896 results[nf] = None
898 except OSError as inst: # nf not found on disk - it is dirstate only
897 except OSError as inst: # nf not found on disk - it is dirstate only
899 if nf in dmap: # does it exactly match a missing file?
898 if nf in dmap: # does it exactly match a missing file?
900 results[nf] = None
899 results[nf] = None
901 else: # does it match a missing directory?
900 else: # does it match a missing directory?
902 if self._map.hasdir(nf):
901 if self._map.hasdir(nf):
903 notfoundadd(nf)
902 notfoundadd(nf)
904 else:
903 else:
905 badfn(ff, encoding.strtolocal(inst.strerror))
904 badfn(ff, encoding.strtolocal(inst.strerror))
906
905
907 # match.files() may contain explicitly-specified paths that shouldn't
906 # match.files() may contain explicitly-specified paths that shouldn't
908 # be taken; drop them from the list of files found. dirsfound/notfound
907 # be taken; drop them from the list of files found. dirsfound/notfound
909 # aren't filtered here because they will be tested later.
908 # aren't filtered here because they will be tested later.
910 if match.anypats():
909 if match.anypats():
911 for f in list(results):
910 for f in list(results):
912 if f == b'.hg' or f in subrepos:
911 if f == b'.hg' or f in subrepos:
913 # keep sentinel to disable further out-of-repo walks
912 # keep sentinel to disable further out-of-repo walks
914 continue
913 continue
915 if not match(f):
914 if not match(f):
916 del results[f]
915 del results[f]
917
916
918 # Case insensitive filesystems cannot rely on lstat() failing to detect
917 # Case insensitive filesystems cannot rely on lstat() failing to detect
919 # a case-only rename. Prune the stat object for any file that does not
918 # a case-only rename. Prune the stat object for any file that does not
920 # match the case in the filesystem, if there are multiple files that
919 # match the case in the filesystem, if there are multiple files that
921 # normalize to the same path.
920 # normalize to the same path.
922 if match.isexact() and self._checkcase:
921 if match.isexact() and self._checkcase:
923 normed = {}
922 normed = {}
924
923
925 for f, st in pycompat.iteritems(results):
924 for f, st in pycompat.iteritems(results):
926 if st is None:
925 if st is None:
927 continue
926 continue
928
927
929 nc = util.normcase(f)
928 nc = util.normcase(f)
930 paths = normed.get(nc)
929 paths = normed.get(nc)
931
930
932 if paths is None:
931 if paths is None:
933 paths = set()
932 paths = set()
934 normed[nc] = paths
933 normed[nc] = paths
935
934
936 paths.add(f)
935 paths.add(f)
937
936
938 for norm, paths in pycompat.iteritems(normed):
937 for norm, paths in pycompat.iteritems(normed):
939 if len(paths) > 1:
938 if len(paths) > 1:
940 for path in paths:
939 for path in paths:
941 folded = self._discoverpath(
940 folded = self._discoverpath(
942 path, norm, True, None, self._map.dirfoldmap
941 path, norm, True, None, self._map.dirfoldmap
943 )
942 )
944 if path != folded:
943 if path != folded:
945 results[path] = None
944 results[path] = None
946
945
947 return results, dirsfound, dirsnotfound
946 return results, dirsfound, dirsnotfound
948
947
949 def walk(self, match, subrepos, unknown, ignored, full=True):
948 def walk(self, match, subrepos, unknown, ignored, full=True):
950 """
949 """
951 Walk recursively through the directory tree, finding all files
950 Walk recursively through the directory tree, finding all files
952 matched by match.
951 matched by match.
953
952
954 If full is False, maybe skip some known-clean files.
953 If full is False, maybe skip some known-clean files.
955
954
956 Return a dict mapping filename to stat-like object (either
955 Return a dict mapping filename to stat-like object (either
957 mercurial.osutil.stat instance or return value of os.stat()).
956 mercurial.osutil.stat instance or return value of os.stat()).
958
957
959 """
958 """
960 # full is a flag that extensions that hook into walk can use -- this
959 # full is a flag that extensions that hook into walk can use -- this
961 # implementation doesn't use it at all. This satisfies the contract
960 # implementation doesn't use it at all. This satisfies the contract
962 # because we only guarantee a "maybe".
961 # because we only guarantee a "maybe".
963
962
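# Illustrative sketch (not part of upstream dirstate.py): a typical call walks
# the whole working directory with a match-all matcher, using the `matchmod`
# imported at the top of this file:
#
#     stats = ds.walk(matchmod.always(), subrepos=[], unknown=True, ignored=False)
#     # `stats` maps each visited filename to a stat-like object (or None for
#     # sentinel entries such as b'.hg' and subrepo roots)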
964 if ignored:
963 if ignored:
965 ignore = util.never
964 ignore = util.never
966 dirignore = util.never
965 dirignore = util.never
967 elif unknown:
966 elif unknown:
968 ignore = self._ignore
967 ignore = self._ignore
969 dirignore = self._dirignore
968 dirignore = self._dirignore
970 else:
969 else:
971 # if not unknown and not ignored, drop dir recursion and step 2
970 # if not unknown and not ignored, drop dir recursion and step 2
972 ignore = util.always
971 ignore = util.always
973 dirignore = util.always
972 dirignore = util.always
974
973
975 matchfn = match.matchfn
974 matchfn = match.matchfn
976 matchalways = match.always()
975 matchalways = match.always()
977 matchtdir = match.traversedir
976 matchtdir = match.traversedir
978 dmap = self._map
977 dmap = self._map
979 listdir = util.listdir
978 listdir = util.listdir
980 lstat = os.lstat
979 lstat = os.lstat
981 dirkind = stat.S_IFDIR
980 dirkind = stat.S_IFDIR
982 regkind = stat.S_IFREG
981 regkind = stat.S_IFREG
983 lnkkind = stat.S_IFLNK
982 lnkkind = stat.S_IFLNK
984 join = self._join
983 join = self._join
985
984
986 exact = skipstep3 = False
985 exact = skipstep3 = False
987 if match.isexact(): # match.exact
986 if match.isexact(): # match.exact
988 exact = True
987 exact = True
989 dirignore = util.always # skip step 2
988 dirignore = util.always # skip step 2
990 elif match.prefix(): # match.match, no patterns
989 elif match.prefix(): # match.match, no patterns
991 skipstep3 = True
990 skipstep3 = True
992
991
993 if not exact and self._checkcase:
992 if not exact and self._checkcase:
994 normalize = self._normalize
993 normalize = self._normalize
995 normalizefile = self._normalizefile
994 normalizefile = self._normalizefile
996 skipstep3 = False
995 skipstep3 = False
997 else:
996 else:
998 normalize = self._normalize
997 normalize = self._normalize
999 normalizefile = None
998 normalizefile = None
1000
999
1001 # step 1: find all explicit files
1000 # step 1: find all explicit files
1002 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1001 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1003 if matchtdir:
1002 if matchtdir:
1004 for d in work:
1003 for d in work:
1005 matchtdir(d[0])
1004 matchtdir(d[0])
1006 for d in dirsnotfound:
1005 for d in dirsnotfound:
1007 matchtdir(d)
1006 matchtdir(d)
1008
1007
1009 skipstep3 = skipstep3 and not (work or dirsnotfound)
1008 skipstep3 = skipstep3 and not (work or dirsnotfound)
1010 work = [d for d in work if not dirignore(d[0])]
1009 work = [d for d in work if not dirignore(d[0])]
1011
1010
1012 # step 2: visit subdirectories
1011 # step 2: visit subdirectories
1013 def traverse(work, alreadynormed):
1012 def traverse(work, alreadynormed):
1014 wadd = work.append
1013 wadd = work.append
1015 while work:
1014 while work:
1016 tracing.counter('dirstate.walk work', len(work))
1015 tracing.counter('dirstate.walk work', len(work))
1017 nd = work.pop()
1016 nd = work.pop()
1018 visitentries = match.visitchildrenset(nd)
1017 visitentries = match.visitchildrenset(nd)
1019 if not visitentries:
1018 if not visitentries:
1020 continue
1019 continue
1021 if visitentries == b'this' or visitentries == b'all':
1020 if visitentries == b'this' or visitentries == b'all':
1022 visitentries = None
1021 visitentries = None
1023 skip = None
1022 skip = None
1024 if nd != b'':
1023 if nd != b'':
1025 skip = b'.hg'
1024 skip = b'.hg'
1026 try:
1025 try:
1027 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1026 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1028 entries = listdir(join(nd), stat=True, skip=skip)
1027 entries = listdir(join(nd), stat=True, skip=skip)
1029 except OSError as inst:
1028 except OSError as inst:
1030 if inst.errno in (errno.EACCES, errno.ENOENT):
1029 if inst.errno in (errno.EACCES, errno.ENOENT):
1031 match.bad(
1030 match.bad(
1032 self.pathto(nd), encoding.strtolocal(inst.strerror)
1031 self.pathto(nd), encoding.strtolocal(inst.strerror)
1033 )
1032 )
1034 continue
1033 continue
1035 raise
1034 raise
1036 for f, kind, st in entries:
1035 for f, kind, st in entries:
1037 # Some matchers may return files in the visitentries set,
1036 # Some matchers may return files in the visitentries set,
1038 # instead of 'this', if the matcher explicitly mentions them
1037 # instead of 'this', if the matcher explicitly mentions them
1039 # and is not an exactmatcher. This is acceptable; we do not
1038 # and is not an exactmatcher. This is acceptable; we do not
1040 # make any hard assumptions about file-or-directory below
1039 # make any hard assumptions about file-or-directory below
1041 # based on the presence of `f` in visitentries. If
1040 # based on the presence of `f` in visitentries. If
1042 # visitchildrenset returned a set, we can always skip the
1041 # visitchildrenset returned a set, we can always skip the
1043 # entries *not* in the set it provided regardless of whether
1042 # entries *not* in the set it provided regardless of whether
1044 # they're actually a file or a directory.
1043 # they're actually a file or a directory.
1045 if visitentries and f not in visitentries:
1044 if visitentries and f not in visitentries:
1046 continue
1045 continue
1047 if normalizefile:
1046 if normalizefile:
1048 # even though f might be a directory, we're only
1047 # even though f might be a directory, we're only
1049 # interested in comparing it to files currently in the
1048 # interested in comparing it to files currently in the
1050 # dmap -- therefore normalizefile is enough
1049 # dmap -- therefore normalizefile is enough
1051 nf = normalizefile(
1050 nf = normalizefile(
1052 nd and (nd + b"/" + f) or f, True, True
1051 nd and (nd + b"/" + f) or f, True, True
1053 )
1052 )
1054 else:
1053 else:
1055 nf = nd and (nd + b"/" + f) or f
1054 nf = nd and (nd + b"/" + f) or f
1056 if nf not in results:
1055 if nf not in results:
1057 if kind == dirkind:
1056 if kind == dirkind:
1058 if not ignore(nf):
1057 if not ignore(nf):
1059 if matchtdir:
1058 if matchtdir:
1060 matchtdir(nf)
1059 matchtdir(nf)
1061 wadd(nf)
1060 wadd(nf)
1062 if nf in dmap and (matchalways or matchfn(nf)):
1061 if nf in dmap and (matchalways or matchfn(nf)):
1063 results[nf] = None
1062 results[nf] = None
1064 elif kind == regkind or kind == lnkkind:
1063 elif kind == regkind or kind == lnkkind:
1065 if nf in dmap:
1064 if nf in dmap:
1066 if matchalways or matchfn(nf):
1065 if matchalways or matchfn(nf):
1067 results[nf] = st
1066 results[nf] = st
1068 elif (matchalways or matchfn(nf)) and not ignore(
1067 elif (matchalways or matchfn(nf)) and not ignore(
1069 nf
1068 nf
1070 ):
1069 ):
1071 # unknown file -- normalize if necessary
1070 # unknown file -- normalize if necessary
1072 if not alreadynormed:
1071 if not alreadynormed:
1073 nf = normalize(nf, False, True)
1072 nf = normalize(nf, False, True)
1074 results[nf] = st
1073 results[nf] = st
1075 elif nf in dmap and (matchalways or matchfn(nf)):
1074 elif nf in dmap and (matchalways or matchfn(nf)):
1076 results[nf] = None
1075 results[nf] = None
1077
1076
1078 for nd, d in work:
1077 for nd, d in work:
1079 # alreadynormed means that processwork doesn't have to do any
1078 # alreadynormed means that processwork doesn't have to do any
1080 # expensive directory normalization
1079 # expensive directory normalization
1081 alreadynormed = not normalize or nd == d
1080 alreadynormed = not normalize or nd == d
1082 traverse([d], alreadynormed)
1081 traverse([d], alreadynormed)
1083
1082
1084 for s in subrepos:
1083 for s in subrepos:
1085 del results[s]
1084 del results[s]
1086 del results[b'.hg']
1085 del results[b'.hg']
1087
1086
1088 # step 3: visit remaining files from dmap
1087 # step 3: visit remaining files from dmap
1089 if not skipstep3 and not exact:
1088 if not skipstep3 and not exact:
1090 # If a dmap file is not in results yet, it was either
1089 # If a dmap file is not in results yet, it was either
1091 # a) not matching matchfn, b) ignored, c) missing, or d) under a
1090 # a) not matching matchfn, b) ignored, c) missing, or d) under a
1092 # symlink directory.
1091 # symlink directory.
1093 if not results and matchalways:
1092 if not results and matchalways:
1094 visit = [f for f in dmap]
1093 visit = [f for f in dmap]
1095 else:
1094 else:
1096 visit = [f for f in dmap if f not in results and matchfn(f)]
1095 visit = [f for f in dmap if f not in results and matchfn(f)]
1097 visit.sort()
1096 visit.sort()
1098
1097
1099 if unknown:
1098 if unknown:
1100 # unknown == True means we walked all dirs under the roots
1099 # unknown == True means we walked all dirs under the roots
1101 # that weren't ignored, and everything that matched was stat'ed
1100 # that weren't ignored, and everything that matched was stat'ed
1102 # and is already in results.
1101 # and is already in results.
1103 # The rest must thus be ignored or under a symlink.
1102 # The rest must thus be ignored or under a symlink.
1104 audit_path = pathutil.pathauditor(self._root, cached=True)
1103 audit_path = pathutil.pathauditor(self._root, cached=True)
1105
1104
1106 for nf in iter(visit):
1105 for nf in iter(visit):
1107 # If a stat for the same file was already added with a
1106 # If a stat for the same file was already added with a
1108 # different case, don't add one for this, since that would
1107 # different case, don't add one for this, since that would
1109 # make it appear as if the file exists under both names
1108 # make it appear as if the file exists under both names
1110 # on disk.
1109 # on disk.
1111 if (
1110 if (
1112 normalizefile
1111 normalizefile
1113 and normalizefile(nf, True, True) in results
1112 and normalizefile(nf, True, True) in results
1114 ):
1113 ):
1115 results[nf] = None
1114 results[nf] = None
1116 # Report ignored items in the dmap as long as they are not
1115 # Report ignored items in the dmap as long as they are not
1117 # under a symlink directory.
1116 # under a symlink directory.
1118 elif audit_path.check(nf):
1117 elif audit_path.check(nf):
1119 try:
1118 try:
1120 results[nf] = lstat(join(nf))
1119 results[nf] = lstat(join(nf))
1121 # file was just ignored, no links, and exists
1120 # file was just ignored, no links, and exists
1122 except OSError:
1121 except OSError:
1123 # file doesn't exist
1122 # file doesn't exist
1124 results[nf] = None
1123 results[nf] = None
1125 else:
1124 else:
1126 # It's either missing or under a symlink directory
1125 # It's either missing or under a symlink directory
1127 # which we in this case report as missing
1126 # which we in this case report as missing
1128 results[nf] = None
1127 results[nf] = None
1129 else:
1128 else:
1130 # We may not have walked the full directory tree above,
1129 # We may not have walked the full directory tree above,
1131 # so stat and check everything we missed.
1130 # so stat and check everything we missed.
1132 iv = iter(visit)
1131 iv = iter(visit)
1133 for st in util.statfiles([join(i) for i in visit]):
1132 for st in util.statfiles([join(i) for i in visit]):
1134 results[next(iv)] = st
1133 results[next(iv)] = st
1135 return results
1134 return results
1136
1135
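Read as a whole, walk() is a three-phase pipeline: stat the explicitly named files, recurse through directories that survive the ignore check, then stat whatever tracked entries the traversal never reached. A condensed, standalone sketch of that shape (walk_sketch is illustrative only, using os.scandir and plain dicts instead of the dirstate map and matchers):

import os

def walk_sketch(root, explicit, tracked, ignored):
    """Return {relative path: os.stat_result or None} for a simplified walk."""
    results = {}
    # step 1: explicitly requested files
    for path in explicit:
        try:
            results[path] = os.lstat(os.path.join(root, path))
        except OSError:
            results[path] = None
    # step 2: recurse into non-ignored directories
    work = ['']
    while work:
        cur = work.pop()
        try:
            entries = os.scandir(os.path.join(root, cur) if cur else root)
        except OSError:
            continue
        for entry in entries:
            rel = (cur + '/' + entry.name) if cur else entry.name
            if ignored(rel):
                continue
            if entry.is_dir(follow_symlinks=False):
                work.append(rel)
            elif rel in tracked and rel not in results:
                results[rel] = entry.stat(follow_symlinks=False)
    # step 3: tracked files never reached are reported with no stat result
    for path in tracked:
        results.setdefault(path, None)
    return results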
1137 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1136 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1138 # Force Rayon (Rust parallelism library) to respect the number of
1137 # Force Rayon (Rust parallelism library) to respect the number of
1139 # workers. This is a temporary workaround until Rust code knows
1138 # workers. This is a temporary workaround until Rust code knows
1140 # how to read the config file.
1139 # how to read the config file.
1141 numcpus = self._ui.configint(b"worker", b"numcpus")
1140 numcpus = self._ui.configint(b"worker", b"numcpus")
1142 if numcpus is not None:
1141 if numcpus is not None:
1143 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1142 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1144
1143
1145 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1144 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1146 if not workers_enabled:
1145 if not workers_enabled:
1147 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1146 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1148
1147
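The same workaround can be reproduced outside Mercurial, since Rayon reads RAYON_NUM_THREADS from the environment when its global thread pool starts. A hedged sketch using plain os.environ instead of the bytes-based encoding.environ (cap_rust_workers is a made-up name):

import os

def cap_rust_workers(numcpus=None, workers_enabled=True):
    # Respect an explicit CPU count, but never override a value the user
    # already exported themselves.
    if numcpus is not None:
        os.environ.setdefault('RAYON_NUM_THREADS', '%d' % numcpus)
    # Disabling workers entirely means forcing a single Rayon thread.
    if not workers_enabled:
        os.environ['RAYON_NUM_THREADS'] = '1'

cap_rust_workers(numcpus=4)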
1149 (
1148 (
1150 lookup,
1149 lookup,
1151 modified,
1150 modified,
1152 added,
1151 added,
1153 removed,
1152 removed,
1154 deleted,
1153 deleted,
1155 clean,
1154 clean,
1156 ignored,
1155 ignored,
1157 unknown,
1156 unknown,
1158 warnings,
1157 warnings,
1159 bad,
1158 bad,
1160 traversed,
1159 traversed,
1161 dirty,
1160 dirty,
1162 ) = rustmod.status(
1161 ) = rustmod.status(
1163 self._map._rustmap,
1162 self._map._rustmap,
1164 matcher,
1163 matcher,
1165 self._rootdir,
1164 self._rootdir,
1166 self._ignorefiles(),
1165 self._ignorefiles(),
1167 self._checkexec,
1166 self._checkexec,
1168 self._lastnormaltime,
1167 self._lastnormaltime,
1169 bool(list_clean),
1168 bool(list_clean),
1170 bool(list_ignored),
1169 bool(list_ignored),
1171 bool(list_unknown),
1170 bool(list_unknown),
1172 bool(matcher.traversedir),
1171 bool(matcher.traversedir),
1173 )
1172 )
1174
1173
1175 self._dirty |= dirty
1174 self._dirty |= dirty
1176
1175
1177 if matcher.traversedir:
1176 if matcher.traversedir:
1178 for dir in traversed:
1177 for dir in traversed:
1179 matcher.traversedir(dir)
1178 matcher.traversedir(dir)
1180
1179
1181 if self._ui.warn:
1180 if self._ui.warn:
1182 for item in warnings:
1181 for item in warnings:
1183 if isinstance(item, tuple):
1182 if isinstance(item, tuple):
1184 file_path, syntax = item
1183 file_path, syntax = item
1185 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1184 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1186 file_path,
1185 file_path,
1187 syntax,
1186 syntax,
1188 )
1187 )
1189 self._ui.warn(msg)
1188 self._ui.warn(msg)
1190 else:
1189 else:
1191 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1190 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1192 self._ui.warn(
1191 self._ui.warn(
1193 msg
1192 msg
1194 % (
1193 % (
1195 pathutil.canonpath(
1194 pathutil.canonpath(
1196 self._rootdir, self._rootdir, item
1195 self._rootdir, self._rootdir, item
1197 ),
1196 ),
1198 b"No such file or directory",
1197 b"No such file or directory",
1199 )
1198 )
1200 )
1199 )
1201
1200
1202 for (fn, message) in bad:
1201 for (fn, message) in bad:
1203 matcher.bad(fn, encoding.strtolocal(message))
1202 matcher.bad(fn, encoding.strtolocal(message))
1204
1203
1205 status = scmutil.status(
1204 status = scmutil.status(
1206 modified=modified,
1205 modified=modified,
1207 added=added,
1206 added=added,
1208 removed=removed,
1207 removed=removed,
1209 deleted=deleted,
1208 deleted=deleted,
1210 unknown=unknown,
1209 unknown=unknown,
1211 ignored=ignored,
1210 ignored=ignored,
1212 clean=clean,
1211 clean=clean,
1213 )
1212 )
1214 return (lookup, status)
1213 return (lookup, status)
1215
1214
1216 def status(self, match, subrepos, ignored, clean, unknown):
1215 def status(self, match, subrepos, ignored, clean, unknown):
1217 """Determine the status of the working copy relative to the
1216 """Determine the status of the working copy relative to the
1218 dirstate and return a pair of (unsure, status), where status is of type
1217 dirstate and return a pair of (unsure, status), where status is of type
1219 scmutil.status and:
1218 scmutil.status and:
1220
1219
1221 unsure:
1220 unsure:
1222 files that might have been modified since the dirstate was
1221 files that might have been modified since the dirstate was
1223 written, but need to be read to be sure (size is the same
1222 written, but need to be read to be sure (size is the same
1224 but mtime differs)
1223 but mtime differs)
1225 status.modified:
1224 status.modified:
1226 files that have definitely been modified since the dirstate
1225 files that have definitely been modified since the dirstate
1227 was written (different size or mode)
1226 was written (different size or mode)
1228 status.clean:
1227 status.clean:
1229 files that have definitely not been modified since the
1228 files that have definitely not been modified since the
1230 dirstate was written
1229 dirstate was written
1231 """
1230 """
1232 listignored, listclean, listunknown = ignored, clean, unknown
1231 listignored, listclean, listunknown = ignored, clean, unknown
1233 lookup, modified, added, unknown, ignored = [], [], [], [], []
1232 lookup, modified, added, unknown, ignored = [], [], [], [], []
1234 removed, deleted, clean = [], [], []
1233 removed, deleted, clean = [], [], []
1235
1234
1236 dmap = self._map
1235 dmap = self._map
1237 dmap.preload()
1236 dmap.preload()
1238
1237
1239 use_rust = True
1238 use_rust = True
1240
1239
1241 allowed_matchers = (
1240 allowed_matchers = (
1242 matchmod.alwaysmatcher,
1241 matchmod.alwaysmatcher,
1243 matchmod.exactmatcher,
1242 matchmod.exactmatcher,
1244 matchmod.includematcher,
1243 matchmod.includematcher,
1245 )
1244 )
1246
1245
1247 if rustmod is None:
1246 if rustmod is None:
1248 use_rust = False
1247 use_rust = False
1249 elif self._checkcase:
1248 elif self._checkcase:
1250 # Case-insensitive filesystems are not handled yet
1249 # Case-insensitive filesystems are not handled yet
1251 use_rust = False
1250 use_rust = False
1252 elif subrepos:
1251 elif subrepos:
1253 use_rust = False
1252 use_rust = False
1254 elif sparse.enabled:
1253 elif sparse.enabled:
1255 use_rust = False
1254 use_rust = False
1256 elif not isinstance(match, allowed_matchers):
1255 elif not isinstance(match, allowed_matchers):
1257 # Some matchers have yet to be implemented
1256 # Some matchers have yet to be implemented
1258 use_rust = False
1257 use_rust = False
1259
1258
1260 if use_rust:
1259 if use_rust:
1261 try:
1260 try:
1262 return self._rust_status(
1261 return self._rust_status(
1263 match, listclean, listignored, listunknown
1262 match, listclean, listignored, listunknown
1264 )
1263 )
1265 except rustmod.FallbackError:
1264 except rustmod.FallbackError:
1266 pass
1265 pass
1267
1266
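The pattern here is "gate on everything the fast path cannot handle, then still be ready to fall back at runtime". A compact sketch of the same shape with a stand-in exception class (FallbackError below only mirrors the role of rustmod.FallbackError; compute and its arguments are illustrative):

class FallbackError(Exception):
    """Raised by a fast path that discovers mid-flight it cannot finish."""

def compute(data, fast_path=None, unsupported=()):
    use_fast = fast_path is not None and not any(unsupported)
    if use_fast:
        try:
            return fast_path(data)
        except FallbackError:
            pass          # fall through to the portable implementation
    return sorted(data)   # slow but always-correct reference path

print(compute([3, 1, 2]))  # -> [1, 2, 3]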
1268 def noop(f):
1267 def noop(f):
1269 pass
1268 pass
1270
1269
1271 dcontains = dmap.__contains__
1270 dcontains = dmap.__contains__
1272 dget = dmap.__getitem__
1271 dget = dmap.__getitem__
1273 ladd = lookup.append # aka "unsure"
1272 ladd = lookup.append # aka "unsure"
1274 madd = modified.append
1273 madd = modified.append
1275 aadd = added.append
1274 aadd = added.append
1276 uadd = unknown.append if listunknown else noop
1275 uadd = unknown.append if listunknown else noop
1277 iadd = ignored.append if listignored else noop
1276 iadd = ignored.append if listignored else noop
1278 radd = removed.append
1277 radd = removed.append
1279 dadd = deleted.append
1278 dadd = deleted.append
1280 cadd = clean.append if listclean else noop
1279 cadd = clean.append if listclean else noop
1281 mexact = match.exact
1280 mexact = match.exact
1282 dirignore = self._dirignore
1281 dirignore = self._dirignore
1283 checkexec = self._checkexec
1282 checkexec = self._checkexec
1284 copymap = self._map.copymap
1283 copymap = self._map.copymap
1285 lastnormaltime = self._lastnormaltime
1284 lastnormaltime = self._lastnormaltime
1286
1285
1287 # We need to do full walks when either
1286 # We need to do full walks when either
1288 # - we're listing all clean files, or
1287 # - we're listing all clean files, or
1289 # - match.traversedir does something, because match.traversedir should
1288 # - match.traversedir does something, because match.traversedir should
1290 # be called for every dir in the working dir
1289 # be called for every dir in the working dir
1291 full = listclean or match.traversedir is not None
1290 full = listclean or match.traversedir is not None
1292 for fn, st in pycompat.iteritems(
1291 for fn, st in pycompat.iteritems(
1293 self.walk(match, subrepos, listunknown, listignored, full=full)
1292 self.walk(match, subrepos, listunknown, listignored, full=full)
1294 ):
1293 ):
1295 if not dcontains(fn):
1294 if not dcontains(fn):
1296 if (listignored or mexact(fn)) and dirignore(fn):
1295 if (listignored or mexact(fn)) and dirignore(fn):
1297 if listignored:
1296 if listignored:
1298 iadd(fn)
1297 iadd(fn)
1299 else:
1298 else:
1300 uadd(fn)
1299 uadd(fn)
1301 continue
1300 continue
1302
1301
1303 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1302 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1304 # written like that for performance reasons. dmap[fn] is not a
1303 # written like that for performance reasons. dmap[fn] is not a
1305 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1304 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1306 # opcode has fast paths when the value to be unpacked is a tuple or
1305 # opcode has fast paths when the value to be unpacked is a tuple or
1307 # a list, but falls back to creating a full-fledged iterator in
1306 # a list, but falls back to creating a full-fledged iterator in
1308 # general. That is much slower than simply accessing and storing the
1307 # general. That is much slower than simply accessing and storing the
1309 # tuple members one by one.
1308 # tuple members one by one.
1310 t = dget(fn)
1309 t = dget(fn)
1311 state = t.state
1312 mode = t[1]
1310 mode = t[1]
1313 size = t[2]
1311 size = t[2]
1314 time = t[3]
1312 time = t[3]
1315
1313
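The distinction only matters for entry objects implemented in C, which behave like sequences but are not real tuples, so unpacking them goes through the generic iterator machinery. A pure-Python stand-in showing the two access styles side by side (FakeEntry is illustrative, not the C dirstatetuple):

class FakeEntry(object):
    """Sequence-like object standing in for a C-implemented dirstate entry."""
    def __init__(self, state, mode, size, mtime):
        self._data = (state, mode, size, mtime)
    def __len__(self):
        return 4
    def __getitem__(self, i):
        return self._data[i]

e = FakeEntry(b'n', 0o644, 12, 0)
# style used above: one __getitem__ call per field, no iterator allocated
mode = e[1]
size = e[2]
mtime = e[3]
# sequence unpacking: Python falls back to iterating over the object
state, mode, size, mtime = e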
1316 if not st and t.tracked:
1314 if not st and t.tracked:
1317 dadd(fn)
1315 dadd(fn)
1318 elif t.merged:
1316 elif t.merged:
1319 madd(fn)
1317 madd(fn)
1320 elif t.added:
1318 elif t.added:
1321 aadd(fn)
1319 aadd(fn)
1322 elif t.removed:
1320 elif t.removed:
1323 radd(fn)
1321 radd(fn)
1324 elif t.tracked:
1322 elif t.tracked:
1325 if (
1323 if (
1326 size >= 0
1324 size >= 0
1327 and (
1325 and (
1328 (size != st.st_size and size != st.st_size & _rangemask)
1326 (size != st.st_size and size != st.st_size & _rangemask)
1329 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1327 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1330 )
1328 )
1331 or t.from_p2
1329 or t.from_p2
1332 or fn in copymap
1330 or fn in copymap
1333 ):
1331 ):
1334 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1332 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1335 # issue6456: Size returned may be longer due to
1333 # issue6456: Size returned may be longer due to
1336 # encryption on EXT-4 fscrypt, undecided.
1334 # encryption on EXT-4 fscrypt, undecided.
1337 ladd(fn)
1335 ladd(fn)
1338 else:
1336 else:
1339 madd(fn)
1337 madd(fn)
1340 elif (
1338 elif (
1341 time != st[stat.ST_MTIME]
1339 time != st[stat.ST_MTIME]
1342 and time != st[stat.ST_MTIME] & _rangemask
1340 and time != st[stat.ST_MTIME] & _rangemask
1343 ):
1341 ):
1344 ladd(fn)
1342 ladd(fn)
1345 elif st[stat.ST_MTIME] == lastnormaltime:
1343 elif st[stat.ST_MTIME] == lastnormaltime:
1346 # fn may have just been marked as normal and it may have
1344 # fn may have just been marked as normal and it may have
1347 # changed in the same second without changing its size.
1345 # changed in the same second without changing its size.
1348 # This can happen if we quickly do multiple commits.
1346 # This can happen if we quickly do multiple commits.
1349 # Force lookup, so we don't miss such a racy file change.
1347 # Force lookup, so we don't miss such a racy file change.
1350 ladd(fn)
1348 ladd(fn)
1351 elif listclean:
1349 elif listclean:
1352 cadd(fn)
1350 cadd(fn)
1353 status = scmutil.status(
1351 status = scmutil.status(
1354 modified, added, removed, deleted, unknown, ignored, clean
1352 modified, added, removed, deleted, unknown, ignored, clean
1355 )
1353 )
1356 return (lookup, status)
1354 return (lookup, status)
1357
1355
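The size and mtime comparisons in the loop above tolerate truncation: on-disk dirstate fields are 31-bit, so a stored value may be the real value masked with 0x7FFFFFFF, and a file touched in the same second as the last "normal" write must be re-checked. A small sketch of that classification with made-up names (looks_clean and its parameters are not the real dirstate entry API):

import stat

RANGEMASK = 0x7FFFFFFF

def looks_clean(entry_size, entry_mtime, st, lastnormaltime,
                checkexec=True, entry_mode=0o644):
    """Rough classification of one tracked file: 'modified', 'lookup' or 'clean'."""
    if entry_size >= 0 and (
        (entry_size != st.st_size and entry_size != st.st_size & RANGEMASK)
        or (checkexec and (entry_mode ^ st.st_mode) & 0o100)
    ):
        return 'modified'            # size or exec bit definitely changed
    if (entry_mtime != st[stat.ST_MTIME]
            and entry_mtime != st[stat.ST_MTIME] & RANGEMASK):
        return 'lookup'              # mtime changed: contents must be compared
    if st[stat.ST_MTIME] == lastnormaltime:
        return 'lookup'              # possible same-second race, re-check later
    return 'clean'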
1358 def matches(self, match):
1356 def matches(self, match):
1359 """
1357 """
1360 return files in the dirstate (in whatever state) filtered by match
1358 return files in the dirstate (in whatever state) filtered by match
1361 """
1359 """
1362 dmap = self._map
1360 dmap = self._map
1363 if rustmod is not None:
1361 if rustmod is not None:
1364 dmap = self._map._rustmap
1362 dmap = self._map._rustmap
1365
1363
1366 if match.always():
1364 if match.always():
1367 return dmap.keys()
1365 return dmap.keys()
1368 files = match.files()
1366 files = match.files()
1369 if match.isexact():
1367 if match.isexact():
1370 # fast path -- filter the other way around, since typically files is
1368 # fast path -- filter the other way around, since typically files is
1371 # much smaller than dmap
1369 # much smaller than dmap
1372 return [f for f in files if f in dmap]
1370 return [f for f in files if f in dmap]
1373 if match.prefix() and all(fn in dmap for fn in files):
1371 if match.prefix() and all(fn in dmap for fn in files):
1374 # fast path -- all the values are known to be files, so just return
1372 # fast path -- all the values are known to be files, so just return
1375 # that
1373 # that
1376 return list(files)
1374 return list(files)
1377 return [f for f in dmap if match(f)]
1375 return [f for f in dmap if match(f)]
1378
1376
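The two fast paths boil down to iterating over whichever collection is smaller. A tiny sketch with plain containers (matching_files is illustrative only):

def matching_files(dmap, match_files, is_exact):
    if is_exact:
        # typically a handful of explicit files vs. thousands of dirstate keys
        return [f for f in match_files if f in dmap]
    return [f for f in dmap if f in match_files]

dmap = {b'a.txt': None, b'b.txt': None, b'src/c.py': None}
print(matching_files(dmap, [b'b.txt', b'missing'], is_exact=True))  # [b'b.txt']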
1379 def _actualfilename(self, tr):
1377 def _actualfilename(self, tr):
1380 if tr:
1378 if tr:
1381 return self._pendingfilename
1379 return self._pendingfilename
1382 else:
1380 else:
1383 return self._filename
1381 return self._filename
1384
1382
1385 def savebackup(self, tr, backupname):
1383 def savebackup(self, tr, backupname):
1386 '''Save current dirstate into backup file'''
1384 '''Save current dirstate into backup file'''
1387 filename = self._actualfilename(tr)
1385 filename = self._actualfilename(tr)
1388 assert backupname != filename
1386 assert backupname != filename
1389
1387
1390 # use '_writedirstate' instead of 'write' to write changes certainly,
1388 # use '_writedirstate' instead of 'write' to write changes certainly,
1391 # because the latter omits writing out if transaction is running.
1389 # because the latter omits writing out if transaction is running.
1392 # output file will be used to create backup of dirstate at this point.
1390 # output file will be used to create backup of dirstate at this point.
1393 if self._dirty or not self._opener.exists(filename):
1391 if self._dirty or not self._opener.exists(filename):
1394 self._writedirstate(
1392 self._writedirstate(
1395 self._opener(filename, b"w", atomictemp=True, checkambig=True)
1393 self._opener(filename, b"w", atomictemp=True, checkambig=True)
1396 )
1394 )
1397
1395
1398 if tr:
1396 if tr:
1399 # ensure that subsequent tr.writepending returns True for
1397 # ensure that subsequent tr.writepending returns True for
1400 # changes written out above, even if dirstate is never
1398 # changes written out above, even if dirstate is never
1401 # changed after this
1399 # changed after this
1402 tr.addfilegenerator(
1400 tr.addfilegenerator(
1403 b'dirstate',
1401 b'dirstate',
1404 (self._filename,),
1402 (self._filename,),
1405 self._writedirstate,
1403 self._writedirstate,
1406 location=b'plain',
1404 location=b'plain',
1407 )
1405 )
1408
1406
1409 # ensure that pending file written above is unlinked at
1407 # ensure that pending file written above is unlinked at
1410 # failure, even if tr.writepending isn't invoked until the
1408 # failure, even if tr.writepending isn't invoked until the
1411 # end of this transaction
1409 # end of this transaction
1412 tr.registertmp(filename, location=b'plain')
1410 tr.registertmp(filename, location=b'plain')
1413
1411
1414 self._opener.tryunlink(backupname)
1412 self._opener.tryunlink(backupname)
1415 # hardlink backup is okay because _writedirstate is always called
1413 # hardlink backup is okay because _writedirstate is always called
1416 # with an "atomictemp=True" file.
1414 # with an "atomictemp=True" file.
1417 util.copyfile(
1415 util.copyfile(
1418 self._opener.join(filename),
1416 self._opener.join(filename),
1419 self._opener.join(backupname),
1417 self._opener.join(backupname),
1420 hardlink=True,
1418 hardlink=True,
1421 )
1419 )
1422
1420
1423 def restorebackup(self, tr, backupname):
1421 def restorebackup(self, tr, backupname):
1424 '''Restore dirstate by backup file'''
1422 '''Restore dirstate by backup file'''
1425 # this "invalidate()" prevents "wlock.release()" from writing
1423 # this "invalidate()" prevents "wlock.release()" from writing
1426 # changes of dirstate out after restoring from backup file
1424 # changes of dirstate out after restoring from backup file
1427 self.invalidate()
1425 self.invalidate()
1428 filename = self._actualfilename(tr)
1426 filename = self._actualfilename(tr)
1429 o = self._opener
1427 o = self._opener
1430 if util.samefile(o.join(backupname), o.join(filename)):
1428 if util.samefile(o.join(backupname), o.join(filename)):
1431 o.unlink(backupname)
1429 o.unlink(backupname)
1432 else:
1430 else:
1433 o.rename(backupname, filename, checkambig=True)
1431 o.rename(backupname, filename, checkambig=True)
1434
1432
1435 def clearbackup(self, tr, backupname):
1433 def clearbackup(self, tr, backupname):
1436 '''Clear backup file'''
1434 '''Clear backup file'''
1437 self._opener.unlink(backupname)
1435 self._opener.unlink(backupname)
@@ -1,682 +1,687 b''
1 # dirstatemap.py
1 # dirstatemap.py
2 #
2 #
3 # This software may be used and distributed according to the terms of the
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2 or any later version.
4 # GNU General Public License version 2 or any later version.
5
5
6 from __future__ import absolute_import
6 from __future__ import absolute_import
7
7
8 import errno
8 import errno
9
9
10 from .i18n import _
10 from .i18n import _
11
11
12 from . import (
12 from . import (
13 error,
13 error,
14 pathutil,
14 pathutil,
15 policy,
15 policy,
16 pycompat,
16 pycompat,
17 txnutil,
17 txnutil,
18 util,
18 util,
19 )
19 )
20
20
21 parsers = policy.importmod('parsers')
21 parsers = policy.importmod('parsers')
22 rustmod = policy.importrust('dirstate')
22 rustmod = policy.importrust('dirstate')
23
23
24 propertycache = util.propertycache
24 propertycache = util.propertycache
25
25
26 dirstatetuple = parsers.dirstatetuple
26 dirstatetuple = parsers.dirstatetuple
27
27
28
28
29 # a special value used internally for `size` if the file comes from the other parent
29 # a special value used internally for `size` if the file comes from the other parent
30 FROM_P2 = -2
30 FROM_P2 = -2
31
31
32 # a special value used internally for `size` if the file is modified/merged/added
32 # a special value used internally for `size` if the file is modified/merged/added
33 NONNORMAL = -1
33 NONNORMAL = -1
34
34
35 # a special value used internally for `time` if the time is ambiguous
35 # a special value used internally for `time` if the time is ambiguous
36 AMBIGUOUS_TIME = -1
36 AMBIGUOUS_TIME = -1
37
37
38 rangemask = 0x7FFFFFFF
38 rangemask = 0x7FFFFFFF
39
39
40
40
41 class dirstatemap(object):
41 class dirstatemap(object):
42 """Map encapsulating the dirstate's contents.
42 """Map encapsulating the dirstate's contents.
43
43
44 The dirstate contains the following state:
44 The dirstate contains the following state:
45
45
46 - `identity` is the identity of the dirstate file, which can be used to
46 - `identity` is the identity of the dirstate file, which can be used to
47 detect when changes have occurred to the dirstate file.
47 detect when changes have occurred to the dirstate file.
48
48
49 - `parents` is a pair containing the parents of the working copy. The
49 - `parents` is a pair containing the parents of the working copy. The
50 parents are updated by calling `setparents`.
50 parents are updated by calling `setparents`.
51
51
52 - the state map maps filenames to tuples of (state, mode, size, mtime),
52 - the state map maps filenames to tuples of (state, mode, size, mtime),
53 where state is a single character representing 'normal', 'added',
53 where state is a single character representing 'normal', 'added',
54 'removed', or 'merged'. It is read by treating the dirstate as a
54 'removed', or 'merged'. It is read by treating the dirstate as a
55 dict. File state is updated by calling the `addfile`, `removefile` and
55 dict. File state is updated by calling the `addfile`, `removefile` and
56 `dropfile` methods.
56 `dropfile` methods.
57
57
58 - `copymap` maps destination filenames to their source filename.
58 - `copymap` maps destination filenames to their source filename.
59
59
60 The dirstate also provides the following views onto the state:
60 The dirstate also provides the following views onto the state:
61
61
62 - `nonnormalset` is a set of the filenames that have state other
62 - `nonnormalset` is a set of the filenames that have state other
63 than 'normal', or are normal but have an mtime of -1 ('normallookup').
63 than 'normal', or are normal but have an mtime of -1 ('normallookup').
64
64
65 - `otherparentset` is a set of the filenames that are marked as coming
65 - `otherparentset` is a set of the filenames that are marked as coming
66 from the second parent when the dirstate is currently being merged.
66 from the second parent when the dirstate is currently being merged.
67
67
68 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
68 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
69 form that they appear as in the dirstate.
69 form that they appear as in the dirstate.
70
70
71 - `dirfoldmap` is a dict mapping normalized directory names to the
71 - `dirfoldmap` is a dict mapping normalized directory names to the
72 denormalized form that they appear as in the dirstate.
72 denormalized form that they appear as in the dirstate.
73 """
73 """
74
74
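A toy model of the structures this docstring describes can help when reasoning about the derived views; everything below is a deliberate simplification with plain dicts and sets, not the real class:

# state map: filename -> (state, mode, size, mtime)
state_map = {
    b'a.txt': (b'n', 0o644, 12, 1700000000),   # clean, tracked
    b'b.txt': (b'a', 0o644, -1, -1),           # added
    b'old.txt': (b'r', 0, 0, 0),               # removed
}
copymap = {b'b.txt': b'a.txt'}                 # b.txt was copied from a.txt

# derived views
nonnormalset = {f for f, e in state_map.items()
                if e[0] != b'n' or e[3] == -1}
otherparentset = {f for f, e in state_map.items()
                  if e[0] == b'n' and e[2] == -2}   # FROM_P2 sentinel
assert nonnormalset == {b'b.txt', b'old.txt'}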
75 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
75 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
76 self._ui = ui
76 self._ui = ui
77 self._opener = opener
77 self._opener = opener
78 self._root = root
78 self._root = root
79 self._filename = b'dirstate'
79 self._filename = b'dirstate'
80 self._nodelen = 20
80 self._nodelen = 20
81 self._nodeconstants = nodeconstants
81 self._nodeconstants = nodeconstants
82 assert (
82 assert (
83 not use_dirstate_v2
83 not use_dirstate_v2
84 ), "should have detected unsupported requirement"
84 ), "should have detected unsupported requirement"
85
85
86 self._parents = None
86 self._parents = None
87 self._dirtyparents = False
87 self._dirtyparents = False
88
88
89 # for consistent view between _pl() and _read() invocations
89 # for consistent view between _pl() and _read() invocations
90 self._pendingmode = None
90 self._pendingmode = None
91
91
92 @propertycache
92 @propertycache
93 def _map(self):
93 def _map(self):
94 self._map = {}
94 self._map = {}
95 self.read()
95 self.read()
96 return self._map
96 return self._map
97
97
98 @propertycache
98 @propertycache
99 def copymap(self):
99 def copymap(self):
100 self.copymap = {}
100 self.copymap = {}
101 self._map
101 self._map
102 return self.copymap
102 return self.copymap
103
103
104 def directories(self):
104 def directories(self):
105 # Rust / dirstate-v2 only
105 # Rust / dirstate-v2 only
106 return []
106 return []
107
107
108 def clear(self):
108 def clear(self):
109 self._map.clear()
109 self._map.clear()
110 self.copymap.clear()
110 self.copymap.clear()
111 self.setparents(self._nodeconstants.nullid, self._nodeconstants.nullid)
111 self.setparents(self._nodeconstants.nullid, self._nodeconstants.nullid)
112 util.clearcachedproperty(self, b"_dirs")
112 util.clearcachedproperty(self, b"_dirs")
113 util.clearcachedproperty(self, b"_alldirs")
113 util.clearcachedproperty(self, b"_alldirs")
114 util.clearcachedproperty(self, b"filefoldmap")
114 util.clearcachedproperty(self, b"filefoldmap")
115 util.clearcachedproperty(self, b"dirfoldmap")
115 util.clearcachedproperty(self, b"dirfoldmap")
116 util.clearcachedproperty(self, b"nonnormalset")
116 util.clearcachedproperty(self, b"nonnormalset")
117 util.clearcachedproperty(self, b"otherparentset")
117 util.clearcachedproperty(self, b"otherparentset")
118
118
119 def items(self):
119 def items(self):
120 return pycompat.iteritems(self._map)
120 return pycompat.iteritems(self._map)
121
121
122 # forward for python2,3 compat
122 # forward for python2,3 compat
123 iteritems = items
123 iteritems = items
124
124
125 def __len__(self):
125 def __len__(self):
126 return len(self._map)
126 return len(self._map)
127
127
128 def __iter__(self):
128 def __iter__(self):
129 return iter(self._map)
129 return iter(self._map)
130
130
131 def get(self, key, default=None):
131 def get(self, key, default=None):
132 return self._map.get(key, default)
132 return self._map.get(key, default)
133
133
134 def __contains__(self, key):
134 def __contains__(self, key):
135 return key in self._map
135 return key in self._map
136
136
137 def __getitem__(self, key):
137 def __getitem__(self, key):
138 return self._map[key]
138 return self._map[key]
139
139
140 def keys(self):
140 def keys(self):
141 return self._map.keys()
141 return self._map.keys()
142
142
143 def preload(self):
143 def preload(self):
144 """Loads the underlying data, if it's not already loaded"""
144 """Loads the underlying data, if it's not already loaded"""
145 self._map
145 self._map
146
146
147 def addfile(
147 def addfile(
148 self,
148 self,
149 f,
149 f,
150 mode=0,
150 mode=0,
151 size=None,
151 size=None,
152 mtime=None,
152 mtime=None,
153 added=False,
153 added=False,
154 merged=False,
154 merged=False,
155 from_p2=False,
155 from_p2=False,
156 possibly_dirty=False,
156 possibly_dirty=False,
157 ):
157 ):
158 """Add a tracked file to the dirstate."""
158 """Add a tracked file to the dirstate."""
159 if added:
159 if added:
160 assert not merged
160 assert not merged
161 assert not possibly_dirty
161 assert not possibly_dirty
162 assert not from_p2
162 assert not from_p2
163 state = b'a'
163 state = b'a'
164 size = NONNORMAL
164 size = NONNORMAL
165 mtime = AMBIGUOUS_TIME
165 mtime = AMBIGUOUS_TIME
166 elif merged:
166 elif merged:
167 assert not possibly_dirty
167 assert not possibly_dirty
168 assert not from_p2
168 assert not from_p2
169 state = b'm'
169 state = b'm'
170 size = FROM_P2
170 size = FROM_P2
171 mtime = AMBIGUOUS_TIME
171 mtime = AMBIGUOUS_TIME
172 elif from_p2:
172 elif from_p2:
173 assert not possibly_dirty
173 assert not possibly_dirty
174 state = b'n'
174 state = b'n'
175 size = FROM_P2
175 size = FROM_P2
176 mtime = AMBIGUOUS_TIME
176 mtime = AMBIGUOUS_TIME
177 elif possibly_dirty:
177 elif possibly_dirty:
178 state = b'n'
178 state = b'n'
179 size = NONNORMAL
179 size = NONNORMAL
180 mtime = AMBIGUOUS_TIME
180 mtime = AMBIGUOUS_TIME
181 else:
181 else:
182 assert size != FROM_P2
182 assert size != FROM_P2
183 assert size != NONNORMAL
183 assert size != NONNORMAL
184 state = b'n'
184 state = b'n'
185 size = size & rangemask
185 size = size & rangemask
186 mtime = mtime & rangemask
186 mtime = mtime & rangemask
187 assert state is not None
187 assert state is not None
188 assert size is not None
188 assert size is not None
189 assert mtime is not None
189 assert mtime is not None
190 old_entry = self.get(f)
190 old_entry = self.get(f)
191 if (
191 if (
192 old_entry is None or old_entry.removed
192 old_entry is None or old_entry.removed
193 ) and "_dirs" in self.__dict__:
193 ) and "_dirs" in self.__dict__:
194 self._dirs.addpath(f)
194 self._dirs.addpath(f)
195 if old_entry is None and "_alldirs" in self.__dict__:
195 if old_entry is None and "_alldirs" in self.__dict__:
196 self._alldirs.addpath(f)
196 self._alldirs.addpath(f)
197 self._map[f] = dirstatetuple(state, mode, size, mtime)
197 self._map[f] = dirstatetuple(state, mode, size, mtime)
198 if state != b'n' or mtime == AMBIGUOUS_TIME:
198 if state != b'n' or mtime == AMBIGUOUS_TIME:
199 self.nonnormalset.add(f)
199 self.nonnormalset.add(f)
200 if size == FROM_P2:
200 if size == FROM_P2:
201 self.otherparentset.add(f)
201 self.otherparentset.add(f)
202
202
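The branch ladder above collapses each flag combination into a (state, size, mtime) triple built from sentinels, keeping the real size and mtime only for plain clean files. A standalone restatement of just that mapping (entry_for is a made-up helper; the constants mirror FROM_P2, NONNORMAL, AMBIGUOUS_TIME and rangemask):

FROM_P2 = -2
NONNORMAL = -1
AMBIGUOUS_TIME = -1
RANGEMASK = 0x7FFFFFFF

def entry_for(mode, size, mtime, added=False, merged=False,
              from_p2=False, possibly_dirty=False):
    if added:
        return (b'a', mode, NONNORMAL, AMBIGUOUS_TIME)
    if merged:
        return (b'm', mode, FROM_P2, AMBIGUOUS_TIME)
    if from_p2:
        return (b'n', mode, FROM_P2, AMBIGUOUS_TIME)
    if possibly_dirty:
        return (b'n', mode, NONNORMAL, AMBIGUOUS_TIME)
    # plain clean file: keep real size/mtime, truncated to 31 bits
    return (b'n', mode, size & RANGEMASK, mtime & RANGEMASK)

assert entry_for(0o644, 10, 99, added=True) == (b'a', 0o644, -1, -1)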
203 def removefile(self, f, in_merge=False):
203 def removefile(self, f, in_merge=False):
204 """
204 """
205 Mark a file as removed in the dirstate.
205 Mark a file as removed in the dirstate.
206
206
207 The `size` field of the resulting entry is used to store sentinel values that indicate
207 The `size` field of the resulting entry is used to store sentinel values that indicate
208 the file's previous state. In the future, we should refactor this
208 the file's previous state. In the future, we should refactor this
209 to be more explicit about what that state is.
209 to be more explicit about what that state is.
210 """
210 """
211 entry = self.get(f)
211 entry = self.get(f)
212 size = 0
212 size = 0
213 if in_merge:
213 if in_merge:
214 # XXX we should not be able to have 'm' state and 'FROM_P2' if not
214 # XXX we should not be able to have 'm' state and 'FROM_P2' if not
215 # during a merge. So I (marmoute) am not sure we need the
215 # during a merge. So I (marmoute) am not sure we need the
216 # conditional at all. Double-checking this with an assert
216 # conditional at all. Double-checking this with an assert
217 # would be nice.
217 # would be nice.
218 if entry is not None:
218 if entry is not None:
219 # backup the previous state
219 # backup the previous state
220 if entry.merged: # merge
220 if entry.merged: # merge
221 size = NONNORMAL
221 size = NONNORMAL
222 elif entry[0] == b'n' and entry.from_p2:
222 elif entry[0] == b'n' and entry.from_p2:
223 size = FROM_P2
223 size = FROM_P2
224 self.otherparentset.add(f)
224 self.otherparentset.add(f)
225 if size == 0:
225 if size == 0:
226 self.copymap.pop(f, None)
226 self.copymap.pop(f, None)
227
227
228 if entry is not None and entry[0] != b'r' and "_dirs" in self.__dict__:
228 if entry is not None and entry[0] != b'r' and "_dirs" in self.__dict__:
229 self._dirs.delpath(f)
229 self._dirs.delpath(f)
230 if entry is None and "_alldirs" in self.__dict__:
230 if entry is None and "_alldirs" in self.__dict__:
231 self._alldirs.addpath(f)
231 self._alldirs.addpath(f)
232 if "filefoldmap" in self.__dict__:
232 if "filefoldmap" in self.__dict__:
233 normed = util.normcase(f)
233 normed = util.normcase(f)
234 self.filefoldmap.pop(normed, None)
234 self.filefoldmap.pop(normed, None)
235 self._map[f] = dirstatetuple(b'r', 0, size, 0)
235 self._map[f] = dirstatetuple(b'r', 0, size, 0)
236 self.nonnormalset.add(f)
236 self.nonnormalset.add(f)
237
237
238 def dropfile(self, f, oldstate):
238 def dropfile(self, f):
239 """
239 """
240 Remove a file from the dirstate. Returns True if the file was
240 Remove a file from the dirstate. Returns True if the file was
241 previously recorded.
241 previously recorded.
242 """
242 """
243 exists = self._map.pop(f, None) is not None
243 old_entry = self._map.pop(f, None)
244 exists = False
245 oldstate = b'?'
246 if old_entry is not None:
247 exists = True
248 oldstate = old_entry.state
244 if exists:
249 if exists:
245 if oldstate != b"r" and "_dirs" in self.__dict__:
250 if oldstate != b"r" and "_dirs" in self.__dict__:
246 self._dirs.delpath(f)
251 self._dirs.delpath(f)
247 if "_alldirs" in self.__dict__:
252 if "_alldirs" in self.__dict__:
248 self._alldirs.delpath(f)
253 self._alldirs.delpath(f)
249 if "filefoldmap" in self.__dict__:
254 if "filefoldmap" in self.__dict__:
250 normed = util.normcase(f)
255 normed = util.normcase(f)
251 self.filefoldmap.pop(normed, None)
256 self.filefoldmap.pop(normed, None)
252 self.nonnormalset.discard(f)
257 self.nonnormalset.discard(f)
253 return exists
258 return exists
254
259
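This hunk is the point of the changeset: dropfile() now pops the entry first and derives the old state from it, so callers no longer have to compute and pass oldstate themselves. A before/after sketch of a call site plus a self-contained stand-in for the new logic (ToyMap and its fields are illustrative, not the real dirstatemap):

# call-site change implied by the new signature (roughly):
#   before: dropped = dmap.dropfile(f, oldstate)
#   after:  dropped = dmap.dropfile(f)

class ToyMap(object):
    """Tiny stand-in for dirstatemap, tracking only the bits dropfile touches."""

    def __init__(self):
        self._map = {}            # filename -> (state, mode, size, mtime)
        self.nonnormalset = set()
        self.tracked_count = 0    # stands in for the _dirs directory counter

    def dropfile(self, f):
        """Remove f from the map; return True if it was previously recorded."""
        old_entry = self._map.pop(f, None)
        if old_entry is None:
            return False
        oldstate = old_entry[0]   # derived from the popped entry, not passed in
        if oldstate != b'r':
            self.tracked_count -= 1   # mirrors the _dirs.delpath(f) bookkeeping
        self.nonnormalset.discard(f)
        return True


m = ToyMap()
m._map[b'a.txt'] = (b'n', 0o644, 3, 0)
m.tracked_count = 1
assert m.dropfile(b'a.txt') is True and m.tracked_count == 0
assert m.dropfile(b'a.txt') is False   # already gone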
255 def clearambiguoustimes(self, files, now):
260 def clearambiguoustimes(self, files, now):
256 for f in files:
261 for f in files:
257 e = self.get(f)
262 e = self.get(f)
258 if e is not None and e[0] == b'n' and e[3] == now:
263 if e is not None and e[0] == b'n' and e[3] == now:
259 self._map[f] = dirstatetuple(e[0], e[1], e[2], AMBIGUOUS_TIME)
264 self._map[f] = dirstatetuple(e[0], e[1], e[2], AMBIGUOUS_TIME)
260 self.nonnormalset.add(f)
265 self.nonnormalset.add(f)
261
266
262 def nonnormalentries(self):
267 def nonnormalentries(self):
263 '''Compute the nonnormal dirstate entries from the dmap'''
268 '''Compute the nonnormal dirstate entries from the dmap'''
264 try:
269 try:
265 return parsers.nonnormalotherparententries(self._map)
270 return parsers.nonnormalotherparententries(self._map)
266 except AttributeError:
271 except AttributeError:
267 nonnorm = set()
272 nonnorm = set()
268 otherparent = set()
273 otherparent = set()
269 for fname, e in pycompat.iteritems(self._map):
274 for fname, e in pycompat.iteritems(self._map):
270 if e[0] != b'n' or e[3] == AMBIGUOUS_TIME:
275 if e[0] != b'n' or e[3] == AMBIGUOUS_TIME:
271 nonnorm.add(fname)
276 nonnorm.add(fname)
272 if e[0] == b'n' and e[2] == FROM_P2:
277 if e[0] == b'n' and e[2] == FROM_P2:
273 otherparent.add(fname)
278 otherparent.add(fname)
274 return nonnorm, otherparent
279 return nonnorm, otherparent
275
280
276 @propertycache
281 @propertycache
277 def filefoldmap(self):
282 def filefoldmap(self):
278 """Returns a dictionary mapping normalized case paths to their
283 """Returns a dictionary mapping normalized case paths to their
279 non-normalized versions.
284 non-normalized versions.
280 """
285 """
281 try:
286 try:
282 makefilefoldmap = parsers.make_file_foldmap
287 makefilefoldmap = parsers.make_file_foldmap
283 except AttributeError:
288 except AttributeError:
284 pass
289 pass
285 else:
290 else:
286 return makefilefoldmap(
291 return makefilefoldmap(
287 self._map, util.normcasespec, util.normcasefallback
292 self._map, util.normcasespec, util.normcasefallback
288 )
293 )
289
294
290 f = {}
295 f = {}
291 normcase = util.normcase
296 normcase = util.normcase
292 for name, s in pycompat.iteritems(self._map):
297 for name, s in pycompat.iteritems(self._map):
293 if s[0] != b'r':
298 if s[0] != b'r':
294 f[normcase(name)] = name
299 f[normcase(name)] = name
295 f[b'.'] = b'.' # prevents useless util.fspath() invocation
300 f[b'.'] = b'.' # prevents useless util.fspath() invocation
296 return f
301 return f
297
302
298 def hastrackeddir(self, d):
303 def hastrackeddir(self, d):
299 """
304 """
300 Returns True if the dirstate contains a tracked (not removed) file
305 Returns True if the dirstate contains a tracked (not removed) file
301 in this directory.
306 in this directory.
302 """
307 """
303 return d in self._dirs
308 return d in self._dirs
304
309
305 def hasdir(self, d):
310 def hasdir(self, d):
306 """
311 """
307 Returns True if the dirstate contains a file (tracked or removed)
312 Returns True if the dirstate contains a file (tracked or removed)
308 in this directory.
313 in this directory.
309 """
314 """
310 return d in self._alldirs
315 return d in self._alldirs
311
316
312 @propertycache
317 @propertycache
313 def _dirs(self):
318 def _dirs(self):
314 return pathutil.dirs(self._map, b'r')
319 return pathutil.dirs(self._map, b'r')
315
320
316 @propertycache
321 @propertycache
317 def _alldirs(self):
322 def _alldirs(self):
318 return pathutil.dirs(self._map)
323 return pathutil.dirs(self._map)
319
324
320 def _opendirstatefile(self):
325 def _opendirstatefile(self):
321 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
326 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
322 if self._pendingmode is not None and self._pendingmode != mode:
327 if self._pendingmode is not None and self._pendingmode != mode:
323 fp.close()
328 fp.close()
324 raise error.Abort(
329 raise error.Abort(
325 _(b'working directory state may be changed in parallel')
330 _(b'working directory state may be changed in parallel')
326 )
331 )
327 self._pendingmode = mode
332 self._pendingmode = mode
328 return fp
333 return fp
329
334
330 def parents(self):
335 def parents(self):
331 if not self._parents:
336 if not self._parents:
332 try:
337 try:
333 fp = self._opendirstatefile()
338 fp = self._opendirstatefile()
334 st = fp.read(2 * self._nodelen)
339 st = fp.read(2 * self._nodelen)
335 fp.close()
340 fp.close()
336 except IOError as err:
341 except IOError as err:
337 if err.errno != errno.ENOENT:
342 if err.errno != errno.ENOENT:
338 raise
343 raise
339 # File doesn't exist, so the current state is empty
344 # File doesn't exist, so the current state is empty
340 st = b''
345 st = b''
341
346
342 l = len(st)
347 l = len(st)
343 if l == self._nodelen * 2:
348 if l == self._nodelen * 2:
344 self._parents = (
349 self._parents = (
345 st[: self._nodelen],
350 st[: self._nodelen],
346 st[self._nodelen : 2 * self._nodelen],
351 st[self._nodelen : 2 * self._nodelen],
347 )
352 )
348 elif l == 0:
353 elif l == 0:
349 self._parents = (
354 self._parents = (
350 self._nodeconstants.nullid,
355 self._nodeconstants.nullid,
351 self._nodeconstants.nullid,
356 self._nodeconstants.nullid,
352 )
357 )
353 else:
358 else:
354 raise error.Abort(
359 raise error.Abort(
355 _(b'working directory state appears damaged!')
360 _(b'working directory state appears damaged!')
356 )
361 )
357
362
358 return self._parents
363 return self._parents
359
364
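The layout being parsed is simply the two parent node IDs concatenated at the start of the .hg/dirstate file (20 bytes each in the v1 format). A hedged standalone reader for that header (read_parents and the example path are illustrative):

NODELEN = 20
NULLID = b'\0' * NODELEN

def read_parents(path):
    try:
        with open(path, 'rb') as fp:
            header = fp.read(2 * NODELEN)
    except IOError:
        return NULLID, NULLID          # no dirstate yet: empty working copy
    if len(header) == 2 * NODELEN:
        return header[:NODELEN], header[NODELEN:]
    if len(header) == 0:
        return NULLID, NULLID
    raise ValueError('dirstate appears damaged')

# example (path is illustrative):
# p1, p2 = read_parents('.hg/dirstate')
# print(p1.hex(), p2.hex())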
360 def setparents(self, p1, p2):
365 def setparents(self, p1, p2):
361 self._parents = (p1, p2)
366 self._parents = (p1, p2)
362 self._dirtyparents = True
367 self._dirtyparents = True
363
368
364 def read(self):
369 def read(self):
365 # ignore HG_PENDING because identity is used only for writing
370 # ignore HG_PENDING because identity is used only for writing
366 self.identity = util.filestat.frompath(
371 self.identity = util.filestat.frompath(
367 self._opener.join(self._filename)
372 self._opener.join(self._filename)
368 )
373 )
369
374
370 try:
375 try:
371 fp = self._opendirstatefile()
376 fp = self._opendirstatefile()
372 try:
377 try:
373 st = fp.read()
378 st = fp.read()
374 finally:
379 finally:
375 fp.close()
380 fp.close()
376 except IOError as err:
381 except IOError as err:
377 if err.errno != errno.ENOENT:
382 if err.errno != errno.ENOENT:
378 raise
383 raise
379 return
384 return
380 if not st:
385 if not st:
381 return
386 return
382
387
383 if util.safehasattr(parsers, b'dict_new_presized'):
388 if util.safehasattr(parsers, b'dict_new_presized'):
384 # Make an estimate of the number of files in the dirstate based on
389 # Make an estimate of the number of files in the dirstate based on
385 # its size. This trades wasting some memory for avoiding costly
390 # its size. This trades wasting some memory for avoiding costly
386 # resizes. Each entry has a prefix of 17 bytes followed by one or
391 # resizes. Each entry has a prefix of 17 bytes followed by one or
387 # two path names. Studies on various large-scale real-world repositories
392 # two path names. Studies on various large-scale real-world repositories
388 # found 54 bytes to be a reasonable upper limit for the average path name.
393 # found 54 bytes to be a reasonable upper limit for the average path name.
389 # Copy entries are ignored for the sake of this estimate.
394 # Copy entries are ignored for the sake of this estimate.
390 self._map = parsers.dict_new_presized(len(st) // 71)
395 self._map = parsers.dict_new_presized(len(st) // 71)
391
396
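The magic 71 is just the 17-byte fixed prefix plus the roughly 54-byte average path estimate from the comment above. As a sanity check, in plain arithmetic with no Mercurial API:

FIXED_PREFIX = 17       # per-entry header bytes in the v1 dirstate format
AVG_PATH = 54           # empirical upper bound for the average path length

def estimated_entries(dirstate_bytes):
    return dirstate_bytes // (FIXED_PREFIX + AVG_PATH)

assert FIXED_PREFIX + AVG_PATH == 71
print(estimated_entries(710000))   # roughly 10000 files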
392 # Python's garbage collector triggers a GC each time a certain number
397 # Python's garbage collector triggers a GC each time a certain number
393 # of container objects (the number being defined by
398 # of container objects (the number being defined by
394 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
399 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
395 # for each file in the dirstate. The C version then immediately marks
400 # for each file in the dirstate. The C version then immediately marks
396 # them as not to be tracked by the collector. However, this has no
401 # them as not to be tracked by the collector. However, this has no
397 # effect on when GCs are triggered, only on what objects the GC looks
402 # effect on when GCs are triggered, only on what objects the GC looks
398 # into. This means that O(number of files) GCs are unavoidable.
403 # into. This means that O(number of files) GCs are unavoidable.
399 # Depending on when in the process's lifetime the dirstate is parsed,
404 # Depending on when in the process's lifetime the dirstate is parsed,
400 # this can get very expensive. As a workaround, disable GC while
405 # this can get very expensive. As a workaround, disable GC while
401 # parsing the dirstate.
406 # parsing the dirstate.
402 #
407 #
403 # (we cannot decorate the function directly since it is in a C module)
408 # (we cannot decorate the function directly since it is in a C module)
404 parse_dirstate = util.nogc(parsers.parse_dirstate)
409 parse_dirstate = util.nogc(parsers.parse_dirstate)
405 p = parse_dirstate(self._map, self.copymap, st)
410 p = parse_dirstate(self._map, self.copymap, st)
406 if not self._dirtyparents:
411 if not self._dirtyparents:
407 self.setparents(*p)
412 self.setparents(*p)
408
413
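util.nogc wraps the parser so the cyclic garbage collector stays off for the duration of the call. A rough equivalent in plain Python, offered only as a sketch of the idea rather than Mercurial's actual implementation:

import functools
import gc

def nogc(func):
    """Run func with the cyclic GC disabled, restoring it afterwards."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        was_enabled = gc.isenabled()
        gc.disable()
        try:
            return func(*args, **kwargs)
        finally:
            if was_enabled:
                gc.enable()
    return wrapper

@nogc
def parse_many(lines):
    # allocates one small tuple per line, much like parse_dirstate
    # allocates one entry per tracked file
    return [tuple(l.split(b'\0')) for l in lines]

print(len(parse_many([b'a\x00n', b'b\x00a'])))  # 2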
409 # Avoid excess attribute lookups by fast pathing certain checks
414 # Avoid excess attribute lookups by fast pathing certain checks
410 self.__contains__ = self._map.__contains__
415 self.__contains__ = self._map.__contains__
411 self.__getitem__ = self._map.__getitem__
416 self.__getitem__ = self._map.__getitem__
412 self.get = self._map.get
417 self.get = self._map.get
413
418
414 def write(self, st, now):
419 def write(self, st, now):
415 st.write(
420 st.write(
416 parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
421 parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
417 )
422 )
418 st.close()
423 st.close()
419 self._dirtyparents = False
424 self._dirtyparents = False
420 self.nonnormalset, self.otherparentset = self.nonnormalentries()
425 self.nonnormalset, self.otherparentset = self.nonnormalentries()
421
426
422 @propertycache
427 @propertycache
423 def nonnormalset(self):
428 def nonnormalset(self):
424 nonnorm, otherparents = self.nonnormalentries()
429 nonnorm, otherparents = self.nonnormalentries()
425 self.otherparentset = otherparents
430 self.otherparentset = otherparents
426 return nonnorm
431 return nonnorm
427
432
428 @propertycache
433 @propertycache
429 def otherparentset(self):
434 def otherparentset(self):
430 nonnorm, otherparents = self.nonnormalentries()
435 nonnorm, otherparents = self.nonnormalentries()
431 self.nonnormalset = nonnorm
436 self.nonnormalset = nonnorm
432 return otherparents
437 return otherparents
433
438
434 def non_normal_or_other_parent_paths(self):
439 def non_normal_or_other_parent_paths(self):
435 return self.nonnormalset.union(self.otherparentset)
440 return self.nonnormalset.union(self.otherparentset)
436
441
437 @propertycache
442 @propertycache
438 def identity(self):
443 def identity(self):
439 self._map
444 self._map
440 return self.identity
445 return self.identity
441
446
442 @propertycache
447 @propertycache
443 def dirfoldmap(self):
448 def dirfoldmap(self):
444 f = {}
449 f = {}
445 normcase = util.normcase
450 normcase = util.normcase
446 for name in self._dirs:
451 for name in self._dirs:
447 f[normcase(name)] = name
452 f[normcase(name)] = name
448 return f
453 return f
449
454
450
455
451 if rustmod is not None:
456 if rustmod is not None:
452
457
453 class dirstatemap(object):
458 class dirstatemap(object):
454 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
459 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
455 self._use_dirstate_v2 = use_dirstate_v2
460 self._use_dirstate_v2 = use_dirstate_v2
456 self._nodeconstants = nodeconstants
461 self._nodeconstants = nodeconstants
457 self._ui = ui
462 self._ui = ui
458 self._opener = opener
463 self._opener = opener
459 self._root = root
464 self._root = root
460 self._filename = b'dirstate'
465 self._filename = b'dirstate'
461 self._nodelen = 20 # Also update Rust code when changing this!
466 self._nodelen = 20 # Also update Rust code when changing this!
462 self._parents = None
467 self._parents = None
463 self._dirtyparents = False
468 self._dirtyparents = False
464
469
465 # for consistent view between _pl() and _read() invocations
470 # for consistent view between _pl() and _read() invocations
466 self._pendingmode = None
471 self._pendingmode = None
467
472
468 self._use_dirstate_tree = self._ui.configbool(
473 self._use_dirstate_tree = self._ui.configbool(
469 b"experimental",
474 b"experimental",
470 b"dirstate-tree.in-memory",
475 b"dirstate-tree.in-memory",
471 False,
476 False,
472 )
477 )
473
478
474 def addfile(
479 def addfile(
475 self,
480 self,
476 f,
481 f,
477 mode=0,
482 mode=0,
478 size=None,
483 size=None,
479 mtime=None,
484 mtime=None,
480 added=False,
485 added=False,
481 merged=False,
486 merged=False,
482 from_p2=False,
487 from_p2=False,
483 possibly_dirty=False,
488 possibly_dirty=False,
484 ):
489 ):
485 return self._rustmap.addfile(
490 return self._rustmap.addfile(
486 f,
491 f,
487 mode,
492 mode,
488 size,
493 size,
489 mtime,
494 mtime,
490 added,
495 added,
491 merged,
496 merged,
492 from_p2,
497 from_p2,
493 possibly_dirty,
498 possibly_dirty,
494 )
499 )
495
500
496 def removefile(self, *args, **kwargs):
501 def removefile(self, *args, **kwargs):
497 return self._rustmap.removefile(*args, **kwargs)
502 return self._rustmap.removefile(*args, **kwargs)
498
503
499 def dropfile(self, *args, **kwargs):
504 def dropfile(self, *args, **kwargs):
500 return self._rustmap.dropfile(*args, **kwargs)
505 return self._rustmap.dropfile(*args, **kwargs)
501
506
502 def clearambiguoustimes(self, *args, **kwargs):
507 def clearambiguoustimes(self, *args, **kwargs):
503 return self._rustmap.clearambiguoustimes(*args, **kwargs)
508 return self._rustmap.clearambiguoustimes(*args, **kwargs)
504
509
505 def nonnormalentries(self):
510 def nonnormalentries(self):
506 return self._rustmap.nonnormalentries()
511 return self._rustmap.nonnormalentries()
507
512
508 def get(self, *args, **kwargs):
513 def get(self, *args, **kwargs):
509 return self._rustmap.get(*args, **kwargs)
514 return self._rustmap.get(*args, **kwargs)
510
515
511 @property
516 @property
512 def copymap(self):
517 def copymap(self):
513 return self._rustmap.copymap()
518 return self._rustmap.copymap()
514
519
515 def directories(self):
520 def directories(self):
516 return self._rustmap.directories()
521 return self._rustmap.directories()
517
522
518 def preload(self):
523 def preload(self):
519 self._rustmap
524 self._rustmap
520
525
521 def clear(self):
526 def clear(self):
522 self._rustmap.clear()
527 self._rustmap.clear()
523 self.setparents(
528 self.setparents(
524 self._nodeconstants.nullid, self._nodeconstants.nullid
529 self._nodeconstants.nullid, self._nodeconstants.nullid
525 )
530 )
526 util.clearcachedproperty(self, b"_dirs")
531 util.clearcachedproperty(self, b"_dirs")
527 util.clearcachedproperty(self, b"_alldirs")
532 util.clearcachedproperty(self, b"_alldirs")
528 util.clearcachedproperty(self, b"dirfoldmap")
533 util.clearcachedproperty(self, b"dirfoldmap")
529
534
530 def items(self):
535 def items(self):
531 return self._rustmap.items()
536 return self._rustmap.items()
532
537
533 def keys(self):
538 def keys(self):
534 return iter(self._rustmap)
539 return iter(self._rustmap)
535
540
536 def __contains__(self, key):
541 def __contains__(self, key):
537 return key in self._rustmap
542 return key in self._rustmap
538
543
539 def __getitem__(self, item):
544 def __getitem__(self, item):
540 return self._rustmap[item]
545 return self._rustmap[item]
541
546
542 def __len__(self):
547 def __len__(self):
543 return len(self._rustmap)
548 return len(self._rustmap)
544
549
545 def __iter__(self):
550 def __iter__(self):
546 return iter(self._rustmap)
551 return iter(self._rustmap)
547
552
548 # forward for python2,3 compat
553 # forward for python2,3 compat
549 iteritems = items
554 iteritems = items
550
555
551 def _opendirstatefile(self):
556 def _opendirstatefile(self):
552 fp, mode = txnutil.trypending(
557 fp, mode = txnutil.trypending(
553 self._root, self._opener, self._filename
558 self._root, self._opener, self._filename
554 )
559 )
555 if self._pendingmode is not None and self._pendingmode != mode:
560 if self._pendingmode is not None and self._pendingmode != mode:
556 fp.close()
561 fp.close()
557 raise error.Abort(
562 raise error.Abort(
558 _(b'working directory state may be changed in parallel')
563 _(b'working directory state may be changed in parallel')
559 )
564 )
560 self._pendingmode = mode
565 self._pendingmode = mode
561 return fp
566 return fp
562
567
563 def setparents(self, p1, p2):
568 def setparents(self, p1, p2):
564 self._parents = (p1, p2)
569 self._parents = (p1, p2)
565 self._dirtyparents = True
570 self._dirtyparents = True
566
571
567 def parents(self):
572 def parents(self):
568 if not self._parents:
573 if not self._parents:
569 if self._use_dirstate_v2:
574 if self._use_dirstate_v2:
570 offset = len(rustmod.V2_FORMAT_MARKER)
575 offset = len(rustmod.V2_FORMAT_MARKER)
571 else:
576 else:
572 offset = 0
577 offset = 0
573 read_len = offset + self._nodelen * 2
578 read_len = offset + self._nodelen * 2
574 try:
579 try:
575 fp = self._opendirstatefile()
580 fp = self._opendirstatefile()
576 st = fp.read(read_len)
581 st = fp.read(read_len)
577 fp.close()
582 fp.close()
578 except IOError as err:
583 except IOError as err:
579 if err.errno != errno.ENOENT:
584 if err.errno != errno.ENOENT:
580 raise
585 raise
581 # File doesn't exist, so the current state is empty
586 # File doesn't exist, so the current state is empty
582 st = b''
587 st = b''
583
588
584 l = len(st)
589 l = len(st)
585 if l == read_len:
590 if l == read_len:
586 st = st[offset:]
591 st = st[offset:]
587 self._parents = (
592 self._parents = (
588 st[: self._nodelen],
593 st[: self._nodelen],
589 st[self._nodelen : 2 * self._nodelen],
594 st[self._nodelen : 2 * self._nodelen],
590 )
595 )
591 elif l == 0:
596 elif l == 0:
592 self._parents = (
597 self._parents = (
593 self._nodeconstants.nullid,
598 self._nodeconstants.nullid,
594 self._nodeconstants.nullid,
599 self._nodeconstants.nullid,
595 )
600 )
596 else:
601 else:
597 raise error.Abort(
602 raise error.Abort(
598 _(b'working directory state appears damaged!')
603 _(b'working directory state appears damaged!')
599 )
604 )
600
605
601 return self._parents
606 return self._parents
602
607
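The parents() method above reads two fixed-width node IDs from the start of the dirstate file, skipping the v2 format marker when one is present. The following is a minimal, standalone Python sketch of that header layout; the 20-byte node length matches self._nodelen above, while the marker argument and the sample node values are illustrative only.

    # Standalone sketch of the header layout read by parents() above.
    # NODELEN mirrors self._nodelen; the sample node values are made up.
    NODELEN = 20
    NULLID = b"\x00" * NODELEN

    def read_parents(data, v2_marker=b""):
        offset = len(v2_marker)
        read_len = offset + NODELEN * 2
        if not data:
            # no dirstate file yet: both parents are the null revision
            return (NULLID, NULLID)
        if len(data) < read_len:
            raise ValueError("working directory state appears damaged!")
        st = data[offset:read_len]
        return (st[:NODELEN], st[NODELEN : 2 * NODELEN])

    p1, p2 = b"\x11" * NODELEN, b"\x22" * NODELEN
    assert read_parents(p1 + p2 + b"...rest of the entries...") == (p1, p2)
    assert read_parents(b"") == (NULLID, NULLID)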
603 @propertycache
608 @propertycache
604 def _rustmap(self):
609 def _rustmap(self):
605 """
610 """
606 Fills the Dirstatemap when called.
611 Fills the Dirstatemap when called.
607 """
612 """
608 # ignore HG_PENDING because identity is used only for writing
613 # ignore HG_PENDING because identity is used only for writing
609 self.identity = util.filestat.frompath(
614 self.identity = util.filestat.frompath(
610 self._opener.join(self._filename)
615 self._opener.join(self._filename)
611 )
616 )
612
617
613 try:
618 try:
614 fp = self._opendirstatefile()
619 fp = self._opendirstatefile()
615 try:
620 try:
616 st = fp.read()
621 st = fp.read()
617 finally:
622 finally:
618 fp.close()
623 fp.close()
619 except IOError as err:
624 except IOError as err:
620 if err.errno != errno.ENOENT:
625 if err.errno != errno.ENOENT:
621 raise
626 raise
622 st = b''
627 st = b''
623
628
624 self._rustmap, parents = rustmod.DirstateMap.new(
629 self._rustmap, parents = rustmod.DirstateMap.new(
625 self._use_dirstate_tree, self._use_dirstate_v2, st
630 self._use_dirstate_tree, self._use_dirstate_v2, st
626 )
631 )
627
632
628 if parents and not self._dirtyparents:
633 if parents and not self._dirtyparents:
629 self.setparents(*parents)
634 self.setparents(*parents)
630
635
631 self.__contains__ = self._rustmap.__contains__
636 self.__contains__ = self._rustmap.__contains__
632 self.__getitem__ = self._rustmap.__getitem__
637 self.__getitem__ = self._rustmap.__getitem__
633 self.get = self._rustmap.get
638 self.get = self._rustmap.get
634 return self._rustmap
639 return self._rustmap
635
640
636 def write(self, st, now):
641 def write(self, st, now):
637 parents = self.parents()
642 parents = self.parents()
638 packed = self._rustmap.write(
643 packed = self._rustmap.write(
639 self._use_dirstate_v2, parents[0], parents[1], now
644 self._use_dirstate_v2, parents[0], parents[1], now
640 )
645 )
641 st.write(packed)
646 st.write(packed)
642 st.close()
647 st.close()
643 self._dirtyparents = False
648 self._dirtyparents = False
644
649
645 @propertycache
650 @propertycache
646 def filefoldmap(self):
651 def filefoldmap(self):
647 """Returns a dictionary mapping normalized case paths to their
652 """Returns a dictionary mapping normalized case paths to their
648 non-normalized versions.
653 non-normalized versions.
649 """
654 """
650 return self._rustmap.filefoldmapasdict()
655 return self._rustmap.filefoldmapasdict()
651
656
652 def hastrackeddir(self, d):
657 def hastrackeddir(self, d):
653 return self._rustmap.hastrackeddir(d)
658 return self._rustmap.hastrackeddir(d)
654
659
655 def hasdir(self, d):
660 def hasdir(self, d):
656 return self._rustmap.hasdir(d)
661 return self._rustmap.hasdir(d)
657
662
658 @propertycache
663 @propertycache
659 def identity(self):
664 def identity(self):
660 self._rustmap
665 self._rustmap
661 return self.identity
666 return self.identity
662
667
663 @property
668 @property
664 def nonnormalset(self):
669 def nonnormalset(self):
665 nonnorm = self._rustmap.non_normal_entries()
670 nonnorm = self._rustmap.non_normal_entries()
666 return nonnorm
671 return nonnorm
667
672
668 @propertycache
673 @propertycache
669 def otherparentset(self):
674 def otherparentset(self):
670 otherparents = self._rustmap.other_parent_entries()
675 otherparents = self._rustmap.other_parent_entries()
671 return otherparents
676 return otherparents
672
677
673 def non_normal_or_other_parent_paths(self):
678 def non_normal_or_other_parent_paths(self):
674 return self._rustmap.non_normal_or_other_parent_paths()
679 return self._rustmap.non_normal_or_other_parent_paths()
675
680
676 @propertycache
681 @propertycache
677 def dirfoldmap(self):
682 def dirfoldmap(self):
678 f = {}
683 f = {}
679 normcase = util.normcase
684 normcase = util.normcase
680 for name, _pseudo_entry in self.directories():
685 for name, _pseudo_entry in self.directories():
681 f[normcase(name)] = name
686 f[normcase(name)] = name
682 return f
687 return f
@@ -1,478 +1,481 b''
1 // dirstate_map.rs
1 // dirstate_map.rs
2 //
2 //
3 // Copyright 2019 Raphaël Gomès <rgomes@octobus.net>
3 // Copyright 2019 Raphaël Gomès <rgomes@octobus.net>
4 //
4 //
5 // This software may be used and distributed according to the terms of the
5 // This software may be used and distributed according to the terms of the
6 // GNU General Public License version 2 or any later version.
6 // GNU General Public License version 2 or any later version.
7
7
8 use crate::dirstate::parsers::Timestamp;
8 use crate::dirstate::parsers::Timestamp;
9 use crate::{
9 use crate::{
10 dirstate::EntryState,
10 dirstate::EntryState,
11 dirstate::MTIME_UNSET,
11 dirstate::MTIME_UNSET,
12 dirstate::SIZE_FROM_OTHER_PARENT,
12 dirstate::SIZE_FROM_OTHER_PARENT,
13 dirstate::SIZE_NON_NORMAL,
13 dirstate::SIZE_NON_NORMAL,
14 dirstate::V1_RANGEMASK,
14 dirstate::V1_RANGEMASK,
15 pack_dirstate, parse_dirstate,
15 pack_dirstate, parse_dirstate,
16 utils::hg_path::{HgPath, HgPathBuf},
16 utils::hg_path::{HgPath, HgPathBuf},
17 CopyMap, DirsMultiset, DirstateEntry, DirstateError, DirstateParents,
17 CopyMap, DirsMultiset, DirstateEntry, DirstateError, DirstateParents,
18 StateMap,
18 StateMap,
19 };
19 };
20 use micro_timer::timed;
20 use micro_timer::timed;
21 use std::collections::HashSet;
21 use std::collections::HashSet;
22 use std::iter::FromIterator;
22 use std::iter::FromIterator;
23 use std::ops::Deref;
23 use std::ops::Deref;
24
24
25 #[derive(Default)]
25 #[derive(Default)]
26 pub struct DirstateMap {
26 pub struct DirstateMap {
27 state_map: StateMap,
27 state_map: StateMap,
28 pub copy_map: CopyMap,
28 pub copy_map: CopyMap,
29 pub dirs: Option<DirsMultiset>,
29 pub dirs: Option<DirsMultiset>,
30 pub all_dirs: Option<DirsMultiset>,
30 pub all_dirs: Option<DirsMultiset>,
31 non_normal_set: Option<HashSet<HgPathBuf>>,
31 non_normal_set: Option<HashSet<HgPathBuf>>,
32 other_parent_set: Option<HashSet<HgPathBuf>>,
32 other_parent_set: Option<HashSet<HgPathBuf>>,
33 }
33 }
34
34
35 /// Should only really be used in python interface code, for clarity
35 /// Should only really be used in python interface code, for clarity
36 impl Deref for DirstateMap {
36 impl Deref for DirstateMap {
37 type Target = StateMap;
37 type Target = StateMap;
38
38
39 fn deref(&self) -> &Self::Target {
39 fn deref(&self) -> &Self::Target {
40 &self.state_map
40 &self.state_map
41 }
41 }
42 }
42 }
43
43
44 impl FromIterator<(HgPathBuf, DirstateEntry)> for DirstateMap {
44 impl FromIterator<(HgPathBuf, DirstateEntry)> for DirstateMap {
45 fn from_iter<I: IntoIterator<Item = (HgPathBuf, DirstateEntry)>>(
45 fn from_iter<I: IntoIterator<Item = (HgPathBuf, DirstateEntry)>>(
46 iter: I,
46 iter: I,
47 ) -> Self {
47 ) -> Self {
48 Self {
48 Self {
49 state_map: iter.into_iter().collect(),
49 state_map: iter.into_iter().collect(),
50 ..Self::default()
50 ..Self::default()
51 }
51 }
52 }
52 }
53 }
53 }
54
54
55 impl DirstateMap {
55 impl DirstateMap {
56 pub fn new() -> Self {
56 pub fn new() -> Self {
57 Self::default()
57 Self::default()
58 }
58 }
59
59
60 pub fn clear(&mut self) {
60 pub fn clear(&mut self) {
61 self.state_map = StateMap::default();
61 self.state_map = StateMap::default();
62 self.copy_map.clear();
62 self.copy_map.clear();
63 self.non_normal_set = None;
63 self.non_normal_set = None;
64 self.other_parent_set = None;
64 self.other_parent_set = None;
65 }
65 }
66
66
67 /// Add a tracked file to the dirstate
67 /// Add a tracked file to the dirstate
68 pub fn add_file(
68 pub fn add_file(
69 &mut self,
69 &mut self,
70 filename: &HgPath,
70 filename: &HgPath,
71 entry: DirstateEntry,
71 entry: DirstateEntry,
72 // XXX once the dust settles this should probably become an enum
72 // XXX once the dust settles this should probably become an enum
73 added: bool,
73 added: bool,
74 merged: bool,
74 merged: bool,
75 from_p2: bool,
75 from_p2: bool,
76 possibly_dirty: bool,
76 possibly_dirty: bool,
77 ) -> Result<(), DirstateError> {
77 ) -> Result<(), DirstateError> {
78 let mut entry = entry;
78 let mut entry = entry;
79 if added {
79 if added {
80 assert!(!merged);
80 assert!(!merged);
81 assert!(!possibly_dirty);
81 assert!(!possibly_dirty);
82 assert!(!from_p2);
82 assert!(!from_p2);
83 entry.state = EntryState::Added;
83 entry.state = EntryState::Added;
84 entry.size = SIZE_NON_NORMAL;
84 entry.size = SIZE_NON_NORMAL;
85 entry.mtime = MTIME_UNSET;
85 entry.mtime = MTIME_UNSET;
86 } else if merged {
86 } else if merged {
87 assert!(!possibly_dirty);
87 assert!(!possibly_dirty);
88 assert!(!from_p2);
88 assert!(!from_p2);
89 entry.state = EntryState::Merged;
89 entry.state = EntryState::Merged;
90 entry.size = SIZE_FROM_OTHER_PARENT;
90 entry.size = SIZE_FROM_OTHER_PARENT;
91 entry.mtime = MTIME_UNSET;
91 entry.mtime = MTIME_UNSET;
92 } else if from_p2 {
92 } else if from_p2 {
93 assert!(!possibly_dirty);
93 assert!(!possibly_dirty);
94 entry.state = EntryState::Normal;
94 entry.state = EntryState::Normal;
95 entry.size = SIZE_FROM_OTHER_PARENT;
95 entry.size = SIZE_FROM_OTHER_PARENT;
96 entry.mtime = MTIME_UNSET;
96 entry.mtime = MTIME_UNSET;
97 } else if possibly_dirty {
97 } else if possibly_dirty {
98 entry.state = EntryState::Normal;
98 entry.state = EntryState::Normal;
99 entry.size = SIZE_NON_NORMAL;
99 entry.size = SIZE_NON_NORMAL;
100 entry.mtime = MTIME_UNSET;
100 entry.mtime = MTIME_UNSET;
101 } else {
101 } else {
102 entry.state = EntryState::Normal;
102 entry.state = EntryState::Normal;
103 entry.size = entry.size & V1_RANGEMASK;
103 entry.size = entry.size & V1_RANGEMASK;
104 entry.mtime = entry.mtime & V1_RANGEMASK;
104 entry.mtime = entry.mtime & V1_RANGEMASK;
105 }
105 }
106 let old_state = match self.get(filename) {
106 let old_state = match self.get(filename) {
107 Some(e) => e.state,
107 Some(e) => e.state,
108 None => EntryState::Unknown,
108 None => EntryState::Unknown,
109 };
109 };
110 if old_state == EntryState::Unknown || old_state == EntryState::Removed
110 if old_state == EntryState::Unknown || old_state == EntryState::Removed
111 {
111 {
112 if let Some(ref mut dirs) = self.dirs {
112 if let Some(ref mut dirs) = self.dirs {
113 dirs.add_path(filename)?;
113 dirs.add_path(filename)?;
114 }
114 }
115 }
115 }
116 if old_state == EntryState::Unknown {
116 if old_state == EntryState::Unknown {
117 if let Some(ref mut all_dirs) = self.all_dirs {
117 if let Some(ref mut all_dirs) = self.all_dirs {
118 all_dirs.add_path(filename)?;
118 all_dirs.add_path(filename)?;
119 }
119 }
120 }
120 }
121 self.state_map.insert(filename.to_owned(), entry.to_owned());
121 self.state_map.insert(filename.to_owned(), entry.to_owned());
122
122
123 if entry.is_non_normal() {
123 if entry.is_non_normal() {
124 self.get_non_normal_other_parent_entries()
124 self.get_non_normal_other_parent_entries()
125 .0
125 .0
126 .insert(filename.to_owned());
126 .insert(filename.to_owned());
127 }
127 }
128
128
129 if entry.is_from_other_parent() {
129 if entry.is_from_other_parent() {
130 self.get_non_normal_other_parent_entries()
130 self.get_non_normal_other_parent_entries()
131 .1
131 .1
132 .insert(filename.to_owned());
132 .insert(filename.to_owned());
133 }
133 }
134 Ok(())
134 Ok(())
135 }
135 }
136
136
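The flag handling at the top of add_file maps the caller's intent to a stored state plus sentinel size/mtime values, with the first matching flag taking precedence. A rough Python sketch of that precedence follows; the sentinel names refer to the Rust constants and their numeric values are deliberately left out of the sketch.

    # Sketch of the flag precedence in add_file: the first matching flag
    # decides the stored state and which sentinel size/mtime are used.
    # Sentinel names refer to the Rust constants; values are omitted here.
    def classify(added=False, merged=False, from_p2=False, possibly_dirty=False):
        if added:
            return ("a", "SIZE_NON_NORMAL", "MTIME_UNSET")
        if merged:
            return ("m", "SIZE_FROM_OTHER_PARENT", "MTIME_UNSET")
        if from_p2:
            return ("n", "SIZE_FROM_OTHER_PARENT", "MTIME_UNSET")
        if possibly_dirty:
            return ("n", "SIZE_NON_NORMAL", "MTIME_UNSET")
        return ("n", "size & V1_RANGEMASK", "mtime & V1_RANGEMASK")

    assert classify(added=True) == ("a", "SIZE_NON_NORMAL", "MTIME_UNSET")
    assert classify() == ("n", "size & V1_RANGEMASK", "mtime & V1_RANGEMASK")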
137 /// Mark a file as removed in the dirstate.
137 /// Mark a file as removed in the dirstate.
138 ///
138 ///
139 /// The `size` parameter is used to store sentinel values that indicate
139 /// The `size` parameter is used to store sentinel values that indicate
140 /// the file's previous state. In the future, we should refactor this
140 /// the file's previous state. In the future, we should refactor this
141 /// to be more explicit about what that state is.
141 /// to be more explicit about what that state is.
142 pub fn remove_file(
142 pub fn remove_file(
143 &mut self,
143 &mut self,
144 filename: &HgPath,
144 filename: &HgPath,
145 in_merge: bool,
145 in_merge: bool,
146 ) -> Result<(), DirstateError> {
146 ) -> Result<(), DirstateError> {
147 let old_entry_opt = self.get(filename);
147 let old_entry_opt = self.get(filename);
148 let old_state = match old_entry_opt {
148 let old_state = match old_entry_opt {
149 Some(e) => e.state,
149 Some(e) => e.state,
150 None => EntryState::Unknown,
150 None => EntryState::Unknown,
151 };
151 };
152 let mut size = 0;
152 let mut size = 0;
153 if in_merge {
153 if in_merge {
154 // XXX we should not be able to have 'm' state and 'FROM_P2' if not
154 // XXX we should not be able to have 'm' state and 'FROM_P2' if not
155 // during a merge. So I (marmoute) am not sure we need the
155 // during a merge. So I (marmoute) am not sure we need the
156 // conditional at all. Double-checking this with an assert
156 // conditional at all. Double-checking this with an assert
157 // would be nice.
157 // would be nice.
158 if let Some(old_entry) = old_entry_opt {
158 if let Some(old_entry) = old_entry_opt {
159 // backup the previous state
159 // backup the previous state
160 if old_entry.state == EntryState::Merged {
160 if old_entry.state == EntryState::Merged {
161 size = SIZE_NON_NORMAL;
161 size = SIZE_NON_NORMAL;
162 } else if old_entry.state == EntryState::Normal
162 } else if old_entry.state == EntryState::Normal
163 && old_entry.size == SIZE_FROM_OTHER_PARENT
163 && old_entry.size == SIZE_FROM_OTHER_PARENT
164 {
164 {
165 // other parent
165 // other parent
166 size = SIZE_FROM_OTHER_PARENT;
166 size = SIZE_FROM_OTHER_PARENT;
167 self.get_non_normal_other_parent_entries()
167 self.get_non_normal_other_parent_entries()
168 .1
168 .1
169 .insert(filename.to_owned());
169 .insert(filename.to_owned());
170 }
170 }
171 }
171 }
172 }
172 }
173 if old_state != EntryState::Unknown && old_state != EntryState::Removed
173 if old_state != EntryState::Unknown && old_state != EntryState::Removed
174 {
174 {
175 if let Some(ref mut dirs) = self.dirs {
175 if let Some(ref mut dirs) = self.dirs {
176 dirs.delete_path(filename)?;
176 dirs.delete_path(filename)?;
177 }
177 }
178 }
178 }
179 if old_state == EntryState::Unknown {
179 if old_state == EntryState::Unknown {
180 if let Some(ref mut all_dirs) = self.all_dirs {
180 if let Some(ref mut all_dirs) = self.all_dirs {
181 all_dirs.add_path(filename)?;
181 all_dirs.add_path(filename)?;
182 }
182 }
183 }
183 }
184 if size == 0 {
184 if size == 0 {
185 self.copy_map.remove(filename);
185 self.copy_map.remove(filename);
186 }
186 }
187
187
188 self.state_map.insert(
188 self.state_map.insert(
189 filename.to_owned(),
189 filename.to_owned(),
190 DirstateEntry {
190 DirstateEntry {
191 state: EntryState::Removed,
191 state: EntryState::Removed,
192 mode: 0,
192 mode: 0,
193 size,
193 size,
194 mtime: 0,
194 mtime: 0,
195 },
195 },
196 );
196 );
197 self.get_non_normal_other_parent_entries()
197 self.get_non_normal_other_parent_entries()
198 .0
198 .0
199 .insert(filename.to_owned());
199 .insert(filename.to_owned());
200 Ok(())
200 Ok(())
201 }
201 }
202
202
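During a merge, remove_file records where the removed file came from by storing a sentinel in the size field, so that state can be restored later. A small Python sketch of that choice; the sentinel values -1 and -2 are assumed here for illustration, standing in for SIZE_NON_NORMAL and SIZE_FROM_OTHER_PARENT.

    # Sketch of the sentinel remove_file stores in `size` during a merge.
    SIZE_NON_NORMAL = -1          # assumed value, for illustration only
    SIZE_FROM_OTHER_PARENT = -2   # assumed value, for illustration only

    def removal_size(old_entry, in_merge):
        if in_merge and old_entry is not None:
            state, _mode, size, _mtime = old_entry
            if state == "m":
                return SIZE_NON_NORMAL
            if state == "n" and size == SIZE_FROM_OTHER_PARENT:
                return SIZE_FROM_OTHER_PARENT
        return 0  # size 0 also means: drop any copy record for the file

    assert removal_size(("m", 0, 0, 0), in_merge=True) == SIZE_NON_NORMAL
    assert removal_size(("n", 0, -2, 0), in_merge=True) == SIZE_FROM_OTHER_PARENT
    assert removal_size(("n", 0, 12, 0), in_merge=False) == 0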
203 /// Remove a file from the dirstate.
203 /// Remove a file from the dirstate.
204 /// Returns `true` if the file was previously recorded.
204 /// Returns `true` if the file was previously recorded.
205 pub fn drop_file(
205 pub fn drop_file(
206 &mut self,
206 &mut self,
207 filename: &HgPath,
207 filename: &HgPath,
208 old_state: EntryState,
209 ) -> Result<bool, DirstateError> {
208 ) -> Result<bool, DirstateError> {
209 let old_state = match self.get(filename) {
210 Some(e) => e.state,
211 None => EntryState::Unknown,
212 };
210 let exists = self.state_map.remove(filename).is_some();
213 let exists = self.state_map.remove(filename).is_some();
211
214
212 if exists {
215 if exists {
213 if old_state != EntryState::Removed {
216 if old_state != EntryState::Removed {
214 if let Some(ref mut dirs) = self.dirs {
217 if let Some(ref mut dirs) = self.dirs {
215 dirs.delete_path(filename)?;
218 dirs.delete_path(filename)?;
216 }
219 }
217 }
220 }
218 if let Some(ref mut all_dirs) = self.all_dirs {
221 if let Some(ref mut all_dirs) = self.all_dirs {
219 all_dirs.delete_path(filename)?;
222 all_dirs.delete_path(filename)?;
220 }
223 }
221 }
224 }
222 self.get_non_normal_other_parent_entries()
225 self.get_non_normal_other_parent_entries()
223 .0
226 .0
224 .remove(filename);
227 .remove(filename);
225
228
226 Ok(exists)
229 Ok(exists)
227 }
230 }
228
231
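drop_file is the function the changeset title refers to: rather than receiving old_state from the Python layer, it now derives the previous state from its own entry before removing it. Here is a small Python model of that new behaviour; plain sets stand in for the dirs/all_dirs multisets, and the single-level directory handling is a simplification.

    # Python model of the new drop_file: the previous state is looked up
    # internally, then used to decide which directory sets to update.
    class ToyMap:
        def __init__(self):
            self.entries = {}      # path -> state char ('n', 'a', 'r', 'm')
            self.dirs = set()      # dirs of non-removed files (simplified)
            self.all_dirs = set()  # dirs of all recorded files (simplified)

        def drop_file(self, path):
            old_state = self.entries.get(path, "?")   # '?' == unknown
            existed = self.entries.pop(path, None) is not None
            if existed:
                d = path.rpartition("/")[0]
                if old_state != "r":
                    self.dirs.discard(d)
                self.all_dirs.discard(d)
            return existed

    m = ToyMap()
    m.entries["dir/file"] = "n"
    m.dirs.add("dir")
    m.all_dirs.add("dir")
    assert m.drop_file("dir/file") is True
    assert m.drop_file("dir/file") is False
    assert m.dirs == set() and m.all_dirs == set()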
229 pub fn clear_ambiguous_times(
232 pub fn clear_ambiguous_times(
230 &mut self,
233 &mut self,
231 filenames: Vec<HgPathBuf>,
234 filenames: Vec<HgPathBuf>,
232 now: i32,
235 now: i32,
233 ) {
236 ) {
234 for filename in filenames {
237 for filename in filenames {
235 if let Some(entry) = self.state_map.get_mut(&filename) {
238 if let Some(entry) = self.state_map.get_mut(&filename) {
236 if entry.clear_ambiguous_mtime(now) {
239 if entry.clear_ambiguous_mtime(now) {
237 self.get_non_normal_other_parent_entries()
240 self.get_non_normal_other_parent_entries()
238 .0
241 .0
239 .insert(filename.to_owned());
242 .insert(filename.to_owned());
240 }
243 }
241 }
244 }
242 }
245 }
243 }
246 }
244
247
245 pub fn non_normal_entries_remove(&mut self, key: impl AsRef<HgPath>) {
248 pub fn non_normal_entries_remove(&mut self, key: impl AsRef<HgPath>) {
246 self.get_non_normal_other_parent_entries()
249 self.get_non_normal_other_parent_entries()
247 .0
250 .0
248 .remove(key.as_ref());
251 .remove(key.as_ref());
249 }
252 }
250
253
251 pub fn non_normal_entries_union(
254 pub fn non_normal_entries_union(
252 &mut self,
255 &mut self,
253 other: HashSet<HgPathBuf>,
256 other: HashSet<HgPathBuf>,
254 ) -> Vec<HgPathBuf> {
257 ) -> Vec<HgPathBuf> {
255 self.get_non_normal_other_parent_entries()
258 self.get_non_normal_other_parent_entries()
256 .0
259 .0
257 .union(&other)
260 .union(&other)
258 .map(ToOwned::to_owned)
261 .map(ToOwned::to_owned)
259 .collect()
262 .collect()
260 }
263 }
261
264
262 pub fn get_non_normal_other_parent_entries(
265 pub fn get_non_normal_other_parent_entries(
263 &mut self,
266 &mut self,
264 ) -> (&mut HashSet<HgPathBuf>, &mut HashSet<HgPathBuf>) {
267 ) -> (&mut HashSet<HgPathBuf>, &mut HashSet<HgPathBuf>) {
265 self.set_non_normal_other_parent_entries(false);
268 self.set_non_normal_other_parent_entries(false);
266 (
269 (
267 self.non_normal_set.as_mut().unwrap(),
270 self.non_normal_set.as_mut().unwrap(),
268 self.other_parent_set.as_mut().unwrap(),
271 self.other_parent_set.as_mut().unwrap(),
269 )
272 )
270 }
273 }
271
274
272 /// Useful to get immutable references to those sets in contexts where
275 /// Useful to get immutable references to those sets in contexts where
273 /// you only have an immutable reference to the `DirstateMap`, like when
276 /// you only have an immutable reference to the `DirstateMap`, like when
274 /// sharing references with Python.
277 /// sharing references with Python.
275 ///
278 ///
276 /// TODO, get rid of this along with the other "setter/getter" stuff when
279 /// TODO, get rid of this along with the other "setter/getter" stuff when
277 /// a nice typestate plan is defined.
280 /// a nice typestate plan is defined.
278 ///
281 ///
279 /// # Panics
282 /// # Panics
280 ///
283 ///
281 /// Will panic if either set is `None`.
284 /// Will panic if either set is `None`.
282 pub fn get_non_normal_other_parent_entries_panic(
285 pub fn get_non_normal_other_parent_entries_panic(
283 &self,
286 &self,
284 ) -> (&HashSet<HgPathBuf>, &HashSet<HgPathBuf>) {
287 ) -> (&HashSet<HgPathBuf>, &HashSet<HgPathBuf>) {
285 (
288 (
286 self.non_normal_set.as_ref().unwrap(),
289 self.non_normal_set.as_ref().unwrap(),
287 self.other_parent_set.as_ref().unwrap(),
290 self.other_parent_set.as_ref().unwrap(),
288 )
291 )
289 }
292 }
290
293
291 pub fn set_non_normal_other_parent_entries(&mut self, force: bool) {
294 pub fn set_non_normal_other_parent_entries(&mut self, force: bool) {
292 if !force
295 if !force
293 && self.non_normal_set.is_some()
296 && self.non_normal_set.is_some()
294 && self.other_parent_set.is_some()
297 && self.other_parent_set.is_some()
295 {
298 {
296 return;
299 return;
297 }
300 }
298 let mut non_normal = HashSet::new();
301 let mut non_normal = HashSet::new();
299 let mut other_parent = HashSet::new();
302 let mut other_parent = HashSet::new();
300
303
301 for (filename, entry) in self.state_map.iter() {
304 for (filename, entry) in self.state_map.iter() {
302 if entry.is_non_normal() {
305 if entry.is_non_normal() {
303 non_normal.insert(filename.to_owned());
306 non_normal.insert(filename.to_owned());
304 }
307 }
305 if entry.is_from_other_parent() {
308 if entry.is_from_other_parent() {
306 other_parent.insert(filename.to_owned());
309 other_parent.insert(filename.to_owned());
307 }
310 }
308 }
311 }
309 self.non_normal_set = Some(non_normal);
312 self.non_normal_set = Some(non_normal);
310 self.other_parent_set = Some(other_parent);
313 self.other_parent_set = Some(other_parent);
311 }
314 }
312
315
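set_non_normal_other_parent_entries rebuilds both cached sets in one pass over the state map, unless they already exist and force is false. A short Python approximation of that scan follows; the two predicates paraphrase is_non_normal()/is_from_other_parent() and may not cover every corner case of the real entries.

    # Approximate Python version of the one-pass cache rebuild above.
    # An entry is (state, mode, size, mtime); the predicates are rough.
    def build_caches(state_map):
        non_normal, other_parent = set(), set()
        for path, (state, _mode, size, mtime) in state_map.items():
            if state != "n" or mtime == -1:        # roughly is_non_normal()
                non_normal.add(path)
            if state == "n" and size == -2:        # roughly is_from_other_parent()
                other_parent.add(path)
        return non_normal, other_parent

    nn, op = build_caches({"f2": ("n", 1337, 1337, -1), "f4": ("n", 1337, -2, 1337)})
    assert nn == {"f2"} and op == {"f4"}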
313 /// Both of these setters and their uses appear to be the simplest way to
316 /// Both of these setters and their uses appear to be the simplest way to
314 /// emulate a Python lazy property, but it is ugly and unidiomatic.
317 /// emulate a Python lazy property, but it is ugly and unidiomatic.
315 /// TODO One day, rewriting this struct using the typestate might be a
318 /// TODO One day, rewriting this struct using the typestate might be a
316 /// good idea.
319 /// good idea.
317 pub fn set_all_dirs(&mut self) -> Result<(), DirstateError> {
320 pub fn set_all_dirs(&mut self) -> Result<(), DirstateError> {
318 if self.all_dirs.is_none() {
321 if self.all_dirs.is_none() {
319 self.all_dirs = Some(DirsMultiset::from_dirstate(
322 self.all_dirs = Some(DirsMultiset::from_dirstate(
320 self.state_map.iter().map(|(k, v)| Ok((k, *v))),
323 self.state_map.iter().map(|(k, v)| Ok((k, *v))),
321 None,
324 None,
322 )?);
325 )?);
323 }
326 }
324 Ok(())
327 Ok(())
325 }
328 }
326
329
327 pub fn set_dirs(&mut self) -> Result<(), DirstateError> {
330 pub fn set_dirs(&mut self) -> Result<(), DirstateError> {
328 if self.dirs.is_none() {
331 if self.dirs.is_none() {
329 self.dirs = Some(DirsMultiset::from_dirstate(
332 self.dirs = Some(DirsMultiset::from_dirstate(
330 self.state_map.iter().map(|(k, v)| Ok((k, *v))),
333 self.state_map.iter().map(|(k, v)| Ok((k, *v))),
331 Some(EntryState::Removed),
334 Some(EntryState::Removed),
332 )?);
335 )?);
333 }
336 }
334 Ok(())
337 Ok(())
335 }
338 }
336
339
337 pub fn has_tracked_dir(
340 pub fn has_tracked_dir(
338 &mut self,
341 &mut self,
339 directory: &HgPath,
342 directory: &HgPath,
340 ) -> Result<bool, DirstateError> {
343 ) -> Result<bool, DirstateError> {
341 self.set_dirs()?;
344 self.set_dirs()?;
342 Ok(self.dirs.as_ref().unwrap().contains(directory))
345 Ok(self.dirs.as_ref().unwrap().contains(directory))
343 }
346 }
344
347
345 pub fn has_dir(
348 pub fn has_dir(
346 &mut self,
349 &mut self,
347 directory: &HgPath,
350 directory: &HgPath,
348 ) -> Result<bool, DirstateError> {
351 ) -> Result<bool, DirstateError> {
349 self.set_all_dirs()?;
352 self.set_all_dirs()?;
350 Ok(self.all_dirs.as_ref().unwrap().contains(directory))
353 Ok(self.all_dirs.as_ref().unwrap().contains(directory))
351 }
354 }
352
355
353 #[timed]
356 #[timed]
354 pub fn read(
357 pub fn read(
355 &mut self,
358 &mut self,
356 file_contents: &[u8],
359 file_contents: &[u8],
357 ) -> Result<Option<DirstateParents>, DirstateError> {
360 ) -> Result<Option<DirstateParents>, DirstateError> {
358 if file_contents.is_empty() {
361 if file_contents.is_empty() {
359 return Ok(None);
362 return Ok(None);
360 }
363 }
361
364
362 let (parents, entries, copies) = parse_dirstate(file_contents)?;
365 let (parents, entries, copies) = parse_dirstate(file_contents)?;
363 self.state_map.extend(
366 self.state_map.extend(
364 entries
367 entries
365 .into_iter()
368 .into_iter()
366 .map(|(path, entry)| (path.to_owned(), entry)),
369 .map(|(path, entry)| (path.to_owned(), entry)),
367 );
370 );
368 self.copy_map.extend(
371 self.copy_map.extend(
369 copies
372 copies
370 .into_iter()
373 .into_iter()
371 .map(|(path, copy)| (path.to_owned(), copy.to_owned())),
374 .map(|(path, copy)| (path.to_owned(), copy.to_owned())),
372 );
375 );
373 Ok(Some(parents.clone()))
376 Ok(Some(parents.clone()))
374 }
377 }
375
378
376 pub fn pack(
379 pub fn pack(
377 &mut self,
380 &mut self,
378 parents: DirstateParents,
381 parents: DirstateParents,
379 now: Timestamp,
382 now: Timestamp,
380 ) -> Result<Vec<u8>, DirstateError> {
383 ) -> Result<Vec<u8>, DirstateError> {
381 let packed =
384 let packed =
382 pack_dirstate(&mut self.state_map, &self.copy_map, parents, now)?;
385 pack_dirstate(&mut self.state_map, &self.copy_map, parents, now)?;
383
386
384 self.set_non_normal_other_parent_entries(true);
387 self.set_non_normal_other_parent_entries(true);
385 Ok(packed)
388 Ok(packed)
386 }
389 }
387 }
390 }
388
391
389 #[cfg(test)]
392 #[cfg(test)]
390 mod tests {
393 mod tests {
391 use super::*;
394 use super::*;
392
395
393 #[test]
396 #[test]
394 fn test_dirs_multiset() {
397 fn test_dirs_multiset() {
395 let mut map = DirstateMap::new();
398 let mut map = DirstateMap::new();
396 assert!(map.dirs.is_none());
399 assert!(map.dirs.is_none());
397 assert!(map.all_dirs.is_none());
400 assert!(map.all_dirs.is_none());
398
401
399 assert_eq!(map.has_dir(HgPath::new(b"nope")).unwrap(), false);
402 assert_eq!(map.has_dir(HgPath::new(b"nope")).unwrap(), false);
400 assert!(map.all_dirs.is_some());
403 assert!(map.all_dirs.is_some());
401 assert!(map.dirs.is_none());
404 assert!(map.dirs.is_none());
402
405
403 assert_eq!(map.has_tracked_dir(HgPath::new(b"nope")).unwrap(), false);
406 assert_eq!(map.has_tracked_dir(HgPath::new(b"nope")).unwrap(), false);
404 assert!(map.dirs.is_some());
407 assert!(map.dirs.is_some());
405 }
408 }
406
409
407 #[test]
410 #[test]
408 fn test_add_file() {
411 fn test_add_file() {
409 let mut map = DirstateMap::new();
412 let mut map = DirstateMap::new();
410
413
411 assert_eq!(0, map.len());
414 assert_eq!(0, map.len());
412
415
413 map.add_file(
416 map.add_file(
414 HgPath::new(b"meh"),
417 HgPath::new(b"meh"),
415 DirstateEntry {
418 DirstateEntry {
416 state: EntryState::Normal,
419 state: EntryState::Normal,
417 mode: 1337,
420 mode: 1337,
418 mtime: 1337,
421 mtime: 1337,
419 size: 1337,
422 size: 1337,
420 },
423 },
421 false,
424 false,
422 false,
425 false,
423 false,
426 false,
424 false,
427 false,
425 )
428 )
426 .unwrap();
429 .unwrap();
427
430
428 assert_eq!(1, map.len());
431 assert_eq!(1, map.len());
429 assert_eq!(0, map.get_non_normal_other_parent_entries().0.len());
432 assert_eq!(0, map.get_non_normal_other_parent_entries().0.len());
430 assert_eq!(0, map.get_non_normal_other_parent_entries().1.len());
433 assert_eq!(0, map.get_non_normal_other_parent_entries().1.len());
431 }
434 }
432
435
433 #[test]
436 #[test]
434 fn test_non_normal_other_parent_entries() {
437 fn test_non_normal_other_parent_entries() {
435 let mut map: DirstateMap = [
438 let mut map: DirstateMap = [
436 (b"f1", (EntryState::Removed, 1337, 1337, 1337)),
439 (b"f1", (EntryState::Removed, 1337, 1337, 1337)),
437 (b"f2", (EntryState::Normal, 1337, 1337, -1)),
440 (b"f2", (EntryState::Normal, 1337, 1337, -1)),
438 (b"f3", (EntryState::Normal, 1337, 1337, 1337)),
441 (b"f3", (EntryState::Normal, 1337, 1337, 1337)),
439 (b"f4", (EntryState::Normal, 1337, -2, 1337)),
442 (b"f4", (EntryState::Normal, 1337, -2, 1337)),
440 (b"f5", (EntryState::Added, 1337, 1337, 1337)),
443 (b"f5", (EntryState::Added, 1337, 1337, 1337)),
441 (b"f6", (EntryState::Added, 1337, 1337, -1)),
444 (b"f6", (EntryState::Added, 1337, 1337, -1)),
442 (b"f7", (EntryState::Merged, 1337, 1337, -1)),
445 (b"f7", (EntryState::Merged, 1337, 1337, -1)),
443 (b"f8", (EntryState::Merged, 1337, 1337, 1337)),
446 (b"f8", (EntryState::Merged, 1337, 1337, 1337)),
444 (b"f9", (EntryState::Merged, 1337, -2, 1337)),
447 (b"f9", (EntryState::Merged, 1337, -2, 1337)),
445 (b"fa", (EntryState::Added, 1337, -2, 1337)),
448 (b"fa", (EntryState::Added, 1337, -2, 1337)),
446 (b"fb", (EntryState::Removed, 1337, -2, 1337)),
449 (b"fb", (EntryState::Removed, 1337, -2, 1337)),
447 ]
450 ]
448 .iter()
451 .iter()
449 .map(|(fname, (state, mode, size, mtime))| {
452 .map(|(fname, (state, mode, size, mtime))| {
450 (
453 (
451 HgPathBuf::from_bytes(fname.as_ref()),
454 HgPathBuf::from_bytes(fname.as_ref()),
452 DirstateEntry {
455 DirstateEntry {
453 state: *state,
456 state: *state,
454 mode: *mode,
457 mode: *mode,
455 size: *size,
458 size: *size,
456 mtime: *mtime,
459 mtime: *mtime,
457 },
460 },
458 )
461 )
459 })
462 })
460 .collect();
463 .collect();
461
464
462 let mut non_normal = [
465 let mut non_normal = [
463 b"f1", b"f2", b"f5", b"f6", b"f7", b"f8", b"f9", b"fa", b"fb",
466 b"f1", b"f2", b"f5", b"f6", b"f7", b"f8", b"f9", b"fa", b"fb",
464 ]
467 ]
465 .iter()
468 .iter()
466 .map(|x| HgPathBuf::from_bytes(x.as_ref()))
469 .map(|x| HgPathBuf::from_bytes(x.as_ref()))
467 .collect();
470 .collect();
468
471
469 let mut other_parent = HashSet::new();
472 let mut other_parent = HashSet::new();
470 other_parent.insert(HgPathBuf::from_bytes(b"f4"));
473 other_parent.insert(HgPathBuf::from_bytes(b"f4"));
471 let entries = map.get_non_normal_other_parent_entries();
474 let entries = map.get_non_normal_other_parent_entries();
472
475
473 assert_eq!(
476 assert_eq!(
474 (&mut non_normal, &mut other_parent),
477 (&mut non_normal, &mut other_parent),
475 (entries.0, entries.1)
478 (entries.0, entries.1)
476 );
479 );
477 }
480 }
478 }
481 }
@@ -1,1206 +1,1206 b''
1 use bytes_cast::BytesCast;
1 use bytes_cast::BytesCast;
2 use micro_timer::timed;
2 use micro_timer::timed;
3 use std::borrow::Cow;
3 use std::borrow::Cow;
4 use std::convert::TryInto;
4 use std::convert::TryInto;
5 use std::path::PathBuf;
5 use std::path::PathBuf;
6
6
7 use super::on_disk;
7 use super::on_disk;
8 use super::on_disk::DirstateV2ParseError;
8 use super::on_disk::DirstateV2ParseError;
9 use super::path_with_basename::WithBasename;
9 use super::path_with_basename::WithBasename;
10 use crate::dirstate::parsers::pack_entry;
10 use crate::dirstate::parsers::pack_entry;
11 use crate::dirstate::parsers::packed_entry_size;
11 use crate::dirstate::parsers::packed_entry_size;
12 use crate::dirstate::parsers::parse_dirstate_entries;
12 use crate::dirstate::parsers::parse_dirstate_entries;
13 use crate::dirstate::parsers::Timestamp;
13 use crate::dirstate::parsers::Timestamp;
14 use crate::dirstate::MTIME_UNSET;
14 use crate::dirstate::MTIME_UNSET;
15 use crate::dirstate::SIZE_FROM_OTHER_PARENT;
15 use crate::dirstate::SIZE_FROM_OTHER_PARENT;
16 use crate::dirstate::SIZE_NON_NORMAL;
16 use crate::dirstate::SIZE_NON_NORMAL;
17 use crate::dirstate::V1_RANGEMASK;
17 use crate::dirstate::V1_RANGEMASK;
18 use crate::matchers::Matcher;
18 use crate::matchers::Matcher;
19 use crate::utils::hg_path::{HgPath, HgPathBuf};
19 use crate::utils::hg_path::{HgPath, HgPathBuf};
20 use crate::CopyMapIter;
20 use crate::CopyMapIter;
21 use crate::DirstateEntry;
21 use crate::DirstateEntry;
22 use crate::DirstateError;
22 use crate::DirstateError;
23 use crate::DirstateParents;
23 use crate::DirstateParents;
24 use crate::DirstateStatus;
24 use crate::DirstateStatus;
25 use crate::EntryState;
25 use crate::EntryState;
26 use crate::FastHashMap;
26 use crate::FastHashMap;
27 use crate::PatternFileWarning;
27 use crate::PatternFileWarning;
28 use crate::StateMapIter;
28 use crate::StateMapIter;
29 use crate::StatusError;
29 use crate::StatusError;
30 use crate::StatusOptions;
30 use crate::StatusOptions;
31
31
32 pub struct DirstateMap<'on_disk> {
32 pub struct DirstateMap<'on_disk> {
33 /// Contents of the `.hg/dirstate` file
33 /// Contents of the `.hg/dirstate` file
34 pub(super) on_disk: &'on_disk [u8],
34 pub(super) on_disk: &'on_disk [u8],
35
35
36 pub(super) root: ChildNodes<'on_disk>,
36 pub(super) root: ChildNodes<'on_disk>,
37
37
38 /// Number of nodes anywhere in the tree that have `.entry.is_some()`.
38 /// Number of nodes anywhere in the tree that have `.entry.is_some()`.
39 pub(super) nodes_with_entry_count: u32,
39 pub(super) nodes_with_entry_count: u32,
40
40
41 /// Number of nodes anywhere in the tree that have
41 /// Number of nodes anywhere in the tree that have
42 /// `.copy_source.is_some()`.
42 /// `.copy_source.is_some()`.
43 pub(super) nodes_with_copy_source_count: u32,
43 pub(super) nodes_with_copy_source_count: u32,
44
44
45 /// See on_disk::Header
45 /// See on_disk::Header
46 pub(super) ignore_patterns_hash: on_disk::IgnorePatternsHash,
46 pub(super) ignore_patterns_hash: on_disk::IgnorePatternsHash,
47 }
47 }
48
48
49 /// Using a plain `HgPathBuf` of the full path from the repository root as a
49 /// Using a plain `HgPathBuf` of the full path from the repository root as a
50 /// map key would also work: all paths in a given map have the same parent
50 /// map key would also work: all paths in a given map have the same parent
51 /// path, so comparing full paths gives the same result as comparing base
51 /// path, so comparing full paths gives the same result as comparing base
52 /// names. However `HashMap` would waste time always re-hashing the same
52 /// names. However `HashMap` would waste time always re-hashing the same
53 /// string prefix.
53 /// string prefix.
54 pub(super) type NodeKey<'on_disk> = WithBasename<Cow<'on_disk, HgPath>>;
54 pub(super) type NodeKey<'on_disk> = WithBasename<Cow<'on_disk, HgPath>>;
55
55
56 /// Similar to `&'tree Cow<'on_disk, HgPath>`, but can also be returned
56 /// Similar to `&'tree Cow<'on_disk, HgPath>`, but can also be returned
57 /// for on-disk nodes that don’t actually have a `Cow` to borrow.
57 /// for on-disk nodes that don’t actually have a `Cow` to borrow.
58 pub(super) enum BorrowedPath<'tree, 'on_disk> {
58 pub(super) enum BorrowedPath<'tree, 'on_disk> {
59 InMemory(&'tree HgPathBuf),
59 InMemory(&'tree HgPathBuf),
60 OnDisk(&'on_disk HgPath),
60 OnDisk(&'on_disk HgPath),
61 }
61 }
62
62
63 pub(super) enum ChildNodes<'on_disk> {
63 pub(super) enum ChildNodes<'on_disk> {
64 InMemory(FastHashMap<NodeKey<'on_disk>, Node<'on_disk>>),
64 InMemory(FastHashMap<NodeKey<'on_disk>, Node<'on_disk>>),
65 OnDisk(&'on_disk [on_disk::Node]),
65 OnDisk(&'on_disk [on_disk::Node]),
66 }
66 }
67
67
68 pub(super) enum ChildNodesRef<'tree, 'on_disk> {
68 pub(super) enum ChildNodesRef<'tree, 'on_disk> {
69 InMemory(&'tree FastHashMap<NodeKey<'on_disk>, Node<'on_disk>>),
69 InMemory(&'tree FastHashMap<NodeKey<'on_disk>, Node<'on_disk>>),
70 OnDisk(&'on_disk [on_disk::Node]),
70 OnDisk(&'on_disk [on_disk::Node]),
71 }
71 }
72
72
73 pub(super) enum NodeRef<'tree, 'on_disk> {
73 pub(super) enum NodeRef<'tree, 'on_disk> {
74 InMemory(&'tree NodeKey<'on_disk>, &'tree Node<'on_disk>),
74 InMemory(&'tree NodeKey<'on_disk>, &'tree Node<'on_disk>),
75 OnDisk(&'on_disk on_disk::Node),
75 OnDisk(&'on_disk on_disk::Node),
76 }
76 }
77
77
78 impl<'tree, 'on_disk> BorrowedPath<'tree, 'on_disk> {
78 impl<'tree, 'on_disk> BorrowedPath<'tree, 'on_disk> {
79 pub fn detach_from_tree(&self) -> Cow<'on_disk, HgPath> {
79 pub fn detach_from_tree(&self) -> Cow<'on_disk, HgPath> {
80 match *self {
80 match *self {
81 BorrowedPath::InMemory(in_memory) => Cow::Owned(in_memory.clone()),
81 BorrowedPath::InMemory(in_memory) => Cow::Owned(in_memory.clone()),
82 BorrowedPath::OnDisk(on_disk) => Cow::Borrowed(on_disk),
82 BorrowedPath::OnDisk(on_disk) => Cow::Borrowed(on_disk),
83 }
83 }
84 }
84 }
85 }
85 }
86
86
87 impl<'tree, 'on_disk> std::ops::Deref for BorrowedPath<'tree, 'on_disk> {
87 impl<'tree, 'on_disk> std::ops::Deref for BorrowedPath<'tree, 'on_disk> {
88 type Target = HgPath;
88 type Target = HgPath;
89
89
90 fn deref(&self) -> &HgPath {
90 fn deref(&self) -> &HgPath {
91 match *self {
91 match *self {
92 BorrowedPath::InMemory(in_memory) => in_memory,
92 BorrowedPath::InMemory(in_memory) => in_memory,
93 BorrowedPath::OnDisk(on_disk) => on_disk,
93 BorrowedPath::OnDisk(on_disk) => on_disk,
94 }
94 }
95 }
95 }
96 }
96 }
97
97
98 impl Default for ChildNodes<'_> {
98 impl Default for ChildNodes<'_> {
99 fn default() -> Self {
99 fn default() -> Self {
100 ChildNodes::InMemory(Default::default())
100 ChildNodes::InMemory(Default::default())
101 }
101 }
102 }
102 }
103
103
104 impl<'on_disk> ChildNodes<'on_disk> {
104 impl<'on_disk> ChildNodes<'on_disk> {
105 pub(super) fn as_ref<'tree>(
105 pub(super) fn as_ref<'tree>(
106 &'tree self,
106 &'tree self,
107 ) -> ChildNodesRef<'tree, 'on_disk> {
107 ) -> ChildNodesRef<'tree, 'on_disk> {
108 match self {
108 match self {
109 ChildNodes::InMemory(nodes) => ChildNodesRef::InMemory(nodes),
109 ChildNodes::InMemory(nodes) => ChildNodesRef::InMemory(nodes),
110 ChildNodes::OnDisk(nodes) => ChildNodesRef::OnDisk(nodes),
110 ChildNodes::OnDisk(nodes) => ChildNodesRef::OnDisk(nodes),
111 }
111 }
112 }
112 }
113
113
114 pub(super) fn is_empty(&self) -> bool {
114 pub(super) fn is_empty(&self) -> bool {
115 match self {
115 match self {
116 ChildNodes::InMemory(nodes) => nodes.is_empty(),
116 ChildNodes::InMemory(nodes) => nodes.is_empty(),
117 ChildNodes::OnDisk(nodes) => nodes.is_empty(),
117 ChildNodes::OnDisk(nodes) => nodes.is_empty(),
118 }
118 }
119 }
119 }
120
120
121 pub(super) fn make_mut(
121 pub(super) fn make_mut(
122 &mut self,
122 &mut self,
123 on_disk: &'on_disk [u8],
123 on_disk: &'on_disk [u8],
124 ) -> Result<
124 ) -> Result<
125 &mut FastHashMap<NodeKey<'on_disk>, Node<'on_disk>>,
125 &mut FastHashMap<NodeKey<'on_disk>, Node<'on_disk>>,
126 DirstateV2ParseError,
126 DirstateV2ParseError,
127 > {
127 > {
128 match self {
128 match self {
129 ChildNodes::InMemory(nodes) => Ok(nodes),
129 ChildNodes::InMemory(nodes) => Ok(nodes),
130 ChildNodes::OnDisk(nodes) => {
130 ChildNodes::OnDisk(nodes) => {
131 let nodes = nodes
131 let nodes = nodes
132 .iter()
132 .iter()
133 .map(|node| {
133 .map(|node| {
134 Ok((
134 Ok((
135 node.path(on_disk)?,
135 node.path(on_disk)?,
136 node.to_in_memory_node(on_disk)?,
136 node.to_in_memory_node(on_disk)?,
137 ))
137 ))
138 })
138 })
139 .collect::<Result<_, _>>()?;
139 .collect::<Result<_, _>>()?;
140 *self = ChildNodes::InMemory(nodes);
140 *self = ChildNodes::InMemory(nodes);
141 match self {
141 match self {
142 ChildNodes::InMemory(nodes) => Ok(nodes),
142 ChildNodes::InMemory(nodes) => Ok(nodes),
143 ChildNodes::OnDisk(_) => unreachable!(),
143 ChildNodes::OnDisk(_) => unreachable!(),
144 }
144 }
145 }
145 }
146 }
146 }
147 }
147 }
148 }
148 }
149
149
150 impl<'tree, 'on_disk> ChildNodesRef<'tree, 'on_disk> {
150 impl<'tree, 'on_disk> ChildNodesRef<'tree, 'on_disk> {
151 pub(super) fn get(
151 pub(super) fn get(
152 &self,
152 &self,
153 base_name: &HgPath,
153 base_name: &HgPath,
154 on_disk: &'on_disk [u8],
154 on_disk: &'on_disk [u8],
155 ) -> Result<Option<NodeRef<'tree, 'on_disk>>, DirstateV2ParseError> {
155 ) -> Result<Option<NodeRef<'tree, 'on_disk>>, DirstateV2ParseError> {
156 match self {
156 match self {
157 ChildNodesRef::InMemory(nodes) => Ok(nodes
157 ChildNodesRef::InMemory(nodes) => Ok(nodes
158 .get_key_value(base_name)
158 .get_key_value(base_name)
159 .map(|(k, v)| NodeRef::InMemory(k, v))),
159 .map(|(k, v)| NodeRef::InMemory(k, v))),
160 ChildNodesRef::OnDisk(nodes) => {
160 ChildNodesRef::OnDisk(nodes) => {
161 let mut parse_result = Ok(());
161 let mut parse_result = Ok(());
162 let search_result = nodes.binary_search_by(|node| {
162 let search_result = nodes.binary_search_by(|node| {
163 match node.base_name(on_disk) {
163 match node.base_name(on_disk) {
164 Ok(node_base_name) => node_base_name.cmp(base_name),
164 Ok(node_base_name) => node_base_name.cmp(base_name),
165 Err(e) => {
165 Err(e) => {
166 parse_result = Err(e);
166 parse_result = Err(e);
167 // Dummy comparison result, `search_result` won’t
167 // Dummy comparison result, `search_result` won’t
168 // be used since `parse_result` is an error
168 // be used since `parse_result` is an error
169 std::cmp::Ordering::Equal
169 std::cmp::Ordering::Equal
170 }
170 }
171 }
171 }
172 });
172 });
173 parse_result.map(|()| {
173 parse_result.map(|()| {
174 search_result.ok().map(|i| NodeRef::OnDisk(&nodes[i]))
174 search_result.ok().map(|i| NodeRef::OnDisk(&nodes[i]))
175 })
175 })
176 }
176 }
177 }
177 }
178 }
178 }
179
179
180 /// Iterate in undefined order
180 /// Iterate in undefined order
181 pub(super) fn iter(
181 pub(super) fn iter(
182 &self,
182 &self,
183 ) -> impl Iterator<Item = NodeRef<'tree, 'on_disk>> {
183 ) -> impl Iterator<Item = NodeRef<'tree, 'on_disk>> {
184 match self {
184 match self {
185 ChildNodesRef::InMemory(nodes) => itertools::Either::Left(
185 ChildNodesRef::InMemory(nodes) => itertools::Either::Left(
186 nodes.iter().map(|(k, v)| NodeRef::InMemory(k, v)),
186 nodes.iter().map(|(k, v)| NodeRef::InMemory(k, v)),
187 ),
187 ),
188 ChildNodesRef::OnDisk(nodes) => {
188 ChildNodesRef::OnDisk(nodes) => {
189 itertools::Either::Right(nodes.iter().map(NodeRef::OnDisk))
189 itertools::Either::Right(nodes.iter().map(NodeRef::OnDisk))
190 }
190 }
191 }
191 }
192 }
192 }
193
193
194 /// Iterate in parallel in undefined order
194 /// Iterate in parallel in undefined order
195 pub(super) fn par_iter(
195 pub(super) fn par_iter(
196 &self,
196 &self,
197 ) -> impl rayon::iter::ParallelIterator<Item = NodeRef<'tree, 'on_disk>>
197 ) -> impl rayon::iter::ParallelIterator<Item = NodeRef<'tree, 'on_disk>>
198 {
198 {
199 use rayon::prelude::*;
199 use rayon::prelude::*;
200 match self {
200 match self {
201 ChildNodesRef::InMemory(nodes) => rayon::iter::Either::Left(
201 ChildNodesRef::InMemory(nodes) => rayon::iter::Either::Left(
202 nodes.par_iter().map(|(k, v)| NodeRef::InMemory(k, v)),
202 nodes.par_iter().map(|(k, v)| NodeRef::InMemory(k, v)),
203 ),
203 ),
204 ChildNodesRef::OnDisk(nodes) => rayon::iter::Either::Right(
204 ChildNodesRef::OnDisk(nodes) => rayon::iter::Either::Right(
205 nodes.par_iter().map(NodeRef::OnDisk),
205 nodes.par_iter().map(NodeRef::OnDisk),
206 ),
206 ),
207 }
207 }
208 }
208 }
209
209
210 pub(super) fn sorted(&self) -> Vec<NodeRef<'tree, 'on_disk>> {
210 pub(super) fn sorted(&self) -> Vec<NodeRef<'tree, 'on_disk>> {
211 match self {
211 match self {
212 ChildNodesRef::InMemory(nodes) => {
212 ChildNodesRef::InMemory(nodes) => {
213 let mut vec: Vec<_> = nodes
213 let mut vec: Vec<_> = nodes
214 .iter()
214 .iter()
215 .map(|(k, v)| NodeRef::InMemory(k, v))
215 .map(|(k, v)| NodeRef::InMemory(k, v))
216 .collect();
216 .collect();
217 fn sort_key<'a>(node: &'a NodeRef) -> &'a HgPath {
217 fn sort_key<'a>(node: &'a NodeRef) -> &'a HgPath {
218 match node {
218 match node {
219 NodeRef::InMemory(path, _node) => path.base_name(),
219 NodeRef::InMemory(path, _node) => path.base_name(),
220 NodeRef::OnDisk(_) => unreachable!(),
220 NodeRef::OnDisk(_) => unreachable!(),
221 }
221 }
222 }
222 }
223 // `sort_unstable_by_key` doesn’t allow keys borrowing from the
223 // `sort_unstable_by_key` doesn’t allow keys borrowing from the
224 // value: https://github.com/rust-lang/rust/issues/34162
224 // value: https://github.com/rust-lang/rust/issues/34162
225 vec.sort_unstable_by(|a, b| sort_key(a).cmp(sort_key(b)));
225 vec.sort_unstable_by(|a, b| sort_key(a).cmp(sort_key(b)));
226 vec
226 vec
227 }
227 }
228 ChildNodesRef::OnDisk(nodes) => {
228 ChildNodesRef::OnDisk(nodes) => {
229 // Nodes on disk are already sorted
229 // Nodes on disk are already sorted
230 nodes.iter().map(NodeRef::OnDisk).collect()
230 nodes.iter().map(NodeRef::OnDisk).collect()
231 }
231 }
232 }
232 }
233 }
233 }
234 }
234 }
235
235
236 impl<'tree, 'on_disk> NodeRef<'tree, 'on_disk> {
236 impl<'tree, 'on_disk> NodeRef<'tree, 'on_disk> {
237 pub(super) fn full_path(
237 pub(super) fn full_path(
238 &self,
238 &self,
239 on_disk: &'on_disk [u8],
239 on_disk: &'on_disk [u8],
240 ) -> Result<&'tree HgPath, DirstateV2ParseError> {
240 ) -> Result<&'tree HgPath, DirstateV2ParseError> {
241 match self {
241 match self {
242 NodeRef::InMemory(path, _node) => Ok(path.full_path()),
242 NodeRef::InMemory(path, _node) => Ok(path.full_path()),
243 NodeRef::OnDisk(node) => node.full_path(on_disk),
243 NodeRef::OnDisk(node) => node.full_path(on_disk),
244 }
244 }
245 }
245 }
246
246
247 /// Returns a `BorrowedPath`, which can be turned into a `Cow<'on_disk,
247 /// Returns a `BorrowedPath`, which can be turned into a `Cow<'on_disk,
248 /// HgPath>` detached from `'tree`
248 /// HgPath>` detached from `'tree`
249 pub(super) fn full_path_borrowed(
249 pub(super) fn full_path_borrowed(
250 &self,
250 &self,
251 on_disk: &'on_disk [u8],
251 on_disk: &'on_disk [u8],
252 ) -> Result<BorrowedPath<'tree, 'on_disk>, DirstateV2ParseError> {
252 ) -> Result<BorrowedPath<'tree, 'on_disk>, DirstateV2ParseError> {
253 match self {
253 match self {
254 NodeRef::InMemory(path, _node) => match path.full_path() {
254 NodeRef::InMemory(path, _node) => match path.full_path() {
255 Cow::Borrowed(on_disk) => Ok(BorrowedPath::OnDisk(on_disk)),
255 Cow::Borrowed(on_disk) => Ok(BorrowedPath::OnDisk(on_disk)),
256 Cow::Owned(in_memory) => Ok(BorrowedPath::InMemory(in_memory)),
256 Cow::Owned(in_memory) => Ok(BorrowedPath::InMemory(in_memory)),
257 },
257 },
258 NodeRef::OnDisk(node) => {
258 NodeRef::OnDisk(node) => {
259 Ok(BorrowedPath::OnDisk(node.full_path(on_disk)?))
259 Ok(BorrowedPath::OnDisk(node.full_path(on_disk)?))
260 }
260 }
261 }
261 }
262 }
262 }
263
263
264 pub(super) fn base_name(
264 pub(super) fn base_name(
265 &self,
265 &self,
266 on_disk: &'on_disk [u8],
266 on_disk: &'on_disk [u8],
267 ) -> Result<&'tree HgPath, DirstateV2ParseError> {
267 ) -> Result<&'tree HgPath, DirstateV2ParseError> {
268 match self {
268 match self {
269 NodeRef::InMemory(path, _node) => Ok(path.base_name()),
269 NodeRef::InMemory(path, _node) => Ok(path.base_name()),
270 NodeRef::OnDisk(node) => node.base_name(on_disk),
270 NodeRef::OnDisk(node) => node.base_name(on_disk),
271 }
271 }
272 }
272 }
273
273
274 pub(super) fn children(
274 pub(super) fn children(
275 &self,
275 &self,
276 on_disk: &'on_disk [u8],
276 on_disk: &'on_disk [u8],
277 ) -> Result<ChildNodesRef<'tree, 'on_disk>, DirstateV2ParseError> {
277 ) -> Result<ChildNodesRef<'tree, 'on_disk>, DirstateV2ParseError> {
278 match self {
278 match self {
279 NodeRef::InMemory(_path, node) => Ok(node.children.as_ref()),
279 NodeRef::InMemory(_path, node) => Ok(node.children.as_ref()),
280 NodeRef::OnDisk(node) => {
280 NodeRef::OnDisk(node) => {
281 Ok(ChildNodesRef::OnDisk(node.children(on_disk)?))
281 Ok(ChildNodesRef::OnDisk(node.children(on_disk)?))
282 }
282 }
283 }
283 }
284 }
284 }
285
285
286 pub(super) fn has_copy_source(&self) -> bool {
286 pub(super) fn has_copy_source(&self) -> bool {
287 match self {
287 match self {
288 NodeRef::InMemory(_path, node) => node.copy_source.is_some(),
288 NodeRef::InMemory(_path, node) => node.copy_source.is_some(),
289 NodeRef::OnDisk(node) => node.has_copy_source(),
289 NodeRef::OnDisk(node) => node.has_copy_source(),
290 }
290 }
291 }
291 }
292
292
293 pub(super) fn copy_source(
293 pub(super) fn copy_source(
294 &self,
294 &self,
295 on_disk: &'on_disk [u8],
295 on_disk: &'on_disk [u8],
296 ) -> Result<Option<&'tree HgPath>, DirstateV2ParseError> {
296 ) -> Result<Option<&'tree HgPath>, DirstateV2ParseError> {
297 match self {
297 match self {
298 NodeRef::InMemory(_path, node) => {
298 NodeRef::InMemory(_path, node) => {
299 Ok(node.copy_source.as_ref().map(|s| &**s))
299 Ok(node.copy_source.as_ref().map(|s| &**s))
300 }
300 }
301 NodeRef::OnDisk(node) => node.copy_source(on_disk),
301 NodeRef::OnDisk(node) => node.copy_source(on_disk),
302 }
302 }
303 }
303 }
304
304
305 pub(super) fn entry(
305 pub(super) fn entry(
306 &self,
306 &self,
307 ) -> Result<Option<DirstateEntry>, DirstateV2ParseError> {
307 ) -> Result<Option<DirstateEntry>, DirstateV2ParseError> {
308 match self {
308 match self {
309 NodeRef::InMemory(_path, node) => {
309 NodeRef::InMemory(_path, node) => {
310 Ok(node.data.as_entry().copied())
310 Ok(node.data.as_entry().copied())
311 }
311 }
312 NodeRef::OnDisk(node) => node.entry(),
312 NodeRef::OnDisk(node) => node.entry(),
313 }
313 }
314 }
314 }
315
315
316 pub(super) fn state(
316 pub(super) fn state(
317 &self,
317 &self,
318 ) -> Result<Option<EntryState>, DirstateV2ParseError> {
318 ) -> Result<Option<EntryState>, DirstateV2ParseError> {
319 match self {
319 match self {
320 NodeRef::InMemory(_path, node) => {
320 NodeRef::InMemory(_path, node) => {
321 Ok(node.data.as_entry().map(|entry| entry.state))
321 Ok(node.data.as_entry().map(|entry| entry.state))
322 }
322 }
323 NodeRef::OnDisk(node) => node.state(),
323 NodeRef::OnDisk(node) => node.state(),
324 }
324 }
325 }
325 }
326
326
327 pub(super) fn cached_directory_mtime(
327 pub(super) fn cached_directory_mtime(
328 &self,
328 &self,
329 ) -> Option<&'tree on_disk::Timestamp> {
329 ) -> Option<&'tree on_disk::Timestamp> {
330 match self {
330 match self {
331 NodeRef::InMemory(_path, node) => match &node.data {
331 NodeRef::InMemory(_path, node) => match &node.data {
332 NodeData::CachedDirectory { mtime } => Some(mtime),
332 NodeData::CachedDirectory { mtime } => Some(mtime),
333 _ => None,
333 _ => None,
334 },
334 },
335 NodeRef::OnDisk(node) => node.cached_directory_mtime(),
335 NodeRef::OnDisk(node) => node.cached_directory_mtime(),
336 }
336 }
337 }
337 }
338
338
339 pub(super) fn descendants_with_entry_count(&self) -> u32 {
339 pub(super) fn descendants_with_entry_count(&self) -> u32 {
340 match self {
340 match self {
341 NodeRef::InMemory(_path, node) => {
341 NodeRef::InMemory(_path, node) => {
342 node.descendants_with_entry_count
342 node.descendants_with_entry_count
343 }
343 }
344 NodeRef::OnDisk(node) => node.descendants_with_entry_count.get(),
344 NodeRef::OnDisk(node) => node.descendants_with_entry_count.get(),
345 }
345 }
346 }
346 }
347
347
348 pub(super) fn tracked_descendants_count(&self) -> u32 {
348 pub(super) fn tracked_descendants_count(&self) -> u32 {
349 match self {
349 match self {
350 NodeRef::InMemory(_path, node) => node.tracked_descendants_count,
350 NodeRef::InMemory(_path, node) => node.tracked_descendants_count,
351 NodeRef::OnDisk(node) => node.tracked_descendants_count.get(),
351 NodeRef::OnDisk(node) => node.tracked_descendants_count.get(),
352 }
352 }
353 }
353 }
354 }
354 }
355
355
356 /// Represents a file or a directory
356 /// Represents a file or a directory
357 #[derive(Default)]
357 #[derive(Default)]
358 pub(super) struct Node<'on_disk> {
358 pub(super) struct Node<'on_disk> {
359 pub(super) data: NodeData,
359 pub(super) data: NodeData,
360
360
361 pub(super) copy_source: Option<Cow<'on_disk, HgPath>>,
361 pub(super) copy_source: Option<Cow<'on_disk, HgPath>>,
362
362
363 pub(super) children: ChildNodes<'on_disk>,
363 pub(super) children: ChildNodes<'on_disk>,
364
364
365 /// How many (non-inclusive) descendants of this node have an entry.
365 /// How many (non-inclusive) descendants of this node have an entry.
366 pub(super) descendants_with_entry_count: u32,
366 pub(super) descendants_with_entry_count: u32,
367
367
368 /// How many (non-inclusive) descendants of this node have an entry whose
368 /// How many (non-inclusive) descendants of this node have an entry whose
369 /// state is "tracked".
369 /// state is "tracked".
370 pub(super) tracked_descendants_count: u32,
370 pub(super) tracked_descendants_count: u32,
371 }
371 }
372
372
373 pub(super) enum NodeData {
373 pub(super) enum NodeData {
374 Entry(DirstateEntry),
374 Entry(DirstateEntry),
375 CachedDirectory { mtime: on_disk::Timestamp },
375 CachedDirectory { mtime: on_disk::Timestamp },
376 None,
376 None,
377 }
377 }
378
378
379 impl Default for NodeData {
379 impl Default for NodeData {
380 fn default() -> Self {
380 fn default() -> Self {
381 NodeData::None
381 NodeData::None
382 }
382 }
383 }
383 }
384
384
385 impl NodeData {
385 impl NodeData {
386 fn has_entry(&self) -> bool {
386 fn has_entry(&self) -> bool {
387 match self {
387 match self {
388 NodeData::Entry(_) => true,
388 NodeData::Entry(_) => true,
389 _ => false,
389 _ => false,
390 }
390 }
391 }
391 }
392
392
393 fn as_entry(&self) -> Option<&DirstateEntry> {
393 fn as_entry(&self) -> Option<&DirstateEntry> {
394 match self {
394 match self {
395 NodeData::Entry(entry) => Some(entry),
395 NodeData::Entry(entry) => Some(entry),
396 _ => None,
396 _ => None,
397 }
397 }
398 }
398 }
399 }
399 }
400
400
401 impl<'on_disk> DirstateMap<'on_disk> {
401 impl<'on_disk> DirstateMap<'on_disk> {
402 pub(super) fn empty(on_disk: &'on_disk [u8]) -> Self {
402 pub(super) fn empty(on_disk: &'on_disk [u8]) -> Self {
403 Self {
403 Self {
404 on_disk,
404 on_disk,
405 root: ChildNodes::default(),
405 root: ChildNodes::default(),
406 nodes_with_entry_count: 0,
406 nodes_with_entry_count: 0,
407 nodes_with_copy_source_count: 0,
407 nodes_with_copy_source_count: 0,
408 ignore_patterns_hash: [0; on_disk::IGNORE_PATTERNS_HASH_LEN],
408 ignore_patterns_hash: [0; on_disk::IGNORE_PATTERNS_HASH_LEN],
409 }
409 }
410 }
410 }
411
411
412 #[timed]
412 #[timed]
413 pub fn new_v2(
413 pub fn new_v2(
414 on_disk: &'on_disk [u8],
414 on_disk: &'on_disk [u8],
415 ) -> Result<(Self, Option<DirstateParents>), DirstateError> {
415 ) -> Result<(Self, Option<DirstateParents>), DirstateError> {
416 Ok(on_disk::read(on_disk)?)
416 Ok(on_disk::read(on_disk)?)
417 }
417 }
418
418
419 #[timed]
419 #[timed]
420 pub fn new_v1(
420 pub fn new_v1(
421 on_disk: &'on_disk [u8],
421 on_disk: &'on_disk [u8],
422 ) -> Result<(Self, Option<DirstateParents>), DirstateError> {
422 ) -> Result<(Self, Option<DirstateParents>), DirstateError> {
423 let mut map = Self::empty(on_disk);
423 let mut map = Self::empty(on_disk);
424 if map.on_disk.is_empty() {
424 if map.on_disk.is_empty() {
425 return Ok((map, None));
425 return Ok((map, None));
426 }
426 }
427
427
428 let parents = parse_dirstate_entries(
428 let parents = parse_dirstate_entries(
429 map.on_disk,
429 map.on_disk,
430 |path, entry, copy_source| {
430 |path, entry, copy_source| {
431 let tracked = entry.state.is_tracked();
431 let tracked = entry.state.is_tracked();
432 let node = Self::get_or_insert_node(
432 let node = Self::get_or_insert_node(
433 map.on_disk,
433 map.on_disk,
434 &mut map.root,
434 &mut map.root,
435 path,
435 path,
436 WithBasename::to_cow_borrowed,
436 WithBasename::to_cow_borrowed,
437 |ancestor| {
437 |ancestor| {
438 if tracked {
438 if tracked {
439 ancestor.tracked_descendants_count += 1
439 ancestor.tracked_descendants_count += 1
440 }
440 }
441 ancestor.descendants_with_entry_count += 1
441 ancestor.descendants_with_entry_count += 1
442 },
442 },
443 )?;
443 )?;
444 assert!(
444 assert!(
445 !node.data.has_entry(),
445 !node.data.has_entry(),
446 "duplicate dirstate entry in read"
446 "duplicate dirstate entry in read"
447 );
447 );
448 assert!(
448 assert!(
449 node.copy_source.is_none(),
449 node.copy_source.is_none(),
450 "duplicate dirstate entry in read"
450 "duplicate dirstate entry in read"
451 );
451 );
452 node.data = NodeData::Entry(*entry);
452 node.data = NodeData::Entry(*entry);
453 node.copy_source = copy_source.map(Cow::Borrowed);
453 node.copy_source = copy_source.map(Cow::Borrowed);
454 map.nodes_with_entry_count += 1;
454 map.nodes_with_entry_count += 1;
455 if copy_source.is_some() {
455 if copy_source.is_some() {
456 map.nodes_with_copy_source_count += 1
456 map.nodes_with_copy_source_count += 1
457 }
457 }
458 Ok(())
458 Ok(())
459 },
459 },
460 )?;
460 )?;
461 let parents = Some(parents.clone());
461 let parents = Some(parents.clone());
462
462
463 Ok((map, parents))
463 Ok((map, parents))
464 }
464 }
465
465
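/// Look up the node at `path` in the tree, if any, without creating it.
/// Descends from the root one path component at a time.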
466 fn get_node<'tree>(
466 fn get_node<'tree>(
467 &'tree self,
467 &'tree self,
468 path: &HgPath,
468 path: &HgPath,
469 ) -> Result<Option<NodeRef<'tree, 'on_disk>>, DirstateV2ParseError> {
469 ) -> Result<Option<NodeRef<'tree, 'on_disk>>, DirstateV2ParseError> {
470 let mut children = self.root.as_ref();
470 let mut children = self.root.as_ref();
471 let mut components = path.components();
471 let mut components = path.components();
472 let mut component =
472 let mut component =
473 components.next().expect("expected at least one component");
473 components.next().expect("expected at least one component");
474 loop {
474 loop {
475 if let Some(child) = children.get(component, self.on_disk)? {
475 if let Some(child) = children.get(component, self.on_disk)? {
476 if let Some(next_component) = components.next() {
476 if let Some(next_component) = components.next() {
477 component = next_component;
477 component = next_component;
478 children = child.children(self.on_disk)?;
478 children = child.children(self.on_disk)?;
479 } else {
479 } else {
480 return Ok(Some(child));
480 return Ok(Some(child));
481 }
481 }
482 } else {
482 } else {
483 return Ok(None);
483 return Ok(None);
484 }
484 }
485 }
485 }
486 }
486 }
487
487
488 /// Returns a mutable reference to the node at `path` if it exists
488 /// Returns a mutable reference to the node at `path` if it exists
489 ///
489 ///
490 /// This takes `root` instead of `&mut self` so that callers can mutate
490 /// This takes `root` instead of `&mut self` so that callers can mutate
491 /// other fields while the returned borrow is still valid
491 /// other fields while the returned borrow is still valid
492 fn get_node_mut<'tree>(
492 fn get_node_mut<'tree>(
493 on_disk: &'on_disk [u8],
493 on_disk: &'on_disk [u8],
494 root: &'tree mut ChildNodes<'on_disk>,
494 root: &'tree mut ChildNodes<'on_disk>,
495 path: &HgPath,
495 path: &HgPath,
496 ) -> Result<Option<&'tree mut Node<'on_disk>>, DirstateV2ParseError> {
496 ) -> Result<Option<&'tree mut Node<'on_disk>>, DirstateV2ParseError> {
497 let mut children = root;
497 let mut children = root;
498 let mut components = path.components();
498 let mut components = path.components();
499 let mut component =
499 let mut component =
500 components.next().expect("expected at least one component");
500 components.next().expect("expected at least one component");
501 loop {
501 loop {
502 if let Some(child) = children.make_mut(on_disk)?.get_mut(component)
502 if let Some(child) = children.make_mut(on_disk)?.get_mut(component)
503 {
503 {
504 if let Some(next_component) = components.next() {
504 if let Some(next_component) = components.next() {
505 component = next_component;
505 component = next_component;
506 children = &mut child.children;
506 children = &mut child.children;
507 } else {
507 } else {
508 return Ok(Some(child));
508 return Ok(Some(child));
509 }
509 }
510 } else {
510 } else {
511 return Ok(None);
511 return Ok(None);
512 }
512 }
513 }
513 }
514 }
514 }
515
515
516 pub(super) fn get_or_insert<'tree, 'path>(
516 pub(super) fn get_or_insert<'tree, 'path>(
517 &'tree mut self,
517 &'tree mut self,
518 path: &HgPath,
518 path: &HgPath,
519 ) -> Result<&'tree mut Node<'on_disk>, DirstateV2ParseError> {
519 ) -> Result<&'tree mut Node<'on_disk>, DirstateV2ParseError> {
520 Self::get_or_insert_node(
520 Self::get_or_insert_node(
521 self.on_disk,
521 self.on_disk,
522 &mut self.root,
522 &mut self.root,
523 path,
523 path,
524 WithBasename::to_cow_owned,
524 WithBasename::to_cow_owned,
525 |_| {},
525 |_| {},
526 )
526 )
527 }
527 }
528
528
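/// Walk the tree from the root towards `path`, creating any missing
/// intermediate nodes along the way, and return a mutable reference to
/// the node for `path` itself.
///
/// `each_ancestor` is called on every strict ancestor node of `path`
/// (for `a/b/c` it runs on the nodes for `a` and `a/b`), which lets
/// callers keep per-directory counters such as
/// `tracked_descendants_count` up to date.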
529 pub(super) fn get_or_insert_node<'tree, 'path>(
529 pub(super) fn get_or_insert_node<'tree, 'path>(
530 on_disk: &'on_disk [u8],
530 on_disk: &'on_disk [u8],
531 root: &'tree mut ChildNodes<'on_disk>,
531 root: &'tree mut ChildNodes<'on_disk>,
532 path: &'path HgPath,
532 path: &'path HgPath,
533 to_cow: impl Fn(
533 to_cow: impl Fn(
534 WithBasename<&'path HgPath>,
534 WithBasename<&'path HgPath>,
535 ) -> WithBasename<Cow<'on_disk, HgPath>>,
535 ) -> WithBasename<Cow<'on_disk, HgPath>>,
536 mut each_ancestor: impl FnMut(&mut Node),
536 mut each_ancestor: impl FnMut(&mut Node),
537 ) -> Result<&'tree mut Node<'on_disk>, DirstateV2ParseError> {
537 ) -> Result<&'tree mut Node<'on_disk>, DirstateV2ParseError> {
538 let mut child_nodes = root;
538 let mut child_nodes = root;
539 let mut inclusive_ancestor_paths =
539 let mut inclusive_ancestor_paths =
540 WithBasename::inclusive_ancestors_of(path);
540 WithBasename::inclusive_ancestors_of(path);
541 let mut ancestor_path = inclusive_ancestor_paths
541 let mut ancestor_path = inclusive_ancestor_paths
542 .next()
542 .next()
543 .expect("expected at least one inclusive ancestor");
543 .expect("expected at least one inclusive ancestor");
544 loop {
544 loop {
545 // TODO: can we avoid allocating an owned key in cases where the
545 // TODO: can we avoid allocating an owned key in cases where the
546 // map already contains that key, without introducing double
546 // map already contains that key, without introducing double
547 // lookup?
547 // lookup?
548 let child_node = child_nodes
548 let child_node = child_nodes
549 .make_mut(on_disk)?
549 .make_mut(on_disk)?
550 .entry(to_cow(ancestor_path))
550 .entry(to_cow(ancestor_path))
551 .or_default();
551 .or_default();
552 if let Some(next) = inclusive_ancestor_paths.next() {
552 if let Some(next) = inclusive_ancestor_paths.next() {
553 each_ancestor(child_node);
553 each_ancestor(child_node);
554 ancestor_path = next;
554 ancestor_path = next;
555 child_nodes = &mut child_node.children;
555 child_nodes = &mut child_node.children;
556 } else {
556 } else {
557 return Ok(child_node);
557 return Ok(child_node);
558 }
558 }
559 }
559 }
560 }
560 }
561
561
562 fn add_or_remove_file(
562 fn add_or_remove_file(
563 &mut self,
563 &mut self,
564 path: &HgPath,
564 path: &HgPath,
565 old_state: EntryState,
565 old_state: EntryState,
566 new_entry: DirstateEntry,
566 new_entry: DirstateEntry,
567 ) -> Result<(), DirstateV2ParseError> {
567 ) -> Result<(), DirstateV2ParseError> {
568 let had_entry = old_state != EntryState::Unknown;
568 let had_entry = old_state != EntryState::Unknown;
569 let tracked_count_increment =
569 let tracked_count_increment =
570 match (old_state.is_tracked(), new_entry.state.is_tracked()) {
570 match (old_state.is_tracked(), new_entry.state.is_tracked()) {
571 (false, true) => 1,
571 (false, true) => 1,
572 (true, false) => -1,
572 (true, false) => -1,
573 _ => 0,
573 _ => 0,
574 };
574 };
575
575
576 let node = Self::get_or_insert_node(
576 let node = Self::get_or_insert_node(
577 self.on_disk,
577 self.on_disk,
578 &mut self.root,
578 &mut self.root,
579 path,
579 path,
580 WithBasename::to_cow_owned,
580 WithBasename::to_cow_owned,
581 |ancestor| {
581 |ancestor| {
582 if !had_entry {
582 if !had_entry {
583 ancestor.descendants_with_entry_count += 1;
583 ancestor.descendants_with_entry_count += 1;
584 }
584 }
585
585
586 // We can’t use `+= increment` because the counter is unsigned,
586 // We can’t use `+= increment` because the counter is unsigned,
587 // and we want debug builds to detect accidental underflow
587 // and we want debug builds to detect accidental underflow
588 // through zero
588 // through zero
589 match tracked_count_increment {
589 match tracked_count_increment {
590 1 => ancestor.tracked_descendants_count += 1,
590 1 => ancestor.tracked_descendants_count += 1,
591 -1 => ancestor.tracked_descendants_count -= 1,
591 -1 => ancestor.tracked_descendants_count -= 1,
592 _ => {}
592 _ => {}
593 }
593 }
594 },
594 },
595 )?;
595 )?;
596 if !had_entry {
596 if !had_entry {
597 self.nodes_with_entry_count += 1
597 self.nodes_with_entry_count += 1
598 }
598 }
599 node.data = NodeData::Entry(new_entry);
599 node.data = NodeData::Entry(new_entry);
600 Ok(())
600 Ok(())
601 }
601 }
602
602
603 fn iter_nodes<'tree>(
603 fn iter_nodes<'tree>(
604 &'tree self,
604 &'tree self,
605 ) -> impl Iterator<
605 ) -> impl Iterator<
606 Item = Result<NodeRef<'tree, 'on_disk>, DirstateV2ParseError>,
606 Item = Result<NodeRef<'tree, 'on_disk>, DirstateV2ParseError>,
607 > + 'tree {
607 > + 'tree {
608 // Depth first tree traversal.
608 // Depth first tree traversal.
609 //
609 //
610 // If we could afford internal iteration and recursion,
610 // If we could afford internal iteration and recursion,
611 // this would look like:
611 // this would look like:
612 //
612 //
613 // ```
613 // ```
614 // fn traverse_children(
614 // fn traverse_children(
615 // children: &ChildNodes,
615 // children: &ChildNodes,
616 // each: &mut impl FnMut(&Node),
616 // each: &mut impl FnMut(&Node),
617 // ) {
617 // ) {
618 // for child in children.values() {
618 // for child in children.values() {
619 // traverse_children(&child.children, each);
619 // traverse_children(&child.children, each);
620 // each(child);
620 // each(child);
621 // }
621 // }
622 // }
622 // }
623 // ```
623 // ```
624 //
624 //
625 // However we want an external iterator and therefore can’t use the
625 // However we want an external iterator and therefore can’t use the
626 // call stack. Use an explicit stack instead:
626 // call stack. Use an explicit stack instead:
627 let mut stack = Vec::new();
627 let mut stack = Vec::new();
628 let mut iter = self.root.as_ref().iter();
628 let mut iter = self.root.as_ref().iter();
629 std::iter::from_fn(move || {
629 std::iter::from_fn(move || {
630 while let Some(child_node) = iter.next() {
630 while let Some(child_node) = iter.next() {
631 let children = match child_node.children(self.on_disk) {
631 let children = match child_node.children(self.on_disk) {
632 Ok(children) => children,
632 Ok(children) => children,
633 Err(error) => return Some(Err(error)),
633 Err(error) => return Some(Err(error)),
634 };
634 };
635 // Pseudo-recursion
635 // Pseudo-recursion
636 let new_iter = children.iter();
636 let new_iter = children.iter();
637 let old_iter = std::mem::replace(&mut iter, new_iter);
637 let old_iter = std::mem::replace(&mut iter, new_iter);
638 stack.push((child_node, old_iter));
638 stack.push((child_node, old_iter));
639 }
639 }
640 // Found the end of a `children.iter()` iterator.
640 // Found the end of a `children.iter()` iterator.
641 if let Some((child_node, next_iter)) = stack.pop() {
641 if let Some((child_node, next_iter)) = stack.pop() {
642 // "Return" from pseudo-recursion by restoring state from the
642 // "Return" from pseudo-recursion by restoring state from the
643 // explicit stack
643 // explicit stack
644 iter = next_iter;
644 iter = next_iter;
645
645
646 Some(Ok(child_node))
646 Some(Ok(child_node))
647 } else {
647 } else {
648 // Reached the bottom of the stack, we’re done
648 // Reached the bottom of the stack, we’re done
649 None
649 None
650 }
650 }
651 })
651 })
652 }
652 }
653
653
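/// Clear the stored `mtime` of the entries at the given paths, if they
/// exist. Used by `pack_v1` and `pack_v2` after collecting the paths
/// whose mtime is ambiguous with the current time.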
654 fn clear_known_ambiguous_mtimes(
654 fn clear_known_ambiguous_mtimes(
655 &mut self,
655 &mut self,
656 paths: &[impl AsRef<HgPath>],
656 paths: &[impl AsRef<HgPath>],
657 ) -> Result<(), DirstateV2ParseError> {
657 ) -> Result<(), DirstateV2ParseError> {
658 for path in paths {
658 for path in paths {
659 if let Some(node) = Self::get_node_mut(
659 if let Some(node) = Self::get_node_mut(
660 self.on_disk,
660 self.on_disk,
661 &mut self.root,
661 &mut self.root,
662 path.as_ref(),
662 path.as_ref(),
663 )? {
663 )? {
664 if let NodeData::Entry(entry) = &mut node.data {
664 if let NodeData::Entry(entry) = &mut node.data {
665 entry.clear_mtime();
665 entry.clear_mtime();
666 }
666 }
667 }
667 }
668 }
668 }
669 Ok(())
669 Ok(())
670 }
670 }
671
671
672 /// Return a fallible iterator of full paths of nodes that have an
672 /// Return a fallible iterator of full paths of nodes that have an
673 /// `entry` for which the given `predicate` returns true.
673 /// `entry` for which the given `predicate` returns true.
674 ///
674 ///
675 /// Fallibility means that each iterator item is a `Result`, which may
675 /// Fallibility means that each iterator item is a `Result`, which may
676 /// indicate a parse error of the on-disk dirstate-v2 format. Such errors
676 /// indicate a parse error of the on-disk dirstate-v2 format. Such errors
677 /// should only happen if Mercurial is buggy or a repository is corrupted.
677 /// should only happen if Mercurial is buggy or a repository is corrupted.
678 fn filter_full_paths<'tree>(
678 fn filter_full_paths<'tree>(
679 &'tree self,
679 &'tree self,
680 predicate: impl Fn(&DirstateEntry) -> bool + 'tree,
680 predicate: impl Fn(&DirstateEntry) -> bool + 'tree,
681 ) -> impl Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + 'tree
681 ) -> impl Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + 'tree
682 {
682 {
683 filter_map_results(self.iter_nodes(), move |node| {
683 filter_map_results(self.iter_nodes(), move |node| {
684 if let Some(entry) = node.entry()? {
684 if let Some(entry) = node.entry()? {
685 if predicate(&entry) {
685 if predicate(&entry) {
686 return Ok(Some(node.full_path(self.on_disk)?));
686 return Ok(Some(node.full_path(self.on_disk)?));
687 }
687 }
688 }
688 }
689 Ok(None)
689 Ok(None)
690 })
690 })
691 }
691 }
692 }
692 }
693
693
694 /// Like `Iterator::filter_map`, but over a fallible iterator of `Result`s.
694 /// Like `Iterator::filter_map`, but over a fallible iterator of `Result`s.
695 ///
695 ///
696 /// The callback is only called for incoming `Ok` values. Errors are passed
696 /// The callback is only called for incoming `Ok` values. Errors are passed
697 /// through as-is. In order to let it use the `?` operator the callback is
697 /// through as-is. In order to let it use the `?` operator the callback is
698 /// expected to return a `Result` of `Option`, instead of an `Option` of
698 /// expected to return a `Result` of `Option`, instead of an `Option` of
699 /// `Result`.
699 /// `Result`.
700 fn filter_map_results<'a, I, F, A, B, E>(
700 fn filter_map_results<'a, I, F, A, B, E>(
701 iter: I,
701 iter: I,
702 f: F,
702 f: F,
703 ) -> impl Iterator<Item = Result<B, E>> + 'a
703 ) -> impl Iterator<Item = Result<B, E>> + 'a
704 where
704 where
705 I: Iterator<Item = Result<A, E>> + 'a,
705 I: Iterator<Item = Result<A, E>> + 'a,
706 F: Fn(A) -> Result<Option<B>, E> + 'a,
706 F: Fn(A) -> Result<Option<B>, E> + 'a,
707 {
707 {
708 iter.filter_map(move |result| match result {
708 iter.filter_map(move |result| match result {
709 Ok(node) => f(node).transpose(),
709 Ok(node) => f(node).transpose(),
710 Err(e) => Some(Err(e)),
710 Err(e) => Some(Err(e)),
711 })
711 })
712 }
712 }
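// Illustrative example of the behavior (hypothetical values, not taken from
// the dirstate code): for an input iterator yielding `Ok(1)`, `Ok(2)`,
// `Err(e)` and a callback `|n| Ok(if n % 2 == 0 { Some(n * 10) } else { None })`,
// the returned iterator yields `Ok(20)` followed by `Err(e)`: `Ok` items are
// filtered and mapped, while errors are forwarded unchanged.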
713
713
714 impl<'on_disk> super::dispatch::DirstateMapMethods for DirstateMap<'on_disk> {
714 impl<'on_disk> super::dispatch::DirstateMapMethods for DirstateMap<'on_disk> {
715 fn clear(&mut self) {
715 fn clear(&mut self) {
716 self.root = Default::default();
716 self.root = Default::default();
717 self.nodes_with_entry_count = 0;
717 self.nodes_with_entry_count = 0;
718 self.nodes_with_copy_source_count = 0;
718 self.nodes_with_copy_source_count = 0;
719 }
719 }
720
720
721 fn add_file(
721 fn add_file(
722 &mut self,
722 &mut self,
723 filename: &HgPath,
723 filename: &HgPath,
724 entry: DirstateEntry,
724 entry: DirstateEntry,
725 added: bool,
725 added: bool,
726 merged: bool,
726 merged: bool,
727 from_p2: bool,
727 from_p2: bool,
728 possibly_dirty: bool,
728 possibly_dirty: bool,
729 ) -> Result<(), DirstateError> {
729 ) -> Result<(), DirstateError> {
730 let mut entry = entry;
730 let mut entry = entry;
731 if added {
731 if added {
732 assert!(!possibly_dirty);
732 assert!(!possibly_dirty);
733 assert!(!from_p2);
733 assert!(!from_p2);
734 entry.state = EntryState::Added;
734 entry.state = EntryState::Added;
735 entry.size = SIZE_NON_NORMAL;
735 entry.size = SIZE_NON_NORMAL;
736 entry.mtime = MTIME_UNSET;
736 entry.mtime = MTIME_UNSET;
737 } else if merged {
737 } else if merged {
738 assert!(!possibly_dirty);
738 assert!(!possibly_dirty);
739 assert!(!from_p2);
739 assert!(!from_p2);
740 entry.state = EntryState::Merged;
740 entry.state = EntryState::Merged;
741 entry.size = SIZE_FROM_OTHER_PARENT;
741 entry.size = SIZE_FROM_OTHER_PARENT;
742 entry.mtime = MTIME_UNSET;
742 entry.mtime = MTIME_UNSET;
743 } else if from_p2 {
743 } else if from_p2 {
744 assert!(!possibly_dirty);
744 assert!(!possibly_dirty);
745 entry.state = EntryState::Normal;
745 entry.state = EntryState::Normal;
746 entry.size = SIZE_FROM_OTHER_PARENT;
746 entry.size = SIZE_FROM_OTHER_PARENT;
747 entry.mtime = MTIME_UNSET;
747 entry.mtime = MTIME_UNSET;
748 } else if possibly_dirty {
748 } else if possibly_dirty {
749 entry.state = EntryState::Normal;
749 entry.state = EntryState::Normal;
750 entry.size = SIZE_NON_NORMAL;
750 entry.size = SIZE_NON_NORMAL;
751 entry.mtime = MTIME_UNSET;
751 entry.mtime = MTIME_UNSET;
752 } else {
752 } else {
753 entry.state = EntryState::Normal;
753 entry.state = EntryState::Normal;
754 entry.size = entry.size & V1_RANGEMASK;
754 entry.size = entry.size & V1_RANGEMASK;
755 entry.mtime = entry.mtime & V1_RANGEMASK;
755 entry.mtime = entry.mtime & V1_RANGEMASK;
756 }
756 }
757
757
758 let old_state = match self.get(filename)? {
758 let old_state = match self.get(filename)? {
759 Some(e) => e.state,
759 Some(e) => e.state,
760 None => EntryState::Unknown,
760 None => EntryState::Unknown,
761 };
761 };
762
762
763 Ok(self.add_or_remove_file(filename, old_state, entry)?)
763 Ok(self.add_or_remove_file(filename, old_state, entry)?)
764 }
764 }
765
765
766 fn remove_file(
766 fn remove_file(
767 &mut self,
767 &mut self,
768 filename: &HgPath,
768 filename: &HgPath,
769 in_merge: bool,
769 in_merge: bool,
770 ) -> Result<(), DirstateError> {
770 ) -> Result<(), DirstateError> {
771 let old_entry_opt = self.get(filename)?;
771 let old_entry_opt = self.get(filename)?;
772 let old_state = match old_entry_opt {
772 let old_state = match old_entry_opt {
773 Some(e) => e.state,
773 Some(e) => e.state,
774 None => EntryState::Unknown,
774 None => EntryState::Unknown,
775 };
775 };
776 let mut size = 0;
776 let mut size = 0;
777 if in_merge {
777 if in_merge {
778 // XXX we should not be able to have 'm' state and 'FROM_P2' outside
778 // XXX we should not be able to have 'm' state and 'FROM_P2' outside
779 // of a merge, so I (marmoute) am not sure we need the
779 // of a merge, so I (marmoute) am not sure we need the
780 // conditional at all. Double-checking this with an assert
780 // conditional at all. Double-checking this with an assert
781 // would be nice.
781 // would be nice.
782 if let Some(old_entry) = old_entry_opt {
782 if let Some(old_entry) = old_entry_opt {
783 // backup the previous state
783 // backup the previous state
784 if old_entry.state == EntryState::Merged {
784 if old_entry.state == EntryState::Merged {
785 size = SIZE_NON_NORMAL;
785 size = SIZE_NON_NORMAL;
786 } else if old_entry.state == EntryState::Normal
786 } else if old_entry.state == EntryState::Normal
787 && old_entry.size == SIZE_FROM_OTHER_PARENT
787 && old_entry.size == SIZE_FROM_OTHER_PARENT
788 {
788 {
789 // other parent
789 // other parent
790 size = SIZE_FROM_OTHER_PARENT;
790 size = SIZE_FROM_OTHER_PARENT;
791 }
791 }
792 }
792 }
793 }
793 }
794 if size == 0 {
794 if size == 0 {
795 self.copy_map_remove(filename)?;
795 self.copy_map_remove(filename)?;
796 }
796 }
797 let entry = DirstateEntry {
797 let entry = DirstateEntry {
798 state: EntryState::Removed,
798 state: EntryState::Removed,
799 mode: 0,
799 mode: 0,
800 size,
800 size,
801 mtime: 0,
801 mtime: 0,
802 };
802 };
803 Ok(self.add_or_remove_file(filename, old_state, entry)?)
803 Ok(self.add_or_remove_file(filename, old_state, entry)?)
804 }
804 }
805
805
806 fn drop_file(
806 fn drop_file(&mut self, filename: &HgPath) -> Result<bool, DirstateError> {
807 &mut self,
807 let old_state = match self.get(filename)? {
808 filename: &HgPath,
808 Some(e) => e.state,
809 old_state: EntryState,
809 None => EntryState::Unknown,
810 ) -> Result<bool, DirstateError> {
810 };
811 struct Dropped {
811 struct Dropped {
812 was_tracked: bool,
812 was_tracked: bool,
813 had_entry: bool,
813 had_entry: bool,
814 had_copy_source: bool,
814 had_copy_source: bool,
815 }
815 }
816
816
817 /// If this returns `Ok(Some((dropped, removed)))`, then
817 /// If this returns `Ok(Some((dropped, removed)))`, then
818 ///
818 ///
819 /// * `dropped` is about the leaf node that was at `filename`
819 /// * `dropped` is about the leaf node that was at `filename`
820 /// * `removed` is whether this particular level of recursion just
820 /// * `removed` is whether this particular level of recursion just
821 /// removed a node in `nodes`.
821 /// removed a node in `nodes`.
822 fn recur<'on_disk>(
822 fn recur<'on_disk>(
823 on_disk: &'on_disk [u8],
823 on_disk: &'on_disk [u8],
824 nodes: &mut ChildNodes<'on_disk>,
824 nodes: &mut ChildNodes<'on_disk>,
825 path: &HgPath,
825 path: &HgPath,
826 ) -> Result<Option<(Dropped, bool)>, DirstateV2ParseError> {
826 ) -> Result<Option<(Dropped, bool)>, DirstateV2ParseError> {
827 let (first_path_component, rest_of_path) =
827 let (first_path_component, rest_of_path) =
828 path.split_first_component();
828 path.split_first_component();
829 let node = if let Some(node) =
829 let node = if let Some(node) =
830 nodes.make_mut(on_disk)?.get_mut(first_path_component)
830 nodes.make_mut(on_disk)?.get_mut(first_path_component)
831 {
831 {
832 node
832 node
833 } else {
833 } else {
834 return Ok(None);
834 return Ok(None);
835 };
835 };
836 let dropped;
836 let dropped;
837 if let Some(rest) = rest_of_path {
837 if let Some(rest) = rest_of_path {
838 if let Some((d, removed)) =
838 if let Some((d, removed)) =
839 recur(on_disk, &mut node.children, rest)?
839 recur(on_disk, &mut node.children, rest)?
840 {
840 {
841 dropped = d;
841 dropped = d;
842 if dropped.had_entry {
842 if dropped.had_entry {
843 node.descendants_with_entry_count -= 1;
843 node.descendants_with_entry_count -= 1;
844 }
844 }
845 if dropped.was_tracked {
845 if dropped.was_tracked {
846 node.tracked_descendants_count -= 1;
846 node.tracked_descendants_count -= 1;
847 }
847 }
848
848
849 // Directory caches must be invalidated when removing a
849 // Directory caches must be invalidated when removing a
850 // child node
850 // child node
851 if removed {
851 if removed {
852 if let NodeData::CachedDirectory { .. } = &node.data {
852 if let NodeData::CachedDirectory { .. } = &node.data {
853 node.data = NodeData::None
853 node.data = NodeData::None
854 }
854 }
855 }
855 }
856 } else {
856 } else {
857 return Ok(None);
857 return Ok(None);
858 }
858 }
859 } else {
859 } else {
860 let had_entry = node.data.has_entry();
860 let had_entry = node.data.has_entry();
861 if had_entry {
861 if had_entry {
862 node.data = NodeData::None
862 node.data = NodeData::None
863 }
863 }
864 dropped = Dropped {
864 dropped = Dropped {
865 was_tracked: node
865 was_tracked: node
866 .data
866 .data
867 .as_entry()
867 .as_entry()
868 .map_or(false, |entry| entry.state.is_tracked()),
868 .map_or(false, |entry| entry.state.is_tracked()),
869 had_entry,
869 had_entry,
870 had_copy_source: node.copy_source.take().is_some(),
870 had_copy_source: node.copy_source.take().is_some(),
871 };
871 };
872 }
872 }
873 // After recursion, for both leaf (rest_of_path is None) nodes and
873 // After recursion, for both leaf (rest_of_path is None) nodes and
874 // parent nodes, remove a node if it just became empty.
874 // parent nodes, remove a node if it just became empty.
875 let remove = !node.data.has_entry()
875 let remove = !node.data.has_entry()
876 && node.copy_source.is_none()
876 && node.copy_source.is_none()
877 && node.children.is_empty();
877 && node.children.is_empty();
878 if remove {
878 if remove {
879 nodes.make_mut(on_disk)?.remove(first_path_component);
879 nodes.make_mut(on_disk)?.remove(first_path_component);
880 }
880 }
881 Ok(Some((dropped, remove)))
881 Ok(Some((dropped, remove)))
882 }
882 }
883
883
884 if let Some((dropped, _removed)) =
884 if let Some((dropped, _removed)) =
885 recur(self.on_disk, &mut self.root, filename)?
885 recur(self.on_disk, &mut self.root, filename)?
886 {
886 {
887 if dropped.had_entry {
887 if dropped.had_entry {
888 self.nodes_with_entry_count -= 1
888 self.nodes_with_entry_count -= 1
889 }
889 }
890 if dropped.had_copy_source {
890 if dropped.had_copy_source {
891 self.nodes_with_copy_source_count -= 1
891 self.nodes_with_copy_source_count -= 1
892 }
892 }
893 Ok(dropped.had_entry)
893 Ok(dropped.had_entry)
894 } else {
894 } else {
895 debug_assert!(!old_state.is_tracked());
895 debug_assert!(!old_state.is_tracked());
896 Ok(false)
896 Ok(false)
897 }
897 }
898 }
898 }
899
899
900 fn clear_ambiguous_times(
900 fn clear_ambiguous_times(
901 &mut self,
901 &mut self,
902 filenames: Vec<HgPathBuf>,
902 filenames: Vec<HgPathBuf>,
903 now: i32,
903 now: i32,
904 ) -> Result<(), DirstateV2ParseError> {
904 ) -> Result<(), DirstateV2ParseError> {
905 for filename in filenames {
905 for filename in filenames {
906 if let Some(node) =
906 if let Some(node) =
907 Self::get_node_mut(self.on_disk, &mut self.root, &filename)?
907 Self::get_node_mut(self.on_disk, &mut self.root, &filename)?
908 {
908 {
909 if let NodeData::Entry(entry) = &mut node.data {
909 if let NodeData::Entry(entry) = &mut node.data {
910 entry.clear_ambiguous_mtime(now);
910 entry.clear_ambiguous_mtime(now);
911 }
911 }
912 }
912 }
913 }
913 }
914 Ok(())
914 Ok(())
915 }
915 }
916
916
917 fn non_normal_entries_contains(
917 fn non_normal_entries_contains(
918 &mut self,
918 &mut self,
919 key: &HgPath,
919 key: &HgPath,
920 ) -> Result<bool, DirstateV2ParseError> {
920 ) -> Result<bool, DirstateV2ParseError> {
921 Ok(if let Some(node) = self.get_node(key)? {
921 Ok(if let Some(node) = self.get_node(key)? {
922 node.entry()?.map_or(false, |entry| entry.is_non_normal())
922 node.entry()?.map_or(false, |entry| entry.is_non_normal())
923 } else {
923 } else {
924 false
924 false
925 })
925 })
926 }
926 }
927
927
928 fn non_normal_entries_remove(&mut self, _key: &HgPath) {
928 fn non_normal_entries_remove(&mut self, _key: &HgPath) {
929 // Do nothing, this `DirstateMap` does not have a separate "non normal
929 // Do nothing, this `DirstateMap` does not have a separate "non normal
930 // entries" set that need to be kept up to date
930 // entries" set that need to be kept up to date
931 }
931 }
932
932
933 fn non_normal_or_other_parent_paths(
933 fn non_normal_or_other_parent_paths(
934 &mut self,
934 &mut self,
935 ) -> Box<dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + '_>
935 ) -> Box<dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + '_>
936 {
936 {
937 Box::new(self.filter_full_paths(|entry| {
937 Box::new(self.filter_full_paths(|entry| {
938 entry.is_non_normal() || entry.is_from_other_parent()
938 entry.is_non_normal() || entry.is_from_other_parent()
939 }))
939 }))
940 }
940 }
941
941
942 fn set_non_normal_other_parent_entries(&mut self, _force: bool) {
942 fn set_non_normal_other_parent_entries(&mut self, _force: bool) {
943 // Do nothing, this `DirstateMap` does not have a separate "non normal
943 // Do nothing, this `DirstateMap` does not have a separate "non normal
944 // entries" and "from other parent" sets that need to be recomputed
944 // entries" and "from other parent" sets that need to be recomputed
945 }
945 }
946
946
947 fn iter_non_normal_paths(
947 fn iter_non_normal_paths(
948 &mut self,
948 &mut self,
949 ) -> Box<
949 ) -> Box<
950 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
950 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
951 > {
951 > {
952 self.iter_non_normal_paths_panic()
952 self.iter_non_normal_paths_panic()
953 }
953 }
954
954
955 fn iter_non_normal_paths_panic(
955 fn iter_non_normal_paths_panic(
956 &self,
956 &self,
957 ) -> Box<
957 ) -> Box<
958 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
958 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
959 > {
959 > {
960 Box::new(self.filter_full_paths(|entry| entry.is_non_normal()))
960 Box::new(self.filter_full_paths(|entry| entry.is_non_normal()))
961 }
961 }
962
962
963 fn iter_other_parent_paths(
963 fn iter_other_parent_paths(
964 &mut self,
964 &mut self,
965 ) -> Box<
965 ) -> Box<
966 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
966 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
967 > {
967 > {
968 Box::new(self.filter_full_paths(|entry| entry.is_from_other_parent()))
968 Box::new(self.filter_full_paths(|entry| entry.is_from_other_parent()))
969 }
969 }
970
970
971 fn has_tracked_dir(
971 fn has_tracked_dir(
972 &mut self,
972 &mut self,
973 directory: &HgPath,
973 directory: &HgPath,
974 ) -> Result<bool, DirstateError> {
974 ) -> Result<bool, DirstateError> {
975 if let Some(node) = self.get_node(directory)? {
975 if let Some(node) = self.get_node(directory)? {
976 // A node without a `DirstateEntry` was created to hold child
976 // A node without a `DirstateEntry` was created to hold child
977 // nodes, and is therefore a directory.
977 // nodes, and is therefore a directory.
978 let state = node.state()?;
978 let state = node.state()?;
979 Ok(state.is_none() && node.tracked_descendants_count() > 0)
979 Ok(state.is_none() && node.tracked_descendants_count() > 0)
980 } else {
980 } else {
981 Ok(false)
981 Ok(false)
982 }
982 }
983 }
983 }
984
984
985 fn has_dir(&mut self, directory: &HgPath) -> Result<bool, DirstateError> {
985 fn has_dir(&mut self, directory: &HgPath) -> Result<bool, DirstateError> {
986 if let Some(node) = self.get_node(directory)? {
986 if let Some(node) = self.get_node(directory)? {
987 // A node without a `DirstateEntry` was created to hold child
987 // A node without a `DirstateEntry` was created to hold child
988 // nodes, and is therefore a directory.
988 // nodes, and is therefore a directory.
989 let state = node.state()?;
989 let state = node.state()?;
990 Ok(state.is_none() && node.descendants_with_entry_count() > 0)
990 Ok(state.is_none() && node.descendants_with_entry_count() > 0)
991 } else {
991 } else {
992 Ok(false)
992 Ok(false)
993 }
993 }
994 }
994 }
995
995
996 #[timed]
996 #[timed]
997 fn pack_v1(
997 fn pack_v1(
998 &mut self,
998 &mut self,
999 parents: DirstateParents,
999 parents: DirstateParents,
1000 now: Timestamp,
1000 now: Timestamp,
1001 ) -> Result<Vec<u8>, DirstateError> {
1001 ) -> Result<Vec<u8>, DirstateError> {
1002 let now: i32 = now.0.try_into().expect("time overflow");
1002 let now: i32 = now.0.try_into().expect("time overflow");
1003 let mut ambiguous_mtimes = Vec::new();
1003 let mut ambiguous_mtimes = Vec::new();
1004 // Optimization (to be measured?): pre-compute size to avoid `Vec`
1004 // Optimization (to be measured?): pre-compute size to avoid `Vec`
1005 // reallocations
1005 // reallocations
1006 let mut size = parents.as_bytes().len();
1006 let mut size = parents.as_bytes().len();
1007 for node in self.iter_nodes() {
1007 for node in self.iter_nodes() {
1008 let node = node?;
1008 let node = node?;
1009 if let Some(entry) = node.entry()? {
1009 if let Some(entry) = node.entry()? {
1010 size += packed_entry_size(
1010 size += packed_entry_size(
1011 node.full_path(self.on_disk)?,
1011 node.full_path(self.on_disk)?,
1012 node.copy_source(self.on_disk)?,
1012 node.copy_source(self.on_disk)?,
1013 );
1013 );
1014 if entry.mtime_is_ambiguous(now) {
1014 if entry.mtime_is_ambiguous(now) {
1015 ambiguous_mtimes.push(
1015 ambiguous_mtimes.push(
1016 node.full_path_borrowed(self.on_disk)?
1016 node.full_path_borrowed(self.on_disk)?
1017 .detach_from_tree(),
1017 .detach_from_tree(),
1018 )
1018 )
1019 }
1019 }
1020 }
1020 }
1021 }
1021 }
1022 self.clear_known_ambiguous_mtimes(&ambiguous_mtimes)?;
1022 self.clear_known_ambiguous_mtimes(&ambiguous_mtimes)?;
1023
1023
1024 let mut packed = Vec::with_capacity(size);
1024 let mut packed = Vec::with_capacity(size);
1025 packed.extend(parents.as_bytes());
1025 packed.extend(parents.as_bytes());
1026
1026
1027 for node in self.iter_nodes() {
1027 for node in self.iter_nodes() {
1028 let node = node?;
1028 let node = node?;
1029 if let Some(entry) = node.entry()? {
1029 if let Some(entry) = node.entry()? {
1030 pack_entry(
1030 pack_entry(
1031 node.full_path(self.on_disk)?,
1031 node.full_path(self.on_disk)?,
1032 &entry,
1032 &entry,
1033 node.copy_source(self.on_disk)?,
1033 node.copy_source(self.on_disk)?,
1034 &mut packed,
1034 &mut packed,
1035 );
1035 );
1036 }
1036 }
1037 }
1037 }
1038 Ok(packed)
1038 Ok(packed)
1039 }
1039 }
1040
1040
1041 #[timed]
1041 #[timed]
1042 fn pack_v2(
1042 fn pack_v2(
1043 &mut self,
1043 &mut self,
1044 parents: DirstateParents,
1044 parents: DirstateParents,
1045 now: Timestamp,
1045 now: Timestamp,
1046 ) -> Result<Vec<u8>, DirstateError> {
1046 ) -> Result<Vec<u8>, DirstateError> {
1047 // TODO: how do we want to handle this in 2038?
1047 // TODO: how do we want to handle this in 2038?
1048 let now: i32 = now.0.try_into().expect("time overflow");
1048 let now: i32 = now.0.try_into().expect("time overflow");
1049 let mut paths = Vec::new();
1049 let mut paths = Vec::new();
1050 for node in self.iter_nodes() {
1050 for node in self.iter_nodes() {
1051 let node = node?;
1051 let node = node?;
1052 if let Some(entry) = node.entry()? {
1052 if let Some(entry) = node.entry()? {
1053 if entry.mtime_is_ambiguous(now) {
1053 if entry.mtime_is_ambiguous(now) {
1054 paths.push(
1054 paths.push(
1055 node.full_path_borrowed(self.on_disk)?
1055 node.full_path_borrowed(self.on_disk)?
1056 .detach_from_tree(),
1056 .detach_from_tree(),
1057 )
1057 )
1058 }
1058 }
1059 }
1059 }
1060 }
1060 }
1061 // Borrow of `self` ends here since we collect cloned paths
1061 // Borrow of `self` ends here since we collect cloned paths
1062
1062
1063 self.clear_known_ambiguous_mtimes(&paths)?;
1063 self.clear_known_ambiguous_mtimes(&paths)?;
1064
1064
1065 on_disk::write(self, parents)
1065 on_disk::write(self, parents)
1066 }
1066 }
1067
1067
1068 fn status<'a>(
1068 fn status<'a>(
1069 &'a mut self,
1069 &'a mut self,
1070 matcher: &'a (dyn Matcher + Sync),
1070 matcher: &'a (dyn Matcher + Sync),
1071 root_dir: PathBuf,
1071 root_dir: PathBuf,
1072 ignore_files: Vec<PathBuf>,
1072 ignore_files: Vec<PathBuf>,
1073 options: StatusOptions,
1073 options: StatusOptions,
1074 ) -> Result<(DirstateStatus<'a>, Vec<PatternFileWarning>), StatusError>
1074 ) -> Result<(DirstateStatus<'a>, Vec<PatternFileWarning>), StatusError>
1075 {
1075 {
1076 super::status::status(self, matcher, root_dir, ignore_files, options)
1076 super::status::status(self, matcher, root_dir, ignore_files, options)
1077 }
1077 }
1078
1078
1079 fn copy_map_len(&self) -> usize {
1079 fn copy_map_len(&self) -> usize {
1080 self.nodes_with_copy_source_count as usize
1080 self.nodes_with_copy_source_count as usize
1081 }
1081 }
1082
1082
1083 fn copy_map_iter(&self) -> CopyMapIter<'_> {
1083 fn copy_map_iter(&self) -> CopyMapIter<'_> {
1084 Box::new(filter_map_results(self.iter_nodes(), move |node| {
1084 Box::new(filter_map_results(self.iter_nodes(), move |node| {
1085 Ok(if let Some(source) = node.copy_source(self.on_disk)? {
1085 Ok(if let Some(source) = node.copy_source(self.on_disk)? {
1086 Some((node.full_path(self.on_disk)?, source))
1086 Some((node.full_path(self.on_disk)?, source))
1087 } else {
1087 } else {
1088 None
1088 None
1089 })
1089 })
1090 }))
1090 }))
1091 }
1091 }
1092
1092
1093 fn copy_map_contains_key(
1093 fn copy_map_contains_key(
1094 &self,
1094 &self,
1095 key: &HgPath,
1095 key: &HgPath,
1096 ) -> Result<bool, DirstateV2ParseError> {
1096 ) -> Result<bool, DirstateV2ParseError> {
1097 Ok(if let Some(node) = self.get_node(key)? {
1097 Ok(if let Some(node) = self.get_node(key)? {
1098 node.has_copy_source()
1098 node.has_copy_source()
1099 } else {
1099 } else {
1100 false
1100 false
1101 })
1101 })
1102 }
1102 }
1103
1103
1104 fn copy_map_get(
1104 fn copy_map_get(
1105 &self,
1105 &self,
1106 key: &HgPath,
1106 key: &HgPath,
1107 ) -> Result<Option<&HgPath>, DirstateV2ParseError> {
1107 ) -> Result<Option<&HgPath>, DirstateV2ParseError> {
1108 if let Some(node) = self.get_node(key)? {
1108 if let Some(node) = self.get_node(key)? {
1109 if let Some(source) = node.copy_source(self.on_disk)? {
1109 if let Some(source) = node.copy_source(self.on_disk)? {
1110 return Ok(Some(source));
1110 return Ok(Some(source));
1111 }
1111 }
1112 }
1112 }
1113 Ok(None)
1113 Ok(None)
1114 }
1114 }
1115
1115
1116 fn copy_map_remove(
1116 fn copy_map_remove(
1117 &mut self,
1117 &mut self,
1118 key: &HgPath,
1118 key: &HgPath,
1119 ) -> Result<Option<HgPathBuf>, DirstateV2ParseError> {
1119 ) -> Result<Option<HgPathBuf>, DirstateV2ParseError> {
1120 let count = &mut self.nodes_with_copy_source_count;
1120 let count = &mut self.nodes_with_copy_source_count;
1121 Ok(
1121 Ok(
1122 Self::get_node_mut(self.on_disk, &mut self.root, key)?.and_then(
1122 Self::get_node_mut(self.on_disk, &mut self.root, key)?.and_then(
1123 |node| {
1123 |node| {
1124 if node.copy_source.is_some() {
1124 if node.copy_source.is_some() {
1125 *count -= 1
1125 *count -= 1
1126 }
1126 }
1127 node.copy_source.take().map(Cow::into_owned)
1127 node.copy_source.take().map(Cow::into_owned)
1128 },
1128 },
1129 ),
1129 ),
1130 )
1130 )
1131 }
1131 }
1132
1132
1133 fn copy_map_insert(
1133 fn copy_map_insert(
1134 &mut self,
1134 &mut self,
1135 key: HgPathBuf,
1135 key: HgPathBuf,
1136 value: HgPathBuf,
1136 value: HgPathBuf,
1137 ) -> Result<Option<HgPathBuf>, DirstateV2ParseError> {
1137 ) -> Result<Option<HgPathBuf>, DirstateV2ParseError> {
1138 let node = Self::get_or_insert_node(
1138 let node = Self::get_or_insert_node(
1139 self.on_disk,
1139 self.on_disk,
1140 &mut self.root,
1140 &mut self.root,
1141 &key,
1141 &key,
1142 WithBasename::to_cow_owned,
1142 WithBasename::to_cow_owned,
1143 |_ancestor| {},
1143 |_ancestor| {},
1144 )?;
1144 )?;
1145 if node.copy_source.is_none() {
1145 if node.copy_source.is_none() {
1146 self.nodes_with_copy_source_count += 1
1146 self.nodes_with_copy_source_count += 1
1147 }
1147 }
1148 Ok(node.copy_source.replace(value.into()).map(Cow::into_owned))
1148 Ok(node.copy_source.replace(value.into()).map(Cow::into_owned))
1149 }
1149 }
1150
1150
1151 fn len(&self) -> usize {
1151 fn len(&self) -> usize {
1152 self.nodes_with_entry_count as usize
1152 self.nodes_with_entry_count as usize
1153 }
1153 }
1154
1154
1155 fn contains_key(
1155 fn contains_key(
1156 &self,
1156 &self,
1157 key: &HgPath,
1157 key: &HgPath,
1158 ) -> Result<bool, DirstateV2ParseError> {
1158 ) -> Result<bool, DirstateV2ParseError> {
1159 Ok(self.get(key)?.is_some())
1159 Ok(self.get(key)?.is_some())
1160 }
1160 }
1161
1161
1162 fn get(
1162 fn get(
1163 &self,
1163 &self,
1164 key: &HgPath,
1164 key: &HgPath,
1165 ) -> Result<Option<DirstateEntry>, DirstateV2ParseError> {
1165 ) -> Result<Option<DirstateEntry>, DirstateV2ParseError> {
1166 Ok(if let Some(node) = self.get_node(key)? {
1166 Ok(if let Some(node) = self.get_node(key)? {
1167 node.entry()?
1167 node.entry()?
1168 } else {
1168 } else {
1169 None
1169 None
1170 })
1170 })
1171 }
1171 }
1172
1172
1173 fn iter(&self) -> StateMapIter<'_> {
1173 fn iter(&self) -> StateMapIter<'_> {
1174 Box::new(filter_map_results(self.iter_nodes(), move |node| {
1174 Box::new(filter_map_results(self.iter_nodes(), move |node| {
1175 Ok(if let Some(entry) = node.entry()? {
1175 Ok(if let Some(entry) = node.entry()? {
1176 Some((node.full_path(self.on_disk)?, entry))
1176 Some((node.full_path(self.on_disk)?, entry))
1177 } else {
1177 } else {
1178 None
1178 None
1179 })
1179 })
1180 }))
1180 }))
1181 }
1181 }
1182
1182
1183 fn iter_directories(
1183 fn iter_directories(
1184 &self,
1184 &self,
1185 ) -> Box<
1185 ) -> Box<
1186 dyn Iterator<
1186 dyn Iterator<
1187 Item = Result<
1187 Item = Result<
1188 (&HgPath, Option<Timestamp>),
1188 (&HgPath, Option<Timestamp>),
1189 DirstateV2ParseError,
1189 DirstateV2ParseError,
1190 >,
1190 >,
1191 > + Send
1191 > + Send
1192 + '_,
1192 + '_,
1193 > {
1193 > {
1194 Box::new(filter_map_results(self.iter_nodes(), move |node| {
1194 Box::new(filter_map_results(self.iter_nodes(), move |node| {
1195 Ok(if node.state()?.is_none() {
1195 Ok(if node.state()?.is_none() {
1196 Some((
1196 Some((
1197 node.full_path(self.on_disk)?,
1197 node.full_path(self.on_disk)?,
1198 node.cached_directory_mtime()
1198 node.cached_directory_mtime()
1199 .map(|mtime| Timestamp(mtime.seconds())),
1199 .map(|mtime| Timestamp(mtime.seconds())),
1200 ))
1200 ))
1201 } else {
1201 } else {
1202 None
1202 None
1203 })
1203 })
1204 }))
1204 }))
1205 }
1205 }
1206 }
1206 }
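The `iter_nodes` implementation above relies on a pseudo-recursive traversal
with an explicit stack so that it can expose an external `Iterator`. Below is
a minimal, self-contained sketch of that pattern using a simplified,
infallible node type; the `Node` struct and `iter_nodes` function here are
illustrative only and are not the actual hg-core types.

struct Node {
    name: &'static str,
    children: Vec<Node>,
}

/// Post-order, depth-first iteration over all nodes, driven by an explicit
/// stack instead of the call stack, so it can be an external `Iterator`.
fn iter_nodes<'a>(root: &'a [Node]) -> impl Iterator<Item = &'a Node> + 'a {
    let mut stack = Vec::new();
    let mut iter = root.iter();
    std::iter::from_fn(move || loop {
        if let Some(child) = iter.next() {
            // "Recurse" into `child`: save the current level's iterator and
            // start iterating the child's own children.
            let parent_iter =
                std::mem::replace(&mut iter, child.children.iter());
            stack.push((child, parent_iter));
        } else if let Some((node, parent_iter)) = stack.pop() {
            // All descendants of `node` have been yielded; yield `node` and
            // resume iterating its siblings.
            iter = parent_iter;
            return Some(node);
        } else {
            // Root level exhausted and the stack is empty: we are done.
            return None;
        }
    })
}

fn main() {
    let tree = vec![Node {
        name: "a",
        children: vec![
            Node { name: "a/b", children: Vec::new() },
            Node { name: "a/c", children: Vec::new() },
        ],
    }];
    let names: Vec<_> = iter_nodes(&tree).map(|node| node.name).collect();
    // Descendants are yielded before their parent.
    assert_eq!(names, ["a/b", "a/c", "a"]);
}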
@@ -1,496 +1,487 b''
1 use std::path::PathBuf;
1 use std::path::PathBuf;
2
2
3 use crate::dirstate::parsers::Timestamp;
3 use crate::dirstate::parsers::Timestamp;
4 use crate::dirstate_tree::on_disk::DirstateV2ParseError;
4 use crate::dirstate_tree::on_disk::DirstateV2ParseError;
5 use crate::matchers::Matcher;
5 use crate::matchers::Matcher;
6 use crate::utils::hg_path::{HgPath, HgPathBuf};
6 use crate::utils::hg_path::{HgPath, HgPathBuf};
7 use crate::CopyMapIter;
7 use crate::CopyMapIter;
8 use crate::DirstateEntry;
8 use crate::DirstateEntry;
9 use crate::DirstateError;
9 use crate::DirstateError;
10 use crate::DirstateMap;
10 use crate::DirstateMap;
11 use crate::DirstateParents;
11 use crate::DirstateParents;
12 use crate::DirstateStatus;
12 use crate::DirstateStatus;
13 use crate::EntryState;
14 use crate::PatternFileWarning;
13 use crate::PatternFileWarning;
15 use crate::StateMapIter;
14 use crate::StateMapIter;
16 use crate::StatusError;
15 use crate::StatusError;
17 use crate::StatusOptions;
16 use crate::StatusOptions;
18
17
19 /// `rust/hg-cpython/src/dirstate/dirstate_map.rs` implements in Rust a
18 /// `rust/hg-cpython/src/dirstate/dirstate_map.rs` implements in Rust a
20 /// `DirstateMap` Python class that wraps `Box<dyn DirstateMapMethods + Send>`,
19 /// `DirstateMap` Python class that wraps `Box<dyn DirstateMapMethods + Send>`,
21 /// a trait object of this trait. Except for constructors, this trait defines
20 /// a trait object of this trait. Except for constructors, this trait defines
22 /// all APIs that the class needs to interact with its inner dirstate map.
21 /// all APIs that the class needs to interact with its inner dirstate map.
23 ///
22 ///
24 /// A trait object is used to support two different concrete types:
23 /// A trait object is used to support two different concrete types:
25 ///
24 ///
26 /// * `rust/hg-core/src/dirstate/dirstate_map.rs` defines the "flat dirstate
25 /// * `rust/hg-core/src/dirstate/dirstate_map.rs` defines the "flat dirstate
27 /// map" which is based on a few large `HgPath`-keyed `HashMap` and `HashSet`
26 /// map" which is based on a few large `HgPath`-keyed `HashMap` and `HashSet`
28 /// fields.
27 /// fields.
29 /// * `rust/hg-core/src/dirstate_tree/dirstate_map.rs` defines the "tree
28 /// * `rust/hg-core/src/dirstate_tree/dirstate_map.rs` defines the "tree
30 /// dirstate map" based on a tree data struture with nodes for directories
29 /// dirstate map" based on a tree data struture with nodes for directories
31 /// containing child nodes for their files and sub-directories. This tree
30 /// containing child nodes for their files and sub-directories. This tree
32 /// enables a more efficient algorithm for `hg status`, but its details are
31 /// enables a more efficient algorithm for `hg status`, but its details are
33 /// abstracted in this trait.
32 /// abstracted in this trait.
34 ///
33 ///
35 /// The dirstate map associates paths of files in the working directory to
34 /// The dirstate map associates paths of files in the working directory to
36 /// various information about the state of those files.
35 /// various information about the state of those files.
37 pub trait DirstateMapMethods {
36 pub trait DirstateMapMethods {
38 /// Remove information about all files in this map
37 /// Remove information about all files in this map
39 fn clear(&mut self);
38 fn clear(&mut self);
40
39
41 /// Add or change the information associated to a given file.
40 /// Add or change the information associated to a given file.
42 ///
41 ///
43 /// `old_state` is the state in the entry that `get` would have returned
42 /// `old_state` is the state in the entry that `get` would have returned
44 /// before this call, or `EntryState::Unknown` if there was no such entry.
43 /// before this call, or `EntryState::Unknown` if there was no such entry.
45 ///
44 ///
46 /// `entry.state` should never be `EntryState::Unknown`.
45 /// `entry.state` should never be `EntryState::Unknown`.
47 fn add_file(
46 fn add_file(
48 &mut self,
47 &mut self,
49 filename: &HgPath,
48 filename: &HgPath,
50 entry: DirstateEntry,
49 entry: DirstateEntry,
51 added: bool,
50 added: bool,
52 merged: bool,
51 merged: bool,
53 from_p2: bool,
52 from_p2: bool,
54 possibly_dirty: bool,
53 possibly_dirty: bool,
55 ) -> Result<(), DirstateError>;
54 ) -> Result<(), DirstateError>;
56
55
57 /// Mark a file as "removed" (as in `hg rm`).
56 /// Mark a file as "removed" (as in `hg rm`).
58 ///
57 ///
59 /// `old_state` is the state in the entry that `get` would have returned
58 /// `old_state` is the state in the entry that `get` would have returned
60 /// before this call, or `EntryState::Unknown` if there was no such entry.
59 /// before this call, or `EntryState::Unknown` if there was no such entry.
61 ///
60 ///
62 /// `size` is not actually a size but the 0 or -1 or -2 value that would be
61 /// `size` is not actually a size but the 0 or -1 or -2 value that would be
63 /// put in the size field in the dirstate-v1 format.
62 /// put in the size field in the dirstate-v1 format.
64 fn remove_file(
63 fn remove_file(
65 &mut self,
64 &mut self,
66 filename: &HgPath,
65 filename: &HgPath,
67 in_merge: bool,
66 in_merge: bool,
68 ) -> Result<(), DirstateError>;
67 ) -> Result<(), DirstateError>;
69
68
70 /// Drop information about this file from the map if any, and return
69 /// Drop information about this file from the map if any, and return
71 /// whether there was any.
70 /// whether there was any.
72 ///
71 ///
73 /// `get` will now return `None` for this filename.
72 /// `get` will now return `None` for this filename.
74 ///
73 ///
75 /// `old_state` is the state in the entry that `get` would have returned
74 /// `old_state` is the state in the entry that `get` would have returned
76 /// before this call, or `EntryState::Unknown` if there was no such entry.
75 /// before this call, or `EntryState::Unknown` if there was no such entry.
77 fn drop_file(
76 fn drop_file(&mut self, filename: &HgPath) -> Result<bool, DirstateError>;
78 &mut self,
79 filename: &HgPath,
80 old_state: EntryState,
81 ) -> Result<bool, DirstateError>;
82
77
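    // With the new signature, callers no longer look up the entry and pass
    // its old state themselves; an illustrative call (hypothetical path):
    //
    //     map.drop_file(HgPath::new(b"dir/file"))?;
    //
    // whereas the previous signature also required the corresponding
    // `EntryState` as a second argument.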
83 /// Among given files, mark the stored `mtime` as ambiguous if there is one
78 /// Among given files, mark the stored `mtime` as ambiguous if there is one
84 /// (if `state == EntryState::Normal`) equal to the given current Unix
79 /// (if `state == EntryState::Normal`) equal to the given current Unix
85 /// timestamp.
80 /// timestamp.
86 fn clear_ambiguous_times(
81 fn clear_ambiguous_times(
87 &mut self,
82 &mut self,
88 filenames: Vec<HgPathBuf>,
83 filenames: Vec<HgPathBuf>,
89 now: i32,
84 now: i32,
90 ) -> Result<(), DirstateV2ParseError>;
85 ) -> Result<(), DirstateV2ParseError>;
91
86
92 /// Return whether the map has a "non-normal" entry for the given
87 /// Return whether the map has a "non-normal" entry for the given
93 /// filename. That is, any entry with a `state` other than
88 /// filename. That is, any entry with a `state` other than
94 /// `EntryState::Normal` or with an ambiguous `mtime`.
89 /// `EntryState::Normal` or with an ambiguous `mtime`.
95 fn non_normal_entries_contains(
90 fn non_normal_entries_contains(
96 &mut self,
91 &mut self,
97 key: &HgPath,
92 key: &HgPath,
98 ) -> Result<bool, DirstateV2ParseError>;
93 ) -> Result<bool, DirstateV2ParseError>;
99
94
100 /// Mark the given path as a "normal" file. This is only relevant in the flat
95 /// Mark the given path as a "normal" file. This is only relevant in the flat
101 /// dirstate map where there is a separate `HashSet` that needs to be kept
96 /// dirstate map where there is a separate `HashSet` that needs to be kept
102 /// up to date.
97 /// up to date.
103 fn non_normal_entries_remove(&mut self, key: &HgPath);
98 fn non_normal_entries_remove(&mut self, key: &HgPath);
104
99
105 /// Return an iterator of paths whose respective entry is either
100 /// Return an iterator of paths whose respective entry is either
106 /// "non-normal" (see `non_normal_entries_contains`) or "from other
101 /// "non-normal" (see `non_normal_entries_contains`) or "from other
107 /// parent".
102 /// parent".
108 ///
103 ///
109 /// If that information is cached, create the cache as needed.
104 /// If that information is cached, create the cache as needed.
110 ///
105 ///
111 /// "From other parent" is defined as `state == Normal && size == -2`.
106 /// "From other parent" is defined as `state == Normal && size == -2`.
112 ///
107 ///
113 /// Because parse errors can happen during iteration, the iterated items
108 /// Because parse errors can happen during iteration, the iterated items
114 /// are `Result`s.
109 /// are `Result`s.
115 fn non_normal_or_other_parent_paths(
110 fn non_normal_or_other_parent_paths(
116 &mut self,
111 &mut self,
117 ) -> Box<dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + '_>;
112 ) -> Box<dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + '_>;
118
113
119 /// Create the cache for `non_normal_or_other_parent_paths` if needed.
114 /// Create the cache for `non_normal_or_other_parent_paths` if needed.
120 ///
115 ///
121 /// If `force` is true, the cache is re-created even if it already exists.
116 /// If `force` is true, the cache is re-created even if it already exists.
122 fn set_non_normal_other_parent_entries(&mut self, force: bool);
117 fn set_non_normal_other_parent_entries(&mut self, force: bool);
123
118
124 /// Return an iterator of paths whose respective entry is "non-normal"
119 /// Return an iterator of paths whose respective entry is "non-normal"
125 /// (see `non_normal_entries_contains`).
120 /// (see `non_normal_entries_contains`).
126 ///
121 ///
127 /// If that information is cached, create the cache as needed.
122 /// If that information is cached, create the cache as needed.
128 ///
123 ///
129 /// Because parse errors can happen during iteration, the iterated items
124 /// Because parse errors can happen during iteration, the iterated items
130 /// are `Result`s.
125 /// are `Result`s.
131 fn iter_non_normal_paths(
126 fn iter_non_normal_paths(
132 &mut self,
127 &mut self,
133 ) -> Box<
128 ) -> Box<
134 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
129 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
135 >;
130 >;
136
131
137 /// Same as `iter_non_normal_paths`, but takes `&self` instead of `&mut
132 /// Same as `iter_non_normal_paths`, but takes `&self` instead of `&mut
138 /// self`.
133 /// self`.
139 ///
134 ///
140 /// Panics if a cache is necessary but does not exist yet.
135 /// Panics if a cache is necessary but does not exist yet.
141 fn iter_non_normal_paths_panic(
136 fn iter_non_normal_paths_panic(
142 &self,
137 &self,
143 ) -> Box<
138 ) -> Box<
144 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
139 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
145 >;
140 >;
146
141
147 /// Return an iterator of paths whose respective entry is "from other
142 /// Return an iterator of paths whose respective entry is "from other
148 /// parent".
143 /// parent".
149 ///
144 ///
150 /// If that information is cached, create the cache as needed.
145 /// If that information is cached, create the cache as needed.
151 ///
146 ///
152 /// "From other parent" is defined as `state == Normal && size == -2`.
147 /// "From other parent" is defined as `state == Normal && size == -2`.
153 ///
148 ///
154 /// Because parse errors can happen during iteration, the iterated items
149 /// Because parse errors can happen during iteration, the iterated items
155 /// are `Result`s.
150 /// are `Result`s.
156 fn iter_other_parent_paths(
151 fn iter_other_parent_paths(
157 &mut self,
152 &mut self,
158 ) -> Box<
153 ) -> Box<
159 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
154 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
160 >;
155 >;
161
156
162 /// Returns whether the sub-tree rooted at the given directory contains any
157 /// Returns whether the sub-tree rooted at the given directory contains any
163 /// tracked file.
158 /// tracked file.
164 ///
159 ///
165 /// A file is tracked if it has a `state` other than `EntryState::Removed`.
160 /// A file is tracked if it has a `state` other than `EntryState::Removed`.
166 fn has_tracked_dir(
161 fn has_tracked_dir(
167 &mut self,
162 &mut self,
168 directory: &HgPath,
163 directory: &HgPath,
169 ) -> Result<bool, DirstateError>;
164 ) -> Result<bool, DirstateError>;
170
165
171 /// Returns whether the sub-tree rooted at the given directory contains any
166 /// Returns whether the sub-tree rooted at the given directory contains any
172 /// file with a dirstate entry.
167 /// file with a dirstate entry.
173 fn has_dir(&mut self, directory: &HgPath) -> Result<bool, DirstateError>;
168 fn has_dir(&mut self, directory: &HgPath) -> Result<bool, DirstateError>;
174
169
175 /// Clear mtimes that are ambiguous with `now` (similar to
170 /// Clear mtimes that are ambiguous with `now` (similar to
176 /// `clear_ambiguous_times` but for all files in the dirstate map), and
171 /// `clear_ambiguous_times` but for all files in the dirstate map), and
177 /// serialize bytes to write the `.hg/dirstate` file to disk in dirstate-v1
172 /// serialize bytes to write the `.hg/dirstate` file to disk in dirstate-v1
178 /// format.
173 /// format.
179 fn pack_v1(
174 fn pack_v1(
180 &mut self,
175 &mut self,
181 parents: DirstateParents,
176 parents: DirstateParents,
182 now: Timestamp,
177 now: Timestamp,
183 ) -> Result<Vec<u8>, DirstateError>;
178 ) -> Result<Vec<u8>, DirstateError>;
184
179
185 /// Clear mtimes that are ambiguous with `now` (similar to
180 /// Clear mtimes that are ambiguous with `now` (similar to
186 /// `clear_ambiguous_times` but for all files in the dirstate map), and
181 /// `clear_ambiguous_times` but for all files in the dirstate map), and
187 /// serialize bytes to write the `.hg/dirstate` file to disk in dirstate-v2
182 /// serialize bytes to write the `.hg/dirstate` file to disk in dirstate-v2
188 /// format.
183 /// format.
189 ///
184 ///
190 /// Note: this is only supported by the tree dirstate map.
185 /// Note: this is only supported by the tree dirstate map.
191 fn pack_v2(
186 fn pack_v2(
192 &mut self,
187 &mut self,
193 parents: DirstateParents,
188 parents: DirstateParents,
194 now: Timestamp,
189 now: Timestamp,
195 ) -> Result<Vec<u8>, DirstateError>;
190 ) -> Result<Vec<u8>, DirstateError>;
196
191
197 /// Run the status algorithm.
192 /// Run the status algorithm.
198 ///
193 ///
199 /// This is not semantically a method of the dirstate map, but a different
194 /// This is not semantically a method of the dirstate map, but a different
200 /// algorithm is used for the flat vs. tree dirstate map so having it in
195 /// algorithm is used for the flat vs. tree dirstate map so having it in
201 /// this trait enables the same dynamic dispatch as with other methods.
196 /// this trait enables the same dynamic dispatch as with other methods.
202 fn status<'a>(
197 fn status<'a>(
203 &'a mut self,
198 &'a mut self,
204 matcher: &'a (dyn Matcher + Sync),
199 matcher: &'a (dyn Matcher + Sync),
205 root_dir: PathBuf,
200 root_dir: PathBuf,
206 ignore_files: Vec<PathBuf>,
201 ignore_files: Vec<PathBuf>,
207 options: StatusOptions,
202 options: StatusOptions,
208 ) -> Result<(DirstateStatus<'a>, Vec<PatternFileWarning>), StatusError>;
203 ) -> Result<(DirstateStatus<'a>, Vec<PatternFileWarning>), StatusError>;
209
204
210 /// Returns how many files in the dirstate map have a recorded copy source.
205 /// Returns how many files in the dirstate map have a recorded copy source.
211 fn copy_map_len(&self) -> usize;
206 fn copy_map_len(&self) -> usize;
212
207
213 /// Returns an iterator of `(path, copy_source)` for all files that have a
208 /// Returns an iterator of `(path, copy_source)` for all files that have a
214 /// copy source.
209 /// copy source.
215 fn copy_map_iter(&self) -> CopyMapIter<'_>;
210 fn copy_map_iter(&self) -> CopyMapIter<'_>;
216
211
217 /// Returns whether the given file has a copy source.
212 /// Returns whether the given file has a copy source.
218 fn copy_map_contains_key(
213 fn copy_map_contains_key(
219 &self,
214 &self,
220 key: &HgPath,
215 key: &HgPath,
221 ) -> Result<bool, DirstateV2ParseError>;
216 ) -> Result<bool, DirstateV2ParseError>;
222
217
223 /// Returns the copy source for the given file.
218 /// Returns the copy source for the given file.
224 fn copy_map_get(
219 fn copy_map_get(
225 &self,
220 &self,
226 key: &HgPath,
221 key: &HgPath,
227 ) -> Result<Option<&HgPath>, DirstateV2ParseError>;
222 ) -> Result<Option<&HgPath>, DirstateV2ParseError>;
228
223
229 /// Removes the recorded copy source if any for the given file, and returns
224 /// Removes the recorded copy source if any for the given file, and returns
230 /// it.
225 /// it.
231 fn copy_map_remove(
226 fn copy_map_remove(
232 &mut self,
227 &mut self,
233 key: &HgPath,
228 key: &HgPath,
234 ) -> Result<Option<HgPathBuf>, DirstateV2ParseError>;
229 ) -> Result<Option<HgPathBuf>, DirstateV2ParseError>;
235
230
236 /// Set the given `value` copy source for the given `key` file.
231 /// Set the given `value` copy source for the given `key` file.
237 fn copy_map_insert(
232 fn copy_map_insert(
238 &mut self,
233 &mut self,
239 key: HgPathBuf,
234 key: HgPathBuf,
240 value: HgPathBuf,
235 value: HgPathBuf,
241 ) -> Result<Option<HgPathBuf>, DirstateV2ParseError>;
236 ) -> Result<Option<HgPathBuf>, DirstateV2ParseError>;
242
237
243 /// Returns the number of files that have an entry.
238 /// Returns the number of files that have an entry.
244 fn len(&self) -> usize;
239 fn len(&self) -> usize;
245
240
246 /// Returns whether the given file has an entry.
241 /// Returns whether the given file has an entry.
247 fn contains_key(&self, key: &HgPath)
242 fn contains_key(&self, key: &HgPath)
248 -> Result<bool, DirstateV2ParseError>;
243 -> Result<bool, DirstateV2ParseError>;
249
244
250 /// Returns the entry, if any, for the given file.
245 /// Returns the entry, if any, for the given file.
251 fn get(
246 fn get(
252 &self,
247 &self,
253 key: &HgPath,
248 key: &HgPath,
254 ) -> Result<Option<DirstateEntry>, DirstateV2ParseError>;
249 ) -> Result<Option<DirstateEntry>, DirstateV2ParseError>;
255
250
256 /// Returns a `(path, entry)` iterator of files that have an entry.
251 /// Returns a `(path, entry)` iterator of files that have an entry.
257 ///
252 ///
258 /// Because parse errors can happen during iteration, the iterated items
253 /// Because parse errors can happen during iteration, the iterated items
259 /// are `Result`s.
254 /// are `Result`s.
260 fn iter(&self) -> StateMapIter<'_>;
255 fn iter(&self) -> StateMapIter<'_>;
261
256
262 /// In the tree dirstate, return an iterator of "directory" (entry-less)
257 /// In the tree dirstate, return an iterator of "directory" (entry-less)
263 /// nodes with the data stored for them. This is for `hg debugdirstate
258 /// nodes with the data stored for them. This is for `hg debugdirstate
264 /// --dirs`.
259 /// --dirs`.
265 ///
260 ///
266 /// In the flat dirstate, returns an empty iterator.
261 /// In the flat dirstate, returns an empty iterator.
267 ///
262 ///
268 /// Because parse errors can happen during iteration, the iterated items
263 /// Because parse errors can happen during iteration, the iterated items
269 /// are `Result`s.
264 /// are `Result`s.
270 fn iter_directories(
265 fn iter_directories(
271 &self,
266 &self,
272 ) -> Box<
267 ) -> Box<
273 dyn Iterator<
268 dyn Iterator<
274 Item = Result<
269 Item = Result<
275 (&HgPath, Option<Timestamp>),
270 (&HgPath, Option<Timestamp>),
276 DirstateV2ParseError,
271 DirstateV2ParseError,
277 >,
272 >,
278 > + Send
273 > + Send
279 + '_,
274 + '_,
280 >;
275 >;
281 }
276 }
282
277
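The trait ending here exists so that higher layers (in particular the Python bindings later in this changeset) can hold either the flat `DirstateMap` or the tree-based implementation behind a single `Box<dyn DirstateMapMethods + Send>` and dispatch dynamically, as the doc-comment on `status` above notes. The following stand-alone sketch illustrates only that dispatch pattern; `MapMethods`, `FlatMap` and `TreeMap` are simplified hypothetical stand-ins, not the real hg-core types.

// Minimal sketch of the dynamic-dispatch pattern, with hypothetical
// stand-in types instead of the real hg-core ones.
trait MapMethods {
    /// Number of tracked entries (stands in for `DirstateMapMethods::len`).
    fn len(&self) -> usize;
    /// Drop one entry, returning whether it was present
    /// (stands in for the single-argument `drop_file`).
    fn drop_file(&mut self, filename: &str) -> bool;
}

struct FlatMap(std::collections::HashMap<String, u32>);
struct TreeMap(std::collections::BTreeMap<String, u32>);

impl MapMethods for FlatMap {
    fn len(&self) -> usize {
        self.0.len()
    }
    fn drop_file(&mut self, filename: &str) -> bool {
        self.0.remove(filename).is_some()
    }
}

impl MapMethods for TreeMap {
    fn len(&self) -> usize {
        self.0.len()
    }
    fn drop_file(&mut self, filename: &str) -> bool {
        self.0.remove(filename).is_some()
    }
}

fn main() {
    // Pick the backend at runtime, the way `DirstateMap::new` below does
    // with its `use_dirstate_tree` / `use_dirstate_v2` flags.
    let use_tree = true;
    let mut map: Box<dyn MapMethods + Send> = if use_tree {
        Box::new(TreeMap(Default::default()))
    } else {
        Box::new(FlatMap(Default::default()))
    };
    // Callers only ever see the trait object.
    map.drop_file("some/file.rs");
    println!("{} entries", map.len());
}
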
283 impl DirstateMapMethods for DirstateMap {
278 impl DirstateMapMethods for DirstateMap {
284 fn clear(&mut self) {
279 fn clear(&mut self) {
285 self.clear()
280 self.clear()
286 }
281 }
287
282
288 fn add_file(
283 fn add_file(
289 &mut self,
284 &mut self,
290 filename: &HgPath,
285 filename: &HgPath,
291 entry: DirstateEntry,
286 entry: DirstateEntry,
292 added: bool,
287 added: bool,
293 merged: bool,
288 merged: bool,
294 from_p2: bool,
289 from_p2: bool,
295 possibly_dirty: bool,
290 possibly_dirty: bool,
296 ) -> Result<(), DirstateError> {
291 ) -> Result<(), DirstateError> {
297 self.add_file(filename, entry, added, merged, from_p2, possibly_dirty)
292 self.add_file(filename, entry, added, merged, from_p2, possibly_dirty)
298 }
293 }
299
294
300 fn remove_file(
295 fn remove_file(
301 &mut self,
296 &mut self,
302 filename: &HgPath,
297 filename: &HgPath,
303 in_merge: bool,
298 in_merge: bool,
304 ) -> Result<(), DirstateError> {
299 ) -> Result<(), DirstateError> {
305 self.remove_file(filename, in_merge)
300 self.remove_file(filename, in_merge)
306 }
301 }
307
302
308 fn drop_file(
303 fn drop_file(&mut self, filename: &HgPath) -> Result<bool, DirstateError> {
309 &mut self,
304 self.drop_file(filename)
310 filename: &HgPath,
311 old_state: EntryState,
312 ) -> Result<bool, DirstateError> {
313 self.drop_file(filename, old_state)
314 }
305 }
315
306
316 fn clear_ambiguous_times(
307 fn clear_ambiguous_times(
317 &mut self,
308 &mut self,
318 filenames: Vec<HgPathBuf>,
309 filenames: Vec<HgPathBuf>,
319 now: i32,
310 now: i32,
320 ) -> Result<(), DirstateV2ParseError> {
311 ) -> Result<(), DirstateV2ParseError> {
321 Ok(self.clear_ambiguous_times(filenames, now))
312 Ok(self.clear_ambiguous_times(filenames, now))
322 }
313 }
323
314
324 fn non_normal_entries_contains(
315 fn non_normal_entries_contains(
325 &mut self,
316 &mut self,
326 key: &HgPath,
317 key: &HgPath,
327 ) -> Result<bool, DirstateV2ParseError> {
318 ) -> Result<bool, DirstateV2ParseError> {
328 let (non_normal, _other_parent) =
319 let (non_normal, _other_parent) =
329 self.get_non_normal_other_parent_entries();
320 self.get_non_normal_other_parent_entries();
330 Ok(non_normal.contains(key))
321 Ok(non_normal.contains(key))
331 }
322 }
332
323
333 fn non_normal_entries_remove(&mut self, key: &HgPath) {
324 fn non_normal_entries_remove(&mut self, key: &HgPath) {
334 self.non_normal_entries_remove(key)
325 self.non_normal_entries_remove(key)
335 }
326 }
336
327
337 fn non_normal_or_other_parent_paths(
328 fn non_normal_or_other_parent_paths(
338 &mut self,
329 &mut self,
339 ) -> Box<dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + '_>
330 ) -> Box<dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + '_>
340 {
331 {
341 let (non_normal, other_parent) =
332 let (non_normal, other_parent) =
342 self.get_non_normal_other_parent_entries();
333 self.get_non_normal_other_parent_entries();
343 Box::new(non_normal.union(other_parent).map(|p| Ok(&**p)))
334 Box::new(non_normal.union(other_parent).map(|p| Ok(&**p)))
344 }
335 }
345
336
346 fn set_non_normal_other_parent_entries(&mut self, force: bool) {
337 fn set_non_normal_other_parent_entries(&mut self, force: bool) {
347 self.set_non_normal_other_parent_entries(force)
338 self.set_non_normal_other_parent_entries(force)
348 }
339 }
349
340
350 fn iter_non_normal_paths(
341 fn iter_non_normal_paths(
351 &mut self,
342 &mut self,
352 ) -> Box<
343 ) -> Box<
353 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
344 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
354 > {
345 > {
355 let (non_normal, _other_parent) =
346 let (non_normal, _other_parent) =
356 self.get_non_normal_other_parent_entries();
347 self.get_non_normal_other_parent_entries();
357 Box::new(non_normal.iter().map(|p| Ok(&**p)))
348 Box::new(non_normal.iter().map(|p| Ok(&**p)))
358 }
349 }
359
350
360 fn iter_non_normal_paths_panic(
351 fn iter_non_normal_paths_panic(
361 &self,
352 &self,
362 ) -> Box<
353 ) -> Box<
363 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
354 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
364 > {
355 > {
365 let (non_normal, _other_parent) =
356 let (non_normal, _other_parent) =
366 self.get_non_normal_other_parent_entries_panic();
357 self.get_non_normal_other_parent_entries_panic();
367 Box::new(non_normal.iter().map(|p| Ok(&**p)))
358 Box::new(non_normal.iter().map(|p| Ok(&**p)))
368 }
359 }
369
360
370 fn iter_other_parent_paths(
361 fn iter_other_parent_paths(
371 &mut self,
362 &mut self,
372 ) -> Box<
363 ) -> Box<
373 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
364 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
374 > {
365 > {
375 let (_non_normal, other_parent) =
366 let (_non_normal, other_parent) =
376 self.get_non_normal_other_parent_entries();
367 self.get_non_normal_other_parent_entries();
377 Box::new(other_parent.iter().map(|p| Ok(&**p)))
368 Box::new(other_parent.iter().map(|p| Ok(&**p)))
378 }
369 }
379
370
380 fn has_tracked_dir(
371 fn has_tracked_dir(
381 &mut self,
372 &mut self,
382 directory: &HgPath,
373 directory: &HgPath,
383 ) -> Result<bool, DirstateError> {
374 ) -> Result<bool, DirstateError> {
384 self.has_tracked_dir(directory)
375 self.has_tracked_dir(directory)
385 }
376 }
386
377
387 fn has_dir(&mut self, directory: &HgPath) -> Result<bool, DirstateError> {
378 fn has_dir(&mut self, directory: &HgPath) -> Result<bool, DirstateError> {
388 self.has_dir(directory)
379 self.has_dir(directory)
389 }
380 }
390
381
391 fn pack_v1(
382 fn pack_v1(
392 &mut self,
383 &mut self,
393 parents: DirstateParents,
384 parents: DirstateParents,
394 now: Timestamp,
385 now: Timestamp,
395 ) -> Result<Vec<u8>, DirstateError> {
386 ) -> Result<Vec<u8>, DirstateError> {
396 self.pack(parents, now)
387 self.pack(parents, now)
397 }
388 }
398
389
399 fn pack_v2(
390 fn pack_v2(
400 &mut self,
391 &mut self,
401 _parents: DirstateParents,
392 _parents: DirstateParents,
402 _now: Timestamp,
393 _now: Timestamp,
403 ) -> Result<Vec<u8>, DirstateError> {
394 ) -> Result<Vec<u8>, DirstateError> {
404 panic!(
395 panic!(
405 "should have used dirstate_tree::DirstateMap to use the v2 format"
396 "should have used dirstate_tree::DirstateMap to use the v2 format"
406 )
397 )
407 }
398 }
408
399
409 fn status<'a>(
400 fn status<'a>(
410 &'a mut self,
401 &'a mut self,
411 matcher: &'a (dyn Matcher + Sync),
402 matcher: &'a (dyn Matcher + Sync),
412 root_dir: PathBuf,
403 root_dir: PathBuf,
413 ignore_files: Vec<PathBuf>,
404 ignore_files: Vec<PathBuf>,
414 options: StatusOptions,
405 options: StatusOptions,
415 ) -> Result<(DirstateStatus<'a>, Vec<PatternFileWarning>), StatusError>
406 ) -> Result<(DirstateStatus<'a>, Vec<PatternFileWarning>), StatusError>
416 {
407 {
417 crate::status(self, matcher, root_dir, ignore_files, options)
408 crate::status(self, matcher, root_dir, ignore_files, options)
418 }
409 }
419
410
420 fn copy_map_len(&self) -> usize {
411 fn copy_map_len(&self) -> usize {
421 self.copy_map.len()
412 self.copy_map.len()
422 }
413 }
423
414
424 fn copy_map_iter(&self) -> CopyMapIter<'_> {
415 fn copy_map_iter(&self) -> CopyMapIter<'_> {
425 Box::new(
416 Box::new(
426 self.copy_map
417 self.copy_map
427 .iter()
418 .iter()
428 .map(|(key, value)| Ok((&**key, &**value))),
419 .map(|(key, value)| Ok((&**key, &**value))),
429 )
420 )
430 }
421 }
431
422
432 fn copy_map_contains_key(
423 fn copy_map_contains_key(
433 &self,
424 &self,
434 key: &HgPath,
425 key: &HgPath,
435 ) -> Result<bool, DirstateV2ParseError> {
426 ) -> Result<bool, DirstateV2ParseError> {
436 Ok(self.copy_map.contains_key(key))
427 Ok(self.copy_map.contains_key(key))
437 }
428 }
438
429
439 fn copy_map_get(
430 fn copy_map_get(
440 &self,
431 &self,
441 key: &HgPath,
432 key: &HgPath,
442 ) -> Result<Option<&HgPath>, DirstateV2ParseError> {
433 ) -> Result<Option<&HgPath>, DirstateV2ParseError> {
443 Ok(self.copy_map.get(key).map(|p| &**p))
434 Ok(self.copy_map.get(key).map(|p| &**p))
444 }
435 }
445
436
446 fn copy_map_remove(
437 fn copy_map_remove(
447 &mut self,
438 &mut self,
448 key: &HgPath,
439 key: &HgPath,
449 ) -> Result<Option<HgPathBuf>, DirstateV2ParseError> {
440 ) -> Result<Option<HgPathBuf>, DirstateV2ParseError> {
450 Ok(self.copy_map.remove(key))
441 Ok(self.copy_map.remove(key))
451 }
442 }
452
443
453 fn copy_map_insert(
444 fn copy_map_insert(
454 &mut self,
445 &mut self,
455 key: HgPathBuf,
446 key: HgPathBuf,
456 value: HgPathBuf,
447 value: HgPathBuf,
457 ) -> Result<Option<HgPathBuf>, DirstateV2ParseError> {
448 ) -> Result<Option<HgPathBuf>, DirstateV2ParseError> {
458 Ok(self.copy_map.insert(key, value))
449 Ok(self.copy_map.insert(key, value))
459 }
450 }
460
451
461 fn len(&self) -> usize {
452 fn len(&self) -> usize {
462 (&**self).len()
453 (&**self).len()
463 }
454 }
464
455
465 fn contains_key(
456 fn contains_key(
466 &self,
457 &self,
467 key: &HgPath,
458 key: &HgPath,
468 ) -> Result<bool, DirstateV2ParseError> {
459 ) -> Result<bool, DirstateV2ParseError> {
469 Ok((&**self).contains_key(key))
460 Ok((&**self).contains_key(key))
470 }
461 }
471
462
472 fn get(
463 fn get(
473 &self,
464 &self,
474 key: &HgPath,
465 key: &HgPath,
475 ) -> Result<Option<DirstateEntry>, DirstateV2ParseError> {
466 ) -> Result<Option<DirstateEntry>, DirstateV2ParseError> {
476 Ok((&**self).get(key).cloned())
467 Ok((&**self).get(key).cloned())
477 }
468 }
478
469
479 fn iter(&self) -> StateMapIter<'_> {
470 fn iter(&self) -> StateMapIter<'_> {
480 Box::new((&**self).iter().map(|(key, value)| Ok((&**key, *value))))
471 Box::new((&**self).iter().map(|(key, value)| Ok((&**key, *value))))
481 }
472 }
482
473
483 fn iter_directories(
474 fn iter_directories(
484 &self,
475 &self,
485 ) -> Box<
476 ) -> Box<
486 dyn Iterator<
477 dyn Iterator<
487 Item = Result<
478 Item = Result<
488 (&HgPath, Option<Timestamp>),
479 (&HgPath, Option<Timestamp>),
489 DirstateV2ParseError,
480 DirstateV2ParseError,
490 >,
481 >,
491 > + Send
482 > + Send
492 + '_,
483 + '_,
493 > {
484 > {
494 Box::new(std::iter::empty())
485 Box::new(std::iter::empty())
495 }
486 }
496 }
487 }
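The substantive change in this file is the `drop_file` signature: the `old_state: EntryState` parameter is gone, so callers no longer have to look an entry up just to pass its state back in. A reduced stand-alone sketch of the call-site effect follows; `EntryKind` and `Map` are hypothetical stand-ins for `EntryState` and the dirstate map, and the internal logic is deliberately trivial.

// Sketch of the drop_file signature change, using stand-in types.
use std::collections::HashMap;

#[derive(Clone, Copy)]
#[allow(dead_code)]
enum EntryKind {
    Normal,
    Added,
    Removed,
}

struct Map(HashMap<String, EntryKind>);

impl Map {
    // Old shape: the caller had to fetch the state and pass it in, so every
    // call site duplicated a lookup the implementation needs anyway.
    fn drop_file_old(&mut self, filename: &str, _old_state: EntryKind) -> bool {
        self.0.remove(filename).is_some()
    }

    // New shape: only the path is needed; any state-dependent bookkeeping
    // consults the map's own entry internally.
    fn drop_file(&mut self, filename: &str) -> bool {
        self.0.remove(filename).is_some()
    }
}

fn main() {
    let mut map = Map(HashMap::from([("a.rs".to_string(), EntryKind::Added)]));

    // Before this changeset: look the state up just to pass it along.
    let old_state = *map.0.get("a.rs").expect("tracked");
    assert!(map.drop_file_old("a.rs", old_state));

    // After: the extra lookup at the call site disappears.
    map.0.insert("a.rs".to_string(), EntryKind::Added);
    assert!(map.drop_file("a.rs"));
}
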
@@ -1,587 +1,580 b''
1 // dirstate_map.rs
1 // dirstate_map.rs
2 //
2 //
3 // Copyright 2019 Raphaël Gomès <rgomes@octobus.net>
3 // Copyright 2019 Raphaël Gomès <rgomes@octobus.net>
4 //
4 //
5 // This software may be used and distributed according to the terms of the
5 // This software may be used and distributed according to the terms of the
6 // GNU General Public License version 2 or any later version.
6 // GNU General Public License version 2 or any later version.
7
7
8 //! Bindings for the `hg::dirstate::dirstate_map` file provided by the
8 //! Bindings for the `hg::dirstate::dirstate_map` file provided by the
9 //! `hg-core` package.
9 //! `hg-core` package.
10
10
11 use std::cell::{RefCell, RefMut};
11 use std::cell::{RefCell, RefMut};
12 use std::convert::TryInto;
12 use std::convert::TryInto;
13
13
14 use cpython::{
14 use cpython::{
15 exc, ObjectProtocol, PyBool, PyBytes, PyClone, PyDict, PyErr, PyList,
15 exc, ObjectProtocol, PyBool, PyBytes, PyClone, PyDict, PyErr, PyList,
16 PyObject, PyResult, PySet, PyString, Python, PythonObject, ToPyObject,
16 PyObject, PyResult, PySet, PyString, Python, PythonObject, ToPyObject,
17 UnsafePyLeaked,
17 UnsafePyLeaked,
18 };
18 };
19
19
20 use crate::{
20 use crate::{
21 dirstate::copymap::{CopyMap, CopyMapItemsIterator, CopyMapKeysIterator},
21 dirstate::copymap::{CopyMap, CopyMapItemsIterator, CopyMapKeysIterator},
22 dirstate::make_dirstate_tuple,
22 dirstate::make_dirstate_tuple,
23 dirstate::non_normal_entries::{
23 dirstate::non_normal_entries::{
24 NonNormalEntries, NonNormalEntriesIterator,
24 NonNormalEntries, NonNormalEntriesIterator,
25 },
25 },
26 dirstate::owning::OwningDirstateMap,
26 dirstate::owning::OwningDirstateMap,
27 parsers::dirstate_parents_to_pytuple,
27 parsers::dirstate_parents_to_pytuple,
28 };
28 };
29 use hg::{
29 use hg::{
30 dirstate::parsers::Timestamp,
30 dirstate::parsers::Timestamp,
31 dirstate::MTIME_UNSET,
31 dirstate::MTIME_UNSET,
32 dirstate::SIZE_NON_NORMAL,
32 dirstate::SIZE_NON_NORMAL,
33 dirstate_tree::dispatch::DirstateMapMethods,
33 dirstate_tree::dispatch::DirstateMapMethods,
34 dirstate_tree::on_disk::DirstateV2ParseError,
34 dirstate_tree::on_disk::DirstateV2ParseError,
35 errors::HgError,
36 revlog::Node,
35 revlog::Node,
37 utils::files::normalize_case,
36 utils::files::normalize_case,
38 utils::hg_path::{HgPath, HgPathBuf},
37 utils::hg_path::{HgPath, HgPathBuf},
39 DirstateEntry, DirstateError, DirstateMap as RustDirstateMap,
38 DirstateEntry, DirstateError, DirstateMap as RustDirstateMap,
40 DirstateParents, EntryState, StateMapIter,
39 DirstateParents, EntryState, StateMapIter,
41 };
40 };
42
41
43 // TODO
42 // TODO
44 // This object needs to share references to multiple members of its Rust
43 // This object needs to share references to multiple members of its Rust
45 // inner struct, namely `copy_map`, `dirs` and `all_dirs`.
44 // inner struct, namely `copy_map`, `dirs` and `all_dirs`.
46 // Right now `CopyMap` is done, but it needs to have an explicit reference
45 // Right now `CopyMap` is done, but it needs to have an explicit reference
47 // to `RustDirstateMap` which itself needs to have an encapsulation for
46 // to `RustDirstateMap` which itself needs to have an encapsulation for
48 // every method in `CopyMap` (copymapcopy, etc.).
47 // every method in `CopyMap` (copymapcopy, etc.).
49 // This is ugly and hard to maintain.
48 // This is ugly and hard to maintain.
50 // The same logic applies to `dirs` and `all_dirs`, however the `Dirs`
49 // The same logic applies to `dirs` and `all_dirs`, however the `Dirs`
51 // `py_class!` is already implemented and does not mention
50 // `py_class!` is already implemented and does not mention
52 // `RustDirstateMap`, rightfully so.
51 // `RustDirstateMap`, rightfully so.
53 // All attributes also have to have a separate refcount data attribute for
52 // All attributes also have to have a separate refcount data attribute for
54 // leaks, with all methods that go along for reference sharing.
53 // leaks, with all methods that go along for reference sharing.
55 py_class!(pub class DirstateMap |py| {
54 py_class!(pub class DirstateMap |py| {
56 @shared data inner: Box<dyn DirstateMapMethods + Send>;
55 @shared data inner: Box<dyn DirstateMapMethods + Send>;
57
56
58 /// Returns a `(dirstate_map, parents)` tuple
57 /// Returns a `(dirstate_map, parents)` tuple
59 @staticmethod
58 @staticmethod
60 def new(
59 def new(
61 use_dirstate_tree: bool,
60 use_dirstate_tree: bool,
62 use_dirstate_v2: bool,
61 use_dirstate_v2: bool,
63 on_disk: PyBytes,
62 on_disk: PyBytes,
64 ) -> PyResult<PyObject> {
63 ) -> PyResult<PyObject> {
65 let dirstate_error = |e: DirstateError| {
64 let dirstate_error = |e: DirstateError| {
66 PyErr::new::<exc::OSError, _>(py, format!("Dirstate error: {:?}", e))
65 PyErr::new::<exc::OSError, _>(py, format!("Dirstate error: {:?}", e))
67 };
66 };
68 let (inner, parents) = if use_dirstate_tree || use_dirstate_v2 {
67 let (inner, parents) = if use_dirstate_tree || use_dirstate_v2 {
69 let (map, parents) =
68 let (map, parents) =
70 OwningDirstateMap::new(py, on_disk, use_dirstate_v2)
69 OwningDirstateMap::new(py, on_disk, use_dirstate_v2)
71 .map_err(dirstate_error)?;
70 .map_err(dirstate_error)?;
72 (Box::new(map) as _, parents)
71 (Box::new(map) as _, parents)
73 } else {
72 } else {
74 let bytes = on_disk.data(py);
73 let bytes = on_disk.data(py);
75 let mut map = RustDirstateMap::default();
74 let mut map = RustDirstateMap::default();
76 let parents = map.read(bytes).map_err(dirstate_error)?;
75 let parents = map.read(bytes).map_err(dirstate_error)?;
77 (Box::new(map) as _, parents)
76 (Box::new(map) as _, parents)
78 };
77 };
79 let map = Self::create_instance(py, inner)?;
78 let map = Self::create_instance(py, inner)?;
80 let parents = parents.map(|p| dirstate_parents_to_pytuple(py, &p));
79 let parents = parents.map(|p| dirstate_parents_to_pytuple(py, &p));
81 Ok((map, parents).to_py_object(py).into_object())
80 Ok((map, parents).to_py_object(py).into_object())
82 }
81 }
83
82
84 def clear(&self) -> PyResult<PyObject> {
83 def clear(&self) -> PyResult<PyObject> {
85 self.inner(py).borrow_mut().clear();
84 self.inner(py).borrow_mut().clear();
86 Ok(py.None())
85 Ok(py.None())
87 }
86 }
88
87
89 def get(
88 def get(
90 &self,
89 &self,
91 key: PyObject,
90 key: PyObject,
92 default: Option<PyObject> = None
91 default: Option<PyObject> = None
93 ) -> PyResult<Option<PyObject>> {
92 ) -> PyResult<Option<PyObject>> {
94 let key = key.extract::<PyBytes>(py)?;
93 let key = key.extract::<PyBytes>(py)?;
95 match self
94 match self
96 .inner(py)
95 .inner(py)
97 .borrow()
96 .borrow()
98 .get(HgPath::new(key.data(py)))
97 .get(HgPath::new(key.data(py)))
99 .map_err(|e| v2_error(py, e))?
98 .map_err(|e| v2_error(py, e))?
100 {
99 {
101 Some(entry) => {
100 Some(entry) => {
102 Ok(Some(make_dirstate_tuple(py, &entry)?))
101 Ok(Some(make_dirstate_tuple(py, &entry)?))
103 },
102 },
104 None => Ok(default)
103 None => Ok(default)
105 }
104 }
106 }
105 }
107
106
108 def addfile(
107 def addfile(
109 &self,
108 &self,
110 f: PyObject,
109 f: PyObject,
111 mode: PyObject,
110 mode: PyObject,
112 size: PyObject,
111 size: PyObject,
113 mtime: PyObject,
112 mtime: PyObject,
114 added: PyObject,
113 added: PyObject,
115 merged: PyObject,
114 merged: PyObject,
116 from_p2: PyObject,
115 from_p2: PyObject,
117 possibly_dirty: PyObject,
116 possibly_dirty: PyObject,
118 ) -> PyResult<PyObject> {
117 ) -> PyResult<PyObject> {
119 let f = f.extract::<PyBytes>(py)?;
118 let f = f.extract::<PyBytes>(py)?;
120 let filename = HgPath::new(f.data(py));
119 let filename = HgPath::new(f.data(py));
121 let mode = if mode.is_none(py) {
120 let mode = if mode.is_none(py) {
122 // fallback default value
121 // fallback default value
123 0
122 0
124 } else {
123 } else {
125 mode.extract(py)?
124 mode.extract(py)?
126 };
125 };
127 let size = if size.is_none(py) {
126 let size = if size.is_none(py) {
128 // fallback default value
127 // fallback default value
129 SIZE_NON_NORMAL
128 SIZE_NON_NORMAL
130 } else {
129 } else {
131 size.extract(py)?
130 size.extract(py)?
132 };
131 };
133 let mtime = if mtime.is_none(py) {
132 let mtime = if mtime.is_none(py) {
134 // fallback default value
133 // fallback default value
135 MTIME_UNSET
134 MTIME_UNSET
136 } else {
135 } else {
137 mtime.extract(py)?
136 mtime.extract(py)?
138 };
137 };
139 let entry = DirstateEntry {
138 let entry = DirstateEntry {
140 // XXX Arbitrary default value since the value is determined later
139 // XXX Arbitrary default value since the value is determined later
141 state: EntryState::Normal,
140 state: EntryState::Normal,
142 mode: mode,
141 mode: mode,
143 size: size,
142 size: size,
144 mtime: mtime,
143 mtime: mtime,
145 };
144 };
146 let added = added.extract::<PyBool>(py)?.is_true();
145 let added = added.extract::<PyBool>(py)?.is_true();
147 let merged = merged.extract::<PyBool>(py)?.is_true();
146 let merged = merged.extract::<PyBool>(py)?.is_true();
148 let from_p2 = from_p2.extract::<PyBool>(py)?.is_true();
147 let from_p2 = from_p2.extract::<PyBool>(py)?.is_true();
149 let possibly_dirty = possibly_dirty.extract::<PyBool>(py)?.is_true();
148 let possibly_dirty = possibly_dirty.extract::<PyBool>(py)?.is_true();
150 self.inner(py).borrow_mut().add_file(
149 self.inner(py).borrow_mut().add_file(
151 filename,
150 filename,
152 entry,
151 entry,
153 added,
152 added,
154 merged,
153 merged,
155 from_p2,
154 from_p2,
156 possibly_dirty
155 possibly_dirty
157 ).and(Ok(py.None())).or_else(|e: DirstateError| {
156 ).and(Ok(py.None())).or_else(|e: DirstateError| {
158 Err(PyErr::new::<exc::ValueError, _>(py, e.to_string()))
157 Err(PyErr::new::<exc::ValueError, _>(py, e.to_string()))
159 })
158 })
160 }
159 }
161
160
162 def removefile(
161 def removefile(
163 &self,
162 &self,
164 f: PyObject,
163 f: PyObject,
165 in_merge: PyObject
164 in_merge: PyObject
166 ) -> PyResult<PyObject> {
165 ) -> PyResult<PyObject> {
167 self.inner(py).borrow_mut()
166 self.inner(py).borrow_mut()
168 .remove_file(
167 .remove_file(
169 HgPath::new(f.extract::<PyBytes>(py)?.data(py)),
168 HgPath::new(f.extract::<PyBytes>(py)?.data(py)),
170 in_merge.extract::<PyBool>(py)?.is_true(),
169 in_merge.extract::<PyBool>(py)?.is_true(),
171 )
170 )
172 .or_else(|_| {
171 .or_else(|_| {
173 Err(PyErr::new::<exc::OSError, _>(
172 Err(PyErr::new::<exc::OSError, _>(
174 py,
173 py,
175 "Dirstate error".to_string(),
174 "Dirstate error".to_string(),
176 ))
175 ))
177 })?;
176 })?;
178 Ok(py.None())
177 Ok(py.None())
179 }
178 }
180
179
181 def dropfile(
180 def dropfile(
182 &self,
181 &self,
183 f: PyObject,
182 f: PyObject,
184 oldstate: PyObject
185 ) -> PyResult<PyBool> {
183 ) -> PyResult<PyBool> {
186 self.inner(py).borrow_mut()
184 self.inner(py).borrow_mut()
187 .drop_file(
185 .drop_file(
188 HgPath::new(f.extract::<PyBytes>(py)?.data(py)),
186 HgPath::new(f.extract::<PyBytes>(py)?.data(py)),
189 oldstate.extract::<PyBytes>(py)?.data(py)[0]
190 .try_into()
191 .map_err(|e: HgError| {
192 PyErr::new::<exc::ValueError, _>(py, e.to_string())
193 })?,
194 )
187 )
195 .and_then(|b| Ok(b.to_py_object(py)))
188 .and_then(|b| Ok(b.to_py_object(py)))
196 .or_else(|e| {
189 .or_else(|e| {
197 Err(PyErr::new::<exc::OSError, _>(
190 Err(PyErr::new::<exc::OSError, _>(
198 py,
191 py,
199 format!("Dirstate error: {}", e.to_string()),
192 format!("Dirstate error: {}", e.to_string()),
200 ))
193 ))
201 })
194 })
202 }
195 }
203
196
204 def clearambiguoustimes(
197 def clearambiguoustimes(
205 &self,
198 &self,
206 files: PyObject,
199 files: PyObject,
207 now: PyObject
200 now: PyObject
208 ) -> PyResult<PyObject> {
201 ) -> PyResult<PyObject> {
209 let files: PyResult<Vec<HgPathBuf>> = files
202 let files: PyResult<Vec<HgPathBuf>> = files
210 .iter(py)?
203 .iter(py)?
211 .map(|filename| {
204 .map(|filename| {
212 Ok(HgPathBuf::from_bytes(
205 Ok(HgPathBuf::from_bytes(
213 filename?.extract::<PyBytes>(py)?.data(py),
206 filename?.extract::<PyBytes>(py)?.data(py),
214 ))
207 ))
215 })
208 })
216 .collect();
209 .collect();
217 self.inner(py)
210 self.inner(py)
218 .borrow_mut()
211 .borrow_mut()
219 .clear_ambiguous_times(files?, now.extract(py)?)
212 .clear_ambiguous_times(files?, now.extract(py)?)
220 .map_err(|e| v2_error(py, e))?;
213 .map_err(|e| v2_error(py, e))?;
221 Ok(py.None())
214 Ok(py.None())
222 }
215 }
223
216
224 def other_parent_entries(&self) -> PyResult<PyObject> {
217 def other_parent_entries(&self) -> PyResult<PyObject> {
225 let mut inner_shared = self.inner(py).borrow_mut();
218 let mut inner_shared = self.inner(py).borrow_mut();
226 let set = PySet::empty(py)?;
219 let set = PySet::empty(py)?;
227 for path in inner_shared.iter_other_parent_paths() {
220 for path in inner_shared.iter_other_parent_paths() {
228 let path = path.map_err(|e| v2_error(py, e))?;
221 let path = path.map_err(|e| v2_error(py, e))?;
229 set.add(py, PyBytes::new(py, path.as_bytes()))?;
222 set.add(py, PyBytes::new(py, path.as_bytes()))?;
230 }
223 }
231 Ok(set.into_object())
224 Ok(set.into_object())
232 }
225 }
233
226
234 def non_normal_entries(&self) -> PyResult<NonNormalEntries> {
227 def non_normal_entries(&self) -> PyResult<NonNormalEntries> {
235 NonNormalEntries::from_inner(py, self.clone_ref(py))
228 NonNormalEntries::from_inner(py, self.clone_ref(py))
236 }
229 }
237
230
238 def non_normal_entries_contains(&self, key: PyObject) -> PyResult<bool> {
231 def non_normal_entries_contains(&self, key: PyObject) -> PyResult<bool> {
239 let key = key.extract::<PyBytes>(py)?;
232 let key = key.extract::<PyBytes>(py)?;
240 self.inner(py)
233 self.inner(py)
241 .borrow_mut()
234 .borrow_mut()
242 .non_normal_entries_contains(HgPath::new(key.data(py)))
235 .non_normal_entries_contains(HgPath::new(key.data(py)))
243 .map_err(|e| v2_error(py, e))
236 .map_err(|e| v2_error(py, e))
244 }
237 }
245
238
246 def non_normal_entries_display(&self) -> PyResult<PyString> {
239 def non_normal_entries_display(&self) -> PyResult<PyString> {
247 let mut inner = self.inner(py).borrow_mut();
240 let mut inner = self.inner(py).borrow_mut();
248 let paths = inner
241 let paths = inner
249 .iter_non_normal_paths()
242 .iter_non_normal_paths()
250 .collect::<Result<Vec<_>, _>>()
243 .collect::<Result<Vec<_>, _>>()
251 .map_err(|e| v2_error(py, e))?;
244 .map_err(|e| v2_error(py, e))?;
252 let formatted = format!("NonNormalEntries: {}", hg::utils::join_display(paths, ", "));
245 let formatted = format!("NonNormalEntries: {}", hg::utils::join_display(paths, ", "));
253 Ok(PyString::new(py, &formatted))
246 Ok(PyString::new(py, &formatted))
254 }
247 }
255
248
256 def non_normal_entries_remove(&self, key: PyObject) -> PyResult<PyObject> {
249 def non_normal_entries_remove(&self, key: PyObject) -> PyResult<PyObject> {
257 let key = key.extract::<PyBytes>(py)?;
250 let key = key.extract::<PyBytes>(py)?;
258 self
251 self
259 .inner(py)
252 .inner(py)
260 .borrow_mut()
253 .borrow_mut()
261 .non_normal_entries_remove(HgPath::new(key.data(py)));
254 .non_normal_entries_remove(HgPath::new(key.data(py)));
262 Ok(py.None())
255 Ok(py.None())
263 }
256 }
264
257
265 def non_normal_or_other_parent_paths(&self) -> PyResult<PyList> {
258 def non_normal_or_other_parent_paths(&self) -> PyResult<PyList> {
266 let mut inner = self.inner(py).borrow_mut();
259 let mut inner = self.inner(py).borrow_mut();
267
260
268 let ret = PyList::new(py, &[]);
261 let ret = PyList::new(py, &[]);
269 for filename in inner.non_normal_or_other_parent_paths() {
262 for filename in inner.non_normal_or_other_parent_paths() {
270 let filename = filename.map_err(|e| v2_error(py, e))?;
263 let filename = filename.map_err(|e| v2_error(py, e))?;
271 let as_pystring = PyBytes::new(py, filename.as_bytes());
264 let as_pystring = PyBytes::new(py, filename.as_bytes());
272 ret.append(py, as_pystring.into_object());
265 ret.append(py, as_pystring.into_object());
273 }
266 }
274 Ok(ret)
267 Ok(ret)
275 }
268 }
276
269
277 def non_normal_entries_iter(&self) -> PyResult<NonNormalEntriesIterator> {
270 def non_normal_entries_iter(&self) -> PyResult<NonNormalEntriesIterator> {
278 // Make sure the sets are defined before we no longer have a mutable
271 // Make sure the sets are defined before we no longer have a mutable
279 // reference to the dmap.
272 // reference to the dmap.
280 self.inner(py)
273 self.inner(py)
281 .borrow_mut()
274 .borrow_mut()
282 .set_non_normal_other_parent_entries(false);
275 .set_non_normal_other_parent_entries(false);
283
276
284 let leaked_ref = self.inner(py).leak_immutable();
277 let leaked_ref = self.inner(py).leak_immutable();
285
278
286 NonNormalEntriesIterator::from_inner(py, unsafe {
279 NonNormalEntriesIterator::from_inner(py, unsafe {
287 leaked_ref.map(py, |o| {
280 leaked_ref.map(py, |o| {
288 o.iter_non_normal_paths_panic()
281 o.iter_non_normal_paths_panic()
289 })
282 })
290 })
283 })
291 }
284 }
292
285
293 def hastrackeddir(&self, d: PyObject) -> PyResult<PyBool> {
286 def hastrackeddir(&self, d: PyObject) -> PyResult<PyBool> {
294 let d = d.extract::<PyBytes>(py)?;
287 let d = d.extract::<PyBytes>(py)?;
295 Ok(self.inner(py).borrow_mut()
288 Ok(self.inner(py).borrow_mut()
296 .has_tracked_dir(HgPath::new(d.data(py)))
289 .has_tracked_dir(HgPath::new(d.data(py)))
297 .map_err(|e| {
290 .map_err(|e| {
298 PyErr::new::<exc::ValueError, _>(py, e.to_string())
291 PyErr::new::<exc::ValueError, _>(py, e.to_string())
299 })?
292 })?
300 .to_py_object(py))
293 .to_py_object(py))
301 }
294 }
302
295
303 def hasdir(&self, d: PyObject) -> PyResult<PyBool> {
296 def hasdir(&self, d: PyObject) -> PyResult<PyBool> {
304 let d = d.extract::<PyBytes>(py)?;
297 let d = d.extract::<PyBytes>(py)?;
305 Ok(self.inner(py).borrow_mut()
298 Ok(self.inner(py).borrow_mut()
306 .has_dir(HgPath::new(d.data(py)))
299 .has_dir(HgPath::new(d.data(py)))
307 .map_err(|e| {
300 .map_err(|e| {
308 PyErr::new::<exc::ValueError, _>(py, e.to_string())
301 PyErr::new::<exc::ValueError, _>(py, e.to_string())
309 })?
302 })?
310 .to_py_object(py))
303 .to_py_object(py))
311 }
304 }
312
305
313 def write(
306 def write(
314 &self,
307 &self,
315 use_dirstate_v2: bool,
308 use_dirstate_v2: bool,
316 p1: PyObject,
309 p1: PyObject,
317 p2: PyObject,
310 p2: PyObject,
318 now: PyObject
311 now: PyObject
319 ) -> PyResult<PyBytes> {
312 ) -> PyResult<PyBytes> {
320 let now = Timestamp(now.extract(py)?);
313 let now = Timestamp(now.extract(py)?);
321 let parents = DirstateParents {
314 let parents = DirstateParents {
322 p1: extract_node_id(py, &p1)?,
315 p1: extract_node_id(py, &p1)?,
323 p2: extract_node_id(py, &p2)?,
316 p2: extract_node_id(py, &p2)?,
324 };
317 };
325
318
326 let mut inner = self.inner(py).borrow_mut();
319 let mut inner = self.inner(py).borrow_mut();
327 let result = if use_dirstate_v2 {
320 let result = if use_dirstate_v2 {
328 inner.pack_v2(parents, now)
321 inner.pack_v2(parents, now)
329 } else {
322 } else {
330 inner.pack_v1(parents, now)
323 inner.pack_v1(parents, now)
331 };
324 };
332 match result {
325 match result {
333 Ok(packed) => Ok(PyBytes::new(py, &packed)),
326 Ok(packed) => Ok(PyBytes::new(py, &packed)),
334 Err(_) => Err(PyErr::new::<exc::OSError, _>(
327 Err(_) => Err(PyErr::new::<exc::OSError, _>(
335 py,
328 py,
336 "Dirstate error".to_string(),
329 "Dirstate error".to_string(),
337 )),
330 )),
338 }
331 }
339 }
332 }
340
333
341 def filefoldmapasdict(&self) -> PyResult<PyDict> {
334 def filefoldmapasdict(&self) -> PyResult<PyDict> {
342 let dict = PyDict::new(py);
335 let dict = PyDict::new(py);
343 for item in self.inner(py).borrow_mut().iter() {
336 for item in self.inner(py).borrow_mut().iter() {
344 let (path, entry) = item.map_err(|e| v2_error(py, e))?;
337 let (path, entry) = item.map_err(|e| v2_error(py, e))?;
345 if entry.state != EntryState::Removed {
338 if entry.state != EntryState::Removed {
346 let key = normalize_case(path);
339 let key = normalize_case(path);
347 let value = path;
340 let value = path;
348 dict.set_item(
341 dict.set_item(
349 py,
342 py,
350 PyBytes::new(py, key.as_bytes()).into_object(),
343 PyBytes::new(py, key.as_bytes()).into_object(),
351 PyBytes::new(py, value.as_bytes()).into_object(),
344 PyBytes::new(py, value.as_bytes()).into_object(),
352 )?;
345 )?;
353 }
346 }
354 }
347 }
355 Ok(dict)
348 Ok(dict)
356 }
349 }
357
350
358 def __len__(&self) -> PyResult<usize> {
351 def __len__(&self) -> PyResult<usize> {
359 Ok(self.inner(py).borrow().len())
352 Ok(self.inner(py).borrow().len())
360 }
353 }
361
354
362 def __contains__(&self, key: PyObject) -> PyResult<bool> {
355 def __contains__(&self, key: PyObject) -> PyResult<bool> {
363 let key = key.extract::<PyBytes>(py)?;
356 let key = key.extract::<PyBytes>(py)?;
364 self.inner(py)
357 self.inner(py)
365 .borrow()
358 .borrow()
366 .contains_key(HgPath::new(key.data(py)))
359 .contains_key(HgPath::new(key.data(py)))
367 .map_err(|e| v2_error(py, e))
360 .map_err(|e| v2_error(py, e))
368 }
361 }
369
362
370 def __getitem__(&self, key: PyObject) -> PyResult<PyObject> {
363 def __getitem__(&self, key: PyObject) -> PyResult<PyObject> {
371 let key = key.extract::<PyBytes>(py)?;
364 let key = key.extract::<PyBytes>(py)?;
372 let key = HgPath::new(key.data(py));
365 let key = HgPath::new(key.data(py));
373 match self
366 match self
374 .inner(py)
367 .inner(py)
375 .borrow()
368 .borrow()
376 .get(key)
369 .get(key)
377 .map_err(|e| v2_error(py, e))?
370 .map_err(|e| v2_error(py, e))?
378 {
371 {
379 Some(entry) => {
372 Some(entry) => {
380 Ok(make_dirstate_tuple(py, &entry)?)
373 Ok(make_dirstate_tuple(py, &entry)?)
381 },
374 },
382 None => Err(PyErr::new::<exc::KeyError, _>(
375 None => Err(PyErr::new::<exc::KeyError, _>(
383 py,
376 py,
384 String::from_utf8_lossy(key.as_bytes()),
377 String::from_utf8_lossy(key.as_bytes()),
385 )),
378 )),
386 }
379 }
387 }
380 }
388
381
389 def keys(&self) -> PyResult<DirstateMapKeysIterator> {
382 def keys(&self) -> PyResult<DirstateMapKeysIterator> {
390 let leaked_ref = self.inner(py).leak_immutable();
383 let leaked_ref = self.inner(py).leak_immutable();
391 DirstateMapKeysIterator::from_inner(
384 DirstateMapKeysIterator::from_inner(
392 py,
385 py,
393 unsafe { leaked_ref.map(py, |o| o.iter()) },
386 unsafe { leaked_ref.map(py, |o| o.iter()) },
394 )
387 )
395 }
388 }
396
389
397 def items(&self) -> PyResult<DirstateMapItemsIterator> {
390 def items(&self) -> PyResult<DirstateMapItemsIterator> {
398 let leaked_ref = self.inner(py).leak_immutable();
391 let leaked_ref = self.inner(py).leak_immutable();
399 DirstateMapItemsIterator::from_inner(
392 DirstateMapItemsIterator::from_inner(
400 py,
393 py,
401 unsafe { leaked_ref.map(py, |o| o.iter()) },
394 unsafe { leaked_ref.map(py, |o| o.iter()) },
402 )
395 )
403 }
396 }
404
397
405 def __iter__(&self) -> PyResult<DirstateMapKeysIterator> {
398 def __iter__(&self) -> PyResult<DirstateMapKeysIterator> {
406 let leaked_ref = self.inner(py).leak_immutable();
399 let leaked_ref = self.inner(py).leak_immutable();
407 DirstateMapKeysIterator::from_inner(
400 DirstateMapKeysIterator::from_inner(
408 py,
401 py,
409 unsafe { leaked_ref.map(py, |o| o.iter()) },
402 unsafe { leaked_ref.map(py, |o| o.iter()) },
410 )
403 )
411 }
404 }
412
405
413 // TODO all copymap* methods, see docstring above
406 // TODO all copymap* methods, see docstring above
414 def copymapcopy(&self) -> PyResult<PyDict> {
407 def copymapcopy(&self) -> PyResult<PyDict> {
415 let dict = PyDict::new(py);
408 let dict = PyDict::new(py);
416 for item in self.inner(py).borrow().copy_map_iter() {
409 for item in self.inner(py).borrow().copy_map_iter() {
417 let (key, value) = item.map_err(|e| v2_error(py, e))?;
410 let (key, value) = item.map_err(|e| v2_error(py, e))?;
418 dict.set_item(
411 dict.set_item(
419 py,
412 py,
420 PyBytes::new(py, key.as_bytes()),
413 PyBytes::new(py, key.as_bytes()),
421 PyBytes::new(py, value.as_bytes()),
414 PyBytes::new(py, value.as_bytes()),
422 )?;
415 )?;
423 }
416 }
424 Ok(dict)
417 Ok(dict)
425 }
418 }
426
419
427 def copymapgetitem(&self, key: PyObject) -> PyResult<PyBytes> {
420 def copymapgetitem(&self, key: PyObject) -> PyResult<PyBytes> {
428 let key = key.extract::<PyBytes>(py)?;
421 let key = key.extract::<PyBytes>(py)?;
429 match self
422 match self
430 .inner(py)
423 .inner(py)
431 .borrow()
424 .borrow()
432 .copy_map_get(HgPath::new(key.data(py)))
425 .copy_map_get(HgPath::new(key.data(py)))
433 .map_err(|e| v2_error(py, e))?
426 .map_err(|e| v2_error(py, e))?
434 {
427 {
435 Some(copy) => Ok(PyBytes::new(py, copy.as_bytes())),
428 Some(copy) => Ok(PyBytes::new(py, copy.as_bytes())),
436 None => Err(PyErr::new::<exc::KeyError, _>(
429 None => Err(PyErr::new::<exc::KeyError, _>(
437 py,
430 py,
438 String::from_utf8_lossy(key.data(py)),
431 String::from_utf8_lossy(key.data(py)),
439 )),
432 )),
440 }
433 }
441 }
434 }
442 def copymap(&self) -> PyResult<CopyMap> {
435 def copymap(&self) -> PyResult<CopyMap> {
443 CopyMap::from_inner(py, self.clone_ref(py))
436 CopyMap::from_inner(py, self.clone_ref(py))
444 }
437 }
445
438
446 def copymaplen(&self) -> PyResult<usize> {
439 def copymaplen(&self) -> PyResult<usize> {
447 Ok(self.inner(py).borrow().copy_map_len())
440 Ok(self.inner(py).borrow().copy_map_len())
448 }
441 }
449 def copymapcontains(&self, key: PyObject) -> PyResult<bool> {
442 def copymapcontains(&self, key: PyObject) -> PyResult<bool> {
450 let key = key.extract::<PyBytes>(py)?;
443 let key = key.extract::<PyBytes>(py)?;
451 self.inner(py)
444 self.inner(py)
452 .borrow()
445 .borrow()
453 .copy_map_contains_key(HgPath::new(key.data(py)))
446 .copy_map_contains_key(HgPath::new(key.data(py)))
454 .map_err(|e| v2_error(py, e))
447 .map_err(|e| v2_error(py, e))
455 }
448 }
456 def copymapget(
449 def copymapget(
457 &self,
450 &self,
458 key: PyObject,
451 key: PyObject,
459 default: Option<PyObject>
452 default: Option<PyObject>
460 ) -> PyResult<Option<PyObject>> {
453 ) -> PyResult<Option<PyObject>> {
461 let key = key.extract::<PyBytes>(py)?;
454 let key = key.extract::<PyBytes>(py)?;
462 match self
455 match self
463 .inner(py)
456 .inner(py)
464 .borrow()
457 .borrow()
465 .copy_map_get(HgPath::new(key.data(py)))
458 .copy_map_get(HgPath::new(key.data(py)))
466 .map_err(|e| v2_error(py, e))?
459 .map_err(|e| v2_error(py, e))?
467 {
460 {
468 Some(copy) => Ok(Some(
461 Some(copy) => Ok(Some(
469 PyBytes::new(py, copy.as_bytes()).into_object(),
462 PyBytes::new(py, copy.as_bytes()).into_object(),
470 )),
463 )),
471 None => Ok(default),
464 None => Ok(default),
472 }
465 }
473 }
466 }
474 def copymapsetitem(
467 def copymapsetitem(
475 &self,
468 &self,
476 key: PyObject,
469 key: PyObject,
477 value: PyObject
470 value: PyObject
478 ) -> PyResult<PyObject> {
471 ) -> PyResult<PyObject> {
479 let key = key.extract::<PyBytes>(py)?;
472 let key = key.extract::<PyBytes>(py)?;
480 let value = value.extract::<PyBytes>(py)?;
473 let value = value.extract::<PyBytes>(py)?;
481 self.inner(py)
474 self.inner(py)
482 .borrow_mut()
475 .borrow_mut()
483 .copy_map_insert(
476 .copy_map_insert(
484 HgPathBuf::from_bytes(key.data(py)),
477 HgPathBuf::from_bytes(key.data(py)),
485 HgPathBuf::from_bytes(value.data(py)),
478 HgPathBuf::from_bytes(value.data(py)),
486 )
479 )
487 .map_err(|e| v2_error(py, e))?;
480 .map_err(|e| v2_error(py, e))?;
488 Ok(py.None())
481 Ok(py.None())
489 }
482 }
490 def copymappop(
483 def copymappop(
491 &self,
484 &self,
492 key: PyObject,
485 key: PyObject,
493 default: Option<PyObject>
486 default: Option<PyObject>
494 ) -> PyResult<Option<PyObject>> {
487 ) -> PyResult<Option<PyObject>> {
495 let key = key.extract::<PyBytes>(py)?;
488 let key = key.extract::<PyBytes>(py)?;
496 match self
489 match self
497 .inner(py)
490 .inner(py)
498 .borrow_mut()
491 .borrow_mut()
499 .copy_map_remove(HgPath::new(key.data(py)))
492 .copy_map_remove(HgPath::new(key.data(py)))
500 .map_err(|e| v2_error(py, e))?
493 .map_err(|e| v2_error(py, e))?
501 {
494 {
502 Some(_) => Ok(None),
495 Some(_) => Ok(None),
503 None => Ok(default),
496 None => Ok(default),
504 }
497 }
505 }
498 }
506
499
507 def copymapiter(&self) -> PyResult<CopyMapKeysIterator> {
500 def copymapiter(&self) -> PyResult<CopyMapKeysIterator> {
508 let leaked_ref = self.inner(py).leak_immutable();
501 let leaked_ref = self.inner(py).leak_immutable();
509 CopyMapKeysIterator::from_inner(
502 CopyMapKeysIterator::from_inner(
510 py,
503 py,
511 unsafe { leaked_ref.map(py, |o| o.copy_map_iter()) },
504 unsafe { leaked_ref.map(py, |o| o.copy_map_iter()) },
512 )
505 )
513 }
506 }
514
507
515 def copymapitemsiter(&self) -> PyResult<CopyMapItemsIterator> {
508 def copymapitemsiter(&self) -> PyResult<CopyMapItemsIterator> {
516 let leaked_ref = self.inner(py).leak_immutable();
509 let leaked_ref = self.inner(py).leak_immutable();
517 CopyMapItemsIterator::from_inner(
510 CopyMapItemsIterator::from_inner(
518 py,
511 py,
519 unsafe { leaked_ref.map(py, |o| o.copy_map_iter()) },
512 unsafe { leaked_ref.map(py, |o| o.copy_map_iter()) },
520 )
513 )
521 }
514 }
522
515
523 def directories(&self) -> PyResult<PyList> {
516 def directories(&self) -> PyResult<PyList> {
524 let dirs = PyList::new(py, &[]);
517 let dirs = PyList::new(py, &[]);
525 for item in self.inner(py).borrow().iter_directories() {
518 for item in self.inner(py).borrow().iter_directories() {
526 let (path, mtime) = item.map_err(|e| v2_error(py, e))?;
519 let (path, mtime) = item.map_err(|e| v2_error(py, e))?;
527 let path = PyBytes::new(py, path.as_bytes());
520 let path = PyBytes::new(py, path.as_bytes());
528 let mtime = mtime.map(|t| t.0).unwrap_or(-1);
521 let mtime = mtime.map(|t| t.0).unwrap_or(-1);
529 let tuple = (path, (b'd', 0, 0, mtime));
522 let tuple = (path, (b'd', 0, 0, mtime));
530 dirs.append(py, tuple.to_py_object(py).into_object())
523 dirs.append(py, tuple.to_py_object(py).into_object())
531 }
524 }
532 Ok(dirs)
525 Ok(dirs)
533 }
526 }
534
527
535 });
528 });
536
529
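Every method body in the `py_class!` block above follows the same discipline: `self.inner(py)` returns the shared slot holding the `Box<dyn DirstateMapMethods + Send>`, and the method takes `.borrow()` for read-only trait methods, `.borrow_mut()` for the `&mut self` ones, and `leak_immutable()` when an iterator must outlive the borrow. The sketch below shows only the read/write borrow split with a plain `RefCell`; `Inner`, `VecInner` and `Bindings` are hypothetical stand-ins, not the `py_class!` machinery.

// Reduced sketch of the borrow discipline used by the bindings above:
// reads go through borrow(), mutations through borrow_mut().
use std::cell::RefCell;

trait Inner {
    fn len(&self) -> usize;           // read-only, like copy_map_len / get
    fn drop_file(&mut self, f: &str); // mutating, like dropfile / addfile
}

struct VecInner(Vec<String>);

impl Inner for VecInner {
    fn len(&self) -> usize {
        self.0.len()
    }
    fn drop_file(&mut self, f: &str) {
        self.0.retain(|name| name.as_str() != f);
    }
}

struct Bindings {
    // Stands in for the `@shared data inner: Box<dyn DirstateMapMethods + Send>`
    // slot declared by the py_class! macro.
    inner: RefCell<Box<dyn Inner + Send>>,
}

impl Bindings {
    fn py_len(&self) -> usize {
        self.inner.borrow().len() // immutable borrow for a read
    }
    fn py_dropfile(&self, f: &str) {
        self.inner.borrow_mut().drop_file(f) // mutable borrow for a write
    }
}

fn main() {
    let b = Bindings {
        inner: RefCell::new(Box::new(VecInner(vec!["a.rs".into(), "b.rs".into()]))),
    };
    b.py_dropfile("a.rs");
    assert_eq!(b.py_len(), 1);
}
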
537 impl DirstateMap {
530 impl DirstateMap {
538 pub fn get_inner_mut<'a>(
531 pub fn get_inner_mut<'a>(
539 &'a self,
532 &'a self,
540 py: Python<'a>,
533 py: Python<'a>,
541 ) -> RefMut<'a, Box<dyn DirstateMapMethods + Send>> {
534 ) -> RefMut<'a, Box<dyn DirstateMapMethods + Send>> {
542 self.inner(py).borrow_mut()
535 self.inner(py).borrow_mut()
543 }
536 }
544 fn translate_key(
537 fn translate_key(
545 py: Python,
538 py: Python,
546 res: Result<(&HgPath, DirstateEntry), DirstateV2ParseError>,
539 res: Result<(&HgPath, DirstateEntry), DirstateV2ParseError>,
547 ) -> PyResult<Option<PyBytes>> {
540 ) -> PyResult<Option<PyBytes>> {
548 let (f, _entry) = res.map_err(|e| v2_error(py, e))?;
541 let (f, _entry) = res.map_err(|e| v2_error(py, e))?;
549 Ok(Some(PyBytes::new(py, f.as_bytes())))
542 Ok(Some(PyBytes::new(py, f.as_bytes())))
550 }
543 }
551 fn translate_key_value(
544 fn translate_key_value(
552 py: Python,
545 py: Python,
553 res: Result<(&HgPath, DirstateEntry), DirstateV2ParseError>,
546 res: Result<(&HgPath, DirstateEntry), DirstateV2ParseError>,
554 ) -> PyResult<Option<(PyBytes, PyObject)>> {
547 ) -> PyResult<Option<(PyBytes, PyObject)>> {
555 let (f, entry) = res.map_err(|e| v2_error(py, e))?;
548 let (f, entry) = res.map_err(|e| v2_error(py, e))?;
556 Ok(Some((
549 Ok(Some((
557 PyBytes::new(py, f.as_bytes()),
550 PyBytes::new(py, f.as_bytes()),
558 make_dirstate_tuple(py, &entry)?,
551 make_dirstate_tuple(py, &entry)?,
559 )))
552 )))
560 }
553 }
561 }
554 }
562
555
563 py_shared_iterator!(
556 py_shared_iterator!(
564 DirstateMapKeysIterator,
557 DirstateMapKeysIterator,
565 UnsafePyLeaked<StateMapIter<'static>>,
558 UnsafePyLeaked<StateMapIter<'static>>,
566 DirstateMap::translate_key,
559 DirstateMap::translate_key,
567 Option<PyBytes>
560 Option<PyBytes>
568 );
561 );
569
562
570 py_shared_iterator!(
563 py_shared_iterator!(
571 DirstateMapItemsIterator,
564 DirstateMapItemsIterator,
572 UnsafePyLeaked<StateMapIter<'static>>,
565 UnsafePyLeaked<StateMapIter<'static>>,
573 DirstateMap::translate_key_value,
566 DirstateMap::translate_key_value,
574 Option<(PyBytes, PyObject)>
567 Option<(PyBytes, PyObject)>
575 );
568 );
576
569
577 fn extract_node_id(py: Python, obj: &PyObject) -> PyResult<Node> {
570 fn extract_node_id(py: Python, obj: &PyObject) -> PyResult<Node> {
578 let bytes = obj.extract::<PyBytes>(py)?;
571 let bytes = obj.extract::<PyBytes>(py)?;
579 match bytes.data(py).try_into() {
572 match bytes.data(py).try_into() {
580 Ok(s) => Ok(s),
573 Ok(s) => Ok(s),
581 Err(e) => Err(PyErr::new::<exc::ValueError, _>(py, e.to_string())),
574 Err(e) => Err(PyErr::new::<exc::ValueError, _>(py, e.to_string())),
582 }
575 }
583 }
576 }
584
577
585 pub(super) fn v2_error(py: Python<'_>, _: DirstateV2ParseError) -> PyErr {
578 pub(super) fn v2_error(py: Python<'_>, _: DirstateV2ParseError) -> PyErr {
586 PyErr::new::<exc::ValueError, _>(py, "corrupted dirstate-v2")
579 PyErr::new::<exc::ValueError, _>(py, "corrupted dirstate-v2")
587 }
580 }
@@ -1,224 +1,219 b''
1 use crate::dirstate::owning::OwningDirstateMap;
1 use crate::dirstate::owning::OwningDirstateMap;
2 use hg::dirstate::parsers::Timestamp;
2 use hg::dirstate::parsers::Timestamp;
3 use hg::dirstate_tree::dispatch::DirstateMapMethods;
3 use hg::dirstate_tree::dispatch::DirstateMapMethods;
4 use hg::dirstate_tree::on_disk::DirstateV2ParseError;
4 use hg::dirstate_tree::on_disk::DirstateV2ParseError;
5 use hg::matchers::Matcher;
5 use hg::matchers::Matcher;
6 use hg::utils::hg_path::{HgPath, HgPathBuf};
6 use hg::utils::hg_path::{HgPath, HgPathBuf};
7 use hg::CopyMapIter;
7 use hg::CopyMapIter;
8 use hg::DirstateEntry;
8 use hg::DirstateEntry;
9 use hg::DirstateError;
9 use hg::DirstateError;
10 use hg::DirstateParents;
10 use hg::DirstateParents;
11 use hg::DirstateStatus;
11 use hg::DirstateStatus;
12 use hg::EntryState;
13 use hg::PatternFileWarning;
12 use hg::PatternFileWarning;
14 use hg::StateMapIter;
13 use hg::StateMapIter;
15 use hg::StatusError;
14 use hg::StatusError;
16 use hg::StatusOptions;
15 use hg::StatusOptions;
17 use std::path::PathBuf;
16 use std::path::PathBuf;
18
17
19 impl DirstateMapMethods for OwningDirstateMap {
18 impl DirstateMapMethods for OwningDirstateMap {
20 fn clear(&mut self) {
19 fn clear(&mut self) {
21 self.get_mut().clear()
20 self.get_mut().clear()
22 }
21 }
23
22
24 fn add_file(
23 fn add_file(
25 &mut self,
24 &mut self,
26 filename: &HgPath,
25 filename: &HgPath,
27 entry: DirstateEntry,
26 entry: DirstateEntry,
28 added: bool,
27 added: bool,
29 merged: bool,
28 merged: bool,
30 from_p2: bool,
29 from_p2: bool,
31 possibly_dirty: bool,
30 possibly_dirty: bool,
32 ) -> Result<(), DirstateError> {
31 ) -> Result<(), DirstateError> {
33 self.get_mut().add_file(
32 self.get_mut().add_file(
34 filename,
33 filename,
35 entry,
34 entry,
36 added,
35 added,
37 merged,
36 merged,
38 from_p2,
37 from_p2,
39 possibly_dirty,
38 possibly_dirty,
40 )
39 )
41 }
40 }
42
41
43 fn remove_file(
42 fn remove_file(
44 &mut self,
43 &mut self,
45 filename: &HgPath,
44 filename: &HgPath,
46 in_merge: bool,
45 in_merge: bool,
47 ) -> Result<(), DirstateError> {
46 ) -> Result<(), DirstateError> {
48 self.get_mut().remove_file(filename, in_merge)
47 self.get_mut().remove_file(filename, in_merge)
49 }
48 }
50
49
51 fn drop_file(
50 fn drop_file(&mut self, filename: &HgPath) -> Result<bool, DirstateError> {
52 &mut self,
51 self.get_mut().drop_file(filename)
53 filename: &HgPath,
54 old_state: EntryState,
55 ) -> Result<bool, DirstateError> {
56 self.get_mut().drop_file(filename, old_state)
57 }
52 }
58
53
59 fn clear_ambiguous_times(
54 fn clear_ambiguous_times(
60 &mut self,
55 &mut self,
61 filenames: Vec<HgPathBuf>,
56 filenames: Vec<HgPathBuf>,
62 now: i32,
57 now: i32,
63 ) -> Result<(), DirstateV2ParseError> {
58 ) -> Result<(), DirstateV2ParseError> {
64 self.get_mut().clear_ambiguous_times(filenames, now)
59 self.get_mut().clear_ambiguous_times(filenames, now)
65 }
60 }
66
61
67 fn non_normal_entries_contains(
62 fn non_normal_entries_contains(
68 &mut self,
63 &mut self,
69 key: &HgPath,
64 key: &HgPath,
70 ) -> Result<bool, DirstateV2ParseError> {
65 ) -> Result<bool, DirstateV2ParseError> {
71 self.get_mut().non_normal_entries_contains(key)
66 self.get_mut().non_normal_entries_contains(key)
72 }
67 }
73
68
74 fn non_normal_entries_remove(&mut self, key: &HgPath) {
69 fn non_normal_entries_remove(&mut self, key: &HgPath) {
75 self.get_mut().non_normal_entries_remove(key)
70 self.get_mut().non_normal_entries_remove(key)
76 }
71 }
77
72
78 fn non_normal_or_other_parent_paths(
73 fn non_normal_or_other_parent_paths(
79 &mut self,
74 &mut self,
80 ) -> Box<dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + '_>
75 ) -> Box<dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + '_>
81 {
76 {
82 self.get_mut().non_normal_or_other_parent_paths()
77 self.get_mut().non_normal_or_other_parent_paths()
83 }
78 }
84
79
85 fn set_non_normal_other_parent_entries(&mut self, force: bool) {
80 fn set_non_normal_other_parent_entries(&mut self, force: bool) {
86 self.get_mut().set_non_normal_other_parent_entries(force)
81 self.get_mut().set_non_normal_other_parent_entries(force)
87 }
82 }
88
83
89 fn iter_non_normal_paths(
84 fn iter_non_normal_paths(
90 &mut self,
85 &mut self,
91 ) -> Box<
86 ) -> Box<
92 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
87 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
93 > {
88 > {
94 self.get_mut().iter_non_normal_paths()
89 self.get_mut().iter_non_normal_paths()
95 }
90 }
96
91
97 fn iter_non_normal_paths_panic(
92 fn iter_non_normal_paths_panic(
98 &self,
93 &self,
99 ) -> Box<
94 ) -> Box<
100 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
95 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
101 > {
96 > {
102 self.get().iter_non_normal_paths_panic()
97 self.get().iter_non_normal_paths_panic()
103 }
98 }
104
99
105 fn iter_other_parent_paths(
100 fn iter_other_parent_paths(
106 &mut self,
101 &mut self,
107 ) -> Box<
102 ) -> Box<
108 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
103 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
109 > {
104 > {
110 self.get_mut().iter_other_parent_paths()
105 self.get_mut().iter_other_parent_paths()
111 }
106 }
112
107
113 fn has_tracked_dir(
108 fn has_tracked_dir(
114 &mut self,
109 &mut self,
115 directory: &HgPath,
110 directory: &HgPath,
116 ) -> Result<bool, DirstateError> {
111 ) -> Result<bool, DirstateError> {
117 self.get_mut().has_tracked_dir(directory)
112 self.get_mut().has_tracked_dir(directory)
118 }
113 }
119
114
120 fn has_dir(&mut self, directory: &HgPath) -> Result<bool, DirstateError> {
115 fn has_dir(&mut self, directory: &HgPath) -> Result<bool, DirstateError> {
121 self.get_mut().has_dir(directory)
116 self.get_mut().has_dir(directory)
122 }
117 }
123
118
124 fn pack_v1(
119 fn pack_v1(
125 &mut self,
120 &mut self,
126 parents: DirstateParents,
121 parents: DirstateParents,
127 now: Timestamp,
122 now: Timestamp,
128 ) -> Result<Vec<u8>, DirstateError> {
123 ) -> Result<Vec<u8>, DirstateError> {
129 self.get_mut().pack_v1(parents, now)
124 self.get_mut().pack_v1(parents, now)
130 }
125 }
131
126
132 fn pack_v2(
127 fn pack_v2(
133 &mut self,
128 &mut self,
134 parents: DirstateParents,
129 parents: DirstateParents,
135 now: Timestamp,
130 now: Timestamp,
136 ) -> Result<Vec<u8>, DirstateError> {
131 ) -> Result<Vec<u8>, DirstateError> {
137 self.get_mut().pack_v2(parents, now)
132 self.get_mut().pack_v2(parents, now)
138 }
133 }
139
134
140 fn status<'a>(
135 fn status<'a>(
141 &'a mut self,
136 &'a mut self,
142 matcher: &'a (dyn Matcher + Sync),
137 matcher: &'a (dyn Matcher + Sync),
143 root_dir: PathBuf,
138 root_dir: PathBuf,
144 ignore_files: Vec<PathBuf>,
139 ignore_files: Vec<PathBuf>,
145 options: StatusOptions,
140 options: StatusOptions,
146 ) -> Result<(DirstateStatus<'a>, Vec<PatternFileWarning>), StatusError>
141 ) -> Result<(DirstateStatus<'a>, Vec<PatternFileWarning>), StatusError>
147 {
142 {
148 self.get_mut()
143 self.get_mut()
149 .status(matcher, root_dir, ignore_files, options)
144 .status(matcher, root_dir, ignore_files, options)
150 }
145 }
151
146
152 fn copy_map_len(&self) -> usize {
147 fn copy_map_len(&self) -> usize {
153 self.get().copy_map_len()
148 self.get().copy_map_len()
154 }
149 }
155
150
156 fn copy_map_iter(&self) -> CopyMapIter<'_> {
151 fn copy_map_iter(&self) -> CopyMapIter<'_> {
157 self.get().copy_map_iter()
152 self.get().copy_map_iter()
158 }
153 }
159
154
160 fn copy_map_contains_key(
155 fn copy_map_contains_key(
161 &self,
156 &self,
162 key: &HgPath,
157 key: &HgPath,
163 ) -> Result<bool, DirstateV2ParseError> {
158 ) -> Result<bool, DirstateV2ParseError> {
164 self.get().copy_map_contains_key(key)
159 self.get().copy_map_contains_key(key)
165 }
160 }
166
161
167 fn copy_map_get(
162 fn copy_map_get(
168 &self,
163 &self,
169 key: &HgPath,
164 key: &HgPath,
170 ) -> Result<Option<&HgPath>, DirstateV2ParseError> {
165 ) -> Result<Option<&HgPath>, DirstateV2ParseError> {
171 self.get().copy_map_get(key)
166 self.get().copy_map_get(key)
172 }
167 }
173
168
174 fn copy_map_remove(
169 fn copy_map_remove(
175 &mut self,
170 &mut self,
176 key: &HgPath,
171 key: &HgPath,
177 ) -> Result<Option<HgPathBuf>, DirstateV2ParseError> {
172 ) -> Result<Option<HgPathBuf>, DirstateV2ParseError> {
178 self.get_mut().copy_map_remove(key)
173 self.get_mut().copy_map_remove(key)
179 }
174 }
180
175
181 fn copy_map_insert(
176 fn copy_map_insert(
182 &mut self,
177 &mut self,
183 key: HgPathBuf,
178 key: HgPathBuf,
184 value: HgPathBuf,
179 value: HgPathBuf,
185 ) -> Result<Option<HgPathBuf>, DirstateV2ParseError> {
180 ) -> Result<Option<HgPathBuf>, DirstateV2ParseError> {
186 self.get_mut().copy_map_insert(key, value)
181 self.get_mut().copy_map_insert(key, value)
187 }
182 }
188
183
189 fn len(&self) -> usize {
184 fn len(&self) -> usize {
190 self.get().len()
185 self.get().len()
191 }
186 }
192
187
193 fn contains_key(
188 fn contains_key(
194 &self,
189 &self,
195 key: &HgPath,
190 key: &HgPath,
196 ) -> Result<bool, DirstateV2ParseError> {
191 ) -> Result<bool, DirstateV2ParseError> {
197 self.get().contains_key(key)
192 self.get().contains_key(key)
198 }
193 }
199
194
200 fn get(
195 fn get(
201 &self,
196 &self,
202 key: &HgPath,
197 key: &HgPath,
203 ) -> Result<Option<DirstateEntry>, DirstateV2ParseError> {
198 ) -> Result<Option<DirstateEntry>, DirstateV2ParseError> {
204 self.get().get(key)
199 self.get().get(key)
205 }
200 }
206
201
207 fn iter(&self) -> StateMapIter<'_> {
202 fn iter(&self) -> StateMapIter<'_> {
208 self.get().iter()
203 self.get().iter()
209 }
204 }
210
205
211 fn iter_directories(
206 fn iter_directories(
212 &self,
207 &self,
213 ) -> Box<
208 ) -> Box<
214 dyn Iterator<
209 dyn Iterator<
215 Item = Result<
210 Item = Result<
216 (&HgPath, Option<Timestamp>),
211 (&HgPath, Option<Timestamp>),
217 DirstateV2ParseError,
212 DirstateV2ParseError,
218 >,
213 >,
219 > + Send
214 > + Send
220 + '_,
215 + '_,
221 > {
216 > {
222 self.get().iter_directories()
217 self.get().iter_directories()
223 }
218 }
224 }
219 }
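This final impl is pure delegation: every trait method forwards to `self.get()` or `self.get_mut()`, so the owning wrapper (which keeps the on-disk buffer alive for the map borrowed from it) can be used anywhere a `DirstateMapMethods` trait object is expected. The sketch below shows only that forwarding-newtype shape; `Methods`, `InnerMap` and `Owning` are illustrative stand-ins rather than hg-core types, and the self-referential ownership details are intentionally omitted.

// Sketch of the delegating-wrapper pattern: the wrapper owns an inner value
// and implements the trait by forwarding to get()/get_mut().
trait Methods {
    fn len(&self) -> usize;
    fn clear(&mut self);
}

struct InnerMap(Vec<u8>);

impl Methods for InnerMap {
    fn len(&self) -> usize {
        self.0.len()
    }
    fn clear(&mut self) {
        self.0.clear()
    }
}

/// Owns the inner map and exposes get()/get_mut(), as OwningDirstateMap does.
struct Owning {
    inner: InnerMap,
}

impl Owning {
    fn get(&self) -> &InnerMap {
        &self.inner
    }
    fn get_mut(&mut self) -> &mut InnerMap {
        &mut self.inner
    }
}

// The trait impl is nothing but forwarding, mirroring the impl above.
impl Methods for Owning {
    fn len(&self) -> usize {
        self.get().len()
    }
    fn clear(&mut self) {
        self.get_mut().clear()
    }
}

fn main() {
    let mut owning = Owning { inner: InnerMap(vec![1, 2, 3]) };
    // Usable as a trait object, like Box<dyn DirstateMapMethods + Send>.
    let as_trait: &mut dyn Methods = &mut owning;
    assert_eq!(as_trait.len(), 3);
    as_trait.clear();
    assert_eq!(as_trait.len(), 0);
}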