dirstate: use a `added` parameter to _addpath...
Author: marmoute
Changeset: r48314:fe4641cf (branch: default)
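The hunk below is long, but the functional change is small: `_addpath` grows an explicit `added=False` keyword (with `state` and `mode` becoming optional), the old `state == b'a'` test becomes `if added`, the flag is forwarded as `added=added` to `self._map.addfile(...)`, and `dirstate.add()` now calls `self._addpath(f, added=True)` instead of `self._addpath(f, b'a', 0)`. As a rough illustration of the pattern, here is a minimal, self-contained sketch — a toy class with made-up names, not the real dirstate API; only the call shapes come from the diff below:

```python
# Toy sketch of the refactoring pattern in this changeset. ToyDirstate and
# its storage are invented for illustration; only the call shapes
# (`_addpath(f, b'a', 0)` vs `_addpath(f, added=True)`) come from the diff.

class ToyDirstate(object):
    def __init__(self):
        self._entries = {}

    def _addpath(self, f, state=None, mode=0, added=False):
        # New-style check: an explicit flag replaces the `state == b'a'` test.
        if added:
            # Derived here for the toy only; the real _addpath instead
            # forwards `added=added` to self._map.addfile().
            state = b'a'
        self._entries[f] = (state, mode)

    def add(self, f):
        """Mark a file added."""
        # was: self._addpath(f, b'a', 0)
        self._addpath(f, added=True)


ds = ToyDirstate()
ds.add(b'newfile.txt')
assert ds._entries[b'newfile.txt'] == (b'a', 0)
```

Callers now state their intent directly, and the map layer (`self._map.addfile()`) owns the encoding of that intent rather than every call site passing a magic state byte.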
@@ -1,1435 +1,1437 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .pycompat import delattr
17 from .pycompat import delattr
18
18
19 from hgdemandimport import tracing
19 from hgdemandimport import tracing
20
20
21 from . import (
21 from . import (
22 dirstatemap,
22 dirstatemap,
23 encoding,
23 encoding,
24 error,
24 error,
25 match as matchmod,
25 match as matchmod,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 sparse,
30 sparse,
31 util,
31 util,
32 )
32 )
33
33
34 from .interfaces import (
34 from .interfaces import (
35 dirstate as intdirstate,
35 dirstate as intdirstate,
36 util as interfaceutil,
36 util as interfaceutil,
37 )
37 )
38
38
39 parsers = policy.importmod('parsers')
39 parsers = policy.importmod('parsers')
40 rustmod = policy.importrust('dirstate')
40 rustmod = policy.importrust('dirstate')
41
41
42 SUPPORTS_DIRSTATE_V2 = rustmod is not None
42 SUPPORTS_DIRSTATE_V2 = rustmod is not None
43
43
44 propertycache = util.propertycache
44 propertycache = util.propertycache
45 filecache = scmutil.filecache
45 filecache = scmutil.filecache
46 _rangemask = dirstatemap.rangemask
46 _rangemask = dirstatemap.rangemask
47
47
48 dirstatetuple = parsers.dirstatetuple
48 dirstatetuple = parsers.dirstatetuple
49
49
50
50
51 class repocache(filecache):
51 class repocache(filecache):
52 """filecache for files in .hg/"""
52 """filecache for files in .hg/"""
53
53
54 def join(self, obj, fname):
54 def join(self, obj, fname):
55 return obj._opener.join(fname)
55 return obj._opener.join(fname)
56
56
57
57
58 class rootcache(filecache):
58 class rootcache(filecache):
59 """filecache for files in the repository root"""
59 """filecache for files in the repository root"""
60
60
61 def join(self, obj, fname):
61 def join(self, obj, fname):
62 return obj._join(fname)
62 return obj._join(fname)
63
63
64
64
65 def _getfsnow(vfs):
65 def _getfsnow(vfs):
66 '''Get "now" timestamp on filesystem'''
66 '''Get "now" timestamp on filesystem'''
67 tmpfd, tmpname = vfs.mkstemp()
67 tmpfd, tmpname = vfs.mkstemp()
68 try:
68 try:
69 return os.fstat(tmpfd)[stat.ST_MTIME]
69 return os.fstat(tmpfd)[stat.ST_MTIME]
70 finally:
70 finally:
71 os.close(tmpfd)
71 os.close(tmpfd)
72 vfs.unlink(tmpname)
72 vfs.unlink(tmpname)
73
73
74
74
75 @interfaceutil.implementer(intdirstate.idirstate)
75 @interfaceutil.implementer(intdirstate.idirstate)
76 class dirstate(object):
76 class dirstate(object):
77 def __init__(
77 def __init__(
78 self,
78 self,
79 opener,
79 opener,
80 ui,
80 ui,
81 root,
81 root,
82 validate,
82 validate,
83 sparsematchfn,
83 sparsematchfn,
84 nodeconstants,
84 nodeconstants,
85 use_dirstate_v2,
85 use_dirstate_v2,
86 ):
86 ):
87 """Create a new dirstate object.
87 """Create a new dirstate object.
88
88
89 opener is an open()-like callable that can be used to open the
89 opener is an open()-like callable that can be used to open the
90 dirstate file; root is the root of the directory tracked by
90 dirstate file; root is the root of the directory tracked by
91 the dirstate.
91 the dirstate.
92 """
92 """
93 self._use_dirstate_v2 = use_dirstate_v2
93 self._use_dirstate_v2 = use_dirstate_v2
94 self._nodeconstants = nodeconstants
94 self._nodeconstants = nodeconstants
95 self._opener = opener
95 self._opener = opener
96 self._validate = validate
96 self._validate = validate
97 self._root = root
97 self._root = root
98 self._sparsematchfn = sparsematchfn
98 self._sparsematchfn = sparsematchfn
99 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
99 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
100 # UNC path pointing to root share (issue4557)
100 # UNC path pointing to root share (issue4557)
101 self._rootdir = pathutil.normasprefix(root)
101 self._rootdir = pathutil.normasprefix(root)
102 self._dirty = False
102 self._dirty = False
103 self._lastnormaltime = 0
103 self._lastnormaltime = 0
104 self._ui = ui
104 self._ui = ui
105 self._filecache = {}
105 self._filecache = {}
106 self._parentwriters = 0
106 self._parentwriters = 0
107 self._filename = b'dirstate'
107 self._filename = b'dirstate'
108 self._pendingfilename = b'%s.pending' % self._filename
108 self._pendingfilename = b'%s.pending' % self._filename
109 self._plchangecallbacks = {}
109 self._plchangecallbacks = {}
110 self._origpl = None
110 self._origpl = None
111 self._updatedfiles = set()
111 self._updatedfiles = set()
112 self._mapcls = dirstatemap.dirstatemap
112 self._mapcls = dirstatemap.dirstatemap
113 # Access and cache cwd early, so we don't access it for the first time
113 # Access and cache cwd early, so we don't access it for the first time
114 # after a working-copy update caused it to not exist (accessing it then
114 # after a working-copy update caused it to not exist (accessing it then
115 # raises an exception).
115 # raises an exception).
116 self._cwd
116 self._cwd
117
117
118 def prefetch_parents(self):
118 def prefetch_parents(self):
119 """make sure the parents are loaded
119 """make sure the parents are loaded
120
120
121 Used to avoid a race condition.
121 Used to avoid a race condition.
122 """
122 """
123 self._pl
123 self._pl
124
124
125 @contextlib.contextmanager
125 @contextlib.contextmanager
126 def parentchange(self):
126 def parentchange(self):
127 """Context manager for handling dirstate parents.
127 """Context manager for handling dirstate parents.
128
128
129 If an exception occurs in the scope of the context manager,
129 If an exception occurs in the scope of the context manager,
130 the incoherent dirstate won't be written when wlock is
130 the incoherent dirstate won't be written when wlock is
131 released.
131 released.
132 """
132 """
133 self._parentwriters += 1
133 self._parentwriters += 1
134 yield
134 yield
135 # Typically we want the "undo" step of a context manager in a
135 # Typically we want the "undo" step of a context manager in a
136 # finally block so it happens even when an exception
136 # finally block so it happens even when an exception
137 # occurs. In this case, however, we only want to decrement
137 # occurs. In this case, however, we only want to decrement
138 # parentwriters if the code in the with statement exits
138 # parentwriters if the code in the with statement exits
139 # normally, so we don't have a try/finally here on purpose.
139 # normally, so we don't have a try/finally here on purpose.
140 self._parentwriters -= 1
140 self._parentwriters -= 1
141
141
142 def pendingparentchange(self):
142 def pendingparentchange(self):
143 """Returns true if the dirstate is in the middle of a set of changes
143 """Returns true if the dirstate is in the middle of a set of changes
144 that modify the dirstate parent.
144 that modify the dirstate parent.
145 """
145 """
146 return self._parentwriters > 0
146 return self._parentwriters > 0
147
147
148 @propertycache
148 @propertycache
149 def _map(self):
149 def _map(self):
150 """Return the dirstate contents (see documentation for dirstatemap)."""
150 """Return the dirstate contents (see documentation for dirstatemap)."""
151 self._map = self._mapcls(
151 self._map = self._mapcls(
152 self._ui,
152 self._ui,
153 self._opener,
153 self._opener,
154 self._root,
154 self._root,
155 self._nodeconstants,
155 self._nodeconstants,
156 self._use_dirstate_v2,
156 self._use_dirstate_v2,
157 )
157 )
158 return self._map
158 return self._map
159
159
160 @property
160 @property
161 def _sparsematcher(self):
161 def _sparsematcher(self):
162 """The matcher for the sparse checkout.
162 """The matcher for the sparse checkout.
163
163
164 The working directory may not include every file from a manifest. The
164 The working directory may not include every file from a manifest. The
165 matcher obtained by this property will match a path if it is to be
165 matcher obtained by this property will match a path if it is to be
166 included in the working directory.
166 included in the working directory.
167 """
167 """
168 # TODO there is potential to cache this property. For now, the matcher
168 # TODO there is potential to cache this property. For now, the matcher
169 # is resolved on every access. (But the called function does use a
169 # is resolved on every access. (But the called function does use a
170 # cache to keep the lookup fast.)
170 # cache to keep the lookup fast.)
171 return self._sparsematchfn()
171 return self._sparsematchfn()
172
172
173 @repocache(b'branch')
173 @repocache(b'branch')
174 def _branch(self):
174 def _branch(self):
175 try:
175 try:
176 return self._opener.read(b"branch").strip() or b"default"
176 return self._opener.read(b"branch").strip() or b"default"
177 except IOError as inst:
177 except IOError as inst:
178 if inst.errno != errno.ENOENT:
178 if inst.errno != errno.ENOENT:
179 raise
179 raise
180 return b"default"
180 return b"default"
181
181
182 @property
182 @property
183 def _pl(self):
183 def _pl(self):
184 return self._map.parents()
184 return self._map.parents()
185
185
186 def hasdir(self, d):
186 def hasdir(self, d):
187 return self._map.hastrackeddir(d)
187 return self._map.hastrackeddir(d)
188
188
189 @rootcache(b'.hgignore')
189 @rootcache(b'.hgignore')
190 def _ignore(self):
190 def _ignore(self):
191 files = self._ignorefiles()
191 files = self._ignorefiles()
192 if not files:
192 if not files:
193 return matchmod.never()
193 return matchmod.never()
194
194
195 pats = [b'include:%s' % f for f in files]
195 pats = [b'include:%s' % f for f in files]
196 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
196 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
197
197
198 @propertycache
198 @propertycache
199 def _slash(self):
199 def _slash(self):
200 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
200 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
201
201
202 @propertycache
202 @propertycache
203 def _checklink(self):
203 def _checklink(self):
204 return util.checklink(self._root)
204 return util.checklink(self._root)
205
205
206 @propertycache
206 @propertycache
207 def _checkexec(self):
207 def _checkexec(self):
208 return bool(util.checkexec(self._root))
208 return bool(util.checkexec(self._root))
209
209
210 @propertycache
210 @propertycache
211 def _checkcase(self):
211 def _checkcase(self):
212 return not util.fscasesensitive(self._join(b'.hg'))
212 return not util.fscasesensitive(self._join(b'.hg'))
213
213
214 def _join(self, f):
214 def _join(self, f):
215 # much faster than os.path.join()
215 # much faster than os.path.join()
216 # it's safe because f is always a relative path
216 # it's safe because f is always a relative path
217 return self._rootdir + f
217 return self._rootdir + f
218
218
219 def flagfunc(self, buildfallback):
219 def flagfunc(self, buildfallback):
220 if self._checklink and self._checkexec:
220 if self._checklink and self._checkexec:
221
221
222 def f(x):
222 def f(x):
223 try:
223 try:
224 st = os.lstat(self._join(x))
224 st = os.lstat(self._join(x))
225 if util.statislink(st):
225 if util.statislink(st):
226 return b'l'
226 return b'l'
227 if util.statisexec(st):
227 if util.statisexec(st):
228 return b'x'
228 return b'x'
229 except OSError:
229 except OSError:
230 pass
230 pass
231 return b''
231 return b''
232
232
233 return f
233 return f
234
234
235 fallback = buildfallback()
235 fallback = buildfallback()
236 if self._checklink:
236 if self._checklink:
237
237
238 def f(x):
238 def f(x):
239 if os.path.islink(self._join(x)):
239 if os.path.islink(self._join(x)):
240 return b'l'
240 return b'l'
241 if b'x' in fallback(x):
241 if b'x' in fallback(x):
242 return b'x'
242 return b'x'
243 return b''
243 return b''
244
244
245 return f
245 return f
246 if self._checkexec:
246 if self._checkexec:
247
247
248 def f(x):
248 def f(x):
249 if b'l' in fallback(x):
249 if b'l' in fallback(x):
250 return b'l'
250 return b'l'
251 if util.isexec(self._join(x)):
251 if util.isexec(self._join(x)):
252 return b'x'
252 return b'x'
253 return b''
253 return b''
254
254
255 return f
255 return f
256 else:
256 else:
257 return fallback
257 return fallback
258
258
259 @propertycache
259 @propertycache
260 def _cwd(self):
260 def _cwd(self):
261 # internal config: ui.forcecwd
261 # internal config: ui.forcecwd
262 forcecwd = self._ui.config(b'ui', b'forcecwd')
262 forcecwd = self._ui.config(b'ui', b'forcecwd')
263 if forcecwd:
263 if forcecwd:
264 return forcecwd
264 return forcecwd
265 return encoding.getcwd()
265 return encoding.getcwd()
266
266
267 def getcwd(self):
267 def getcwd(self):
268 """Return the path from which a canonical path is calculated.
268 """Return the path from which a canonical path is calculated.
269
269
270 This path should be used to resolve file patterns or to convert
270 This path should be used to resolve file patterns or to convert
271 canonical paths back to file paths for display. It shouldn't be
271 canonical paths back to file paths for display. It shouldn't be
272 used to get real file paths. Use vfs functions instead.
272 used to get real file paths. Use vfs functions instead.
273 """
273 """
274 cwd = self._cwd
274 cwd = self._cwd
275 if cwd == self._root:
275 if cwd == self._root:
276 return b''
276 return b''
277 # self._root ends with a path separator if self._root is '/' or 'C:\'
277 # self._root ends with a path separator if self._root is '/' or 'C:\'
278 rootsep = self._root
278 rootsep = self._root
279 if not util.endswithsep(rootsep):
279 if not util.endswithsep(rootsep):
280 rootsep += pycompat.ossep
280 rootsep += pycompat.ossep
281 if cwd.startswith(rootsep):
281 if cwd.startswith(rootsep):
282 return cwd[len(rootsep) :]
282 return cwd[len(rootsep) :]
283 else:
283 else:
284 # we're outside the repo. return an absolute path.
284 # we're outside the repo. return an absolute path.
285 return cwd
285 return cwd
286
286
287 def pathto(self, f, cwd=None):
287 def pathto(self, f, cwd=None):
288 if cwd is None:
288 if cwd is None:
289 cwd = self.getcwd()
289 cwd = self.getcwd()
290 path = util.pathto(self._root, cwd, f)
290 path = util.pathto(self._root, cwd, f)
291 if self._slash:
291 if self._slash:
292 return util.pconvert(path)
292 return util.pconvert(path)
293 return path
293 return path
294
294
295 def __getitem__(self, key):
295 def __getitem__(self, key):
296 """Return the current state of key (a filename) in the dirstate.
296 """Return the current state of key (a filename) in the dirstate.
297
297
298 States are:
298 States are:
299 n normal
299 n normal
300 m needs merging
300 m needs merging
301 r marked for removal
301 r marked for removal
302 a marked for addition
302 a marked for addition
303 ? not tracked
303 ? not tracked
304
304
305 XXX The "state" is a bit obscure to be in the "public" API. We should
305 XXX The "state" is a bit obscure to be in the "public" API. We should
306 consider migrating all users of this to go through the dirstate entry
306 consider migrating all users of this to go through the dirstate entry
307 instead.
307 instead.
308 """
308 """
309 entry = self._map.get(key)
309 entry = self._map.get(key)
310 if entry is not None:
310 if entry is not None:
311 return entry.state
311 return entry.state
312 return b'?'
312 return b'?'
313
313
314 def __contains__(self, key):
314 def __contains__(self, key):
315 return key in self._map
315 return key in self._map
316
316
317 def __iter__(self):
317 def __iter__(self):
318 return iter(sorted(self._map))
318 return iter(sorted(self._map))
319
319
320 def items(self):
320 def items(self):
321 return pycompat.iteritems(self._map)
321 return pycompat.iteritems(self._map)
322
322
323 iteritems = items
323 iteritems = items
324
324
325 def directories(self):
325 def directories(self):
326 return self._map.directories()
326 return self._map.directories()
327
327
328 def parents(self):
328 def parents(self):
329 return [self._validate(p) for p in self._pl]
329 return [self._validate(p) for p in self._pl]
330
330
331 def p1(self):
331 def p1(self):
332 return self._validate(self._pl[0])
332 return self._validate(self._pl[0])
333
333
334 def p2(self):
334 def p2(self):
335 return self._validate(self._pl[1])
335 return self._validate(self._pl[1])
336
336
337 @property
337 @property
338 def in_merge(self):
338 def in_merge(self):
339 """True if a merge is in progress"""
339 """True if a merge is in progress"""
340 return self._pl[1] != self._nodeconstants.nullid
340 return self._pl[1] != self._nodeconstants.nullid
341
341
342 def branch(self):
342 def branch(self):
343 return encoding.tolocal(self._branch)
343 return encoding.tolocal(self._branch)
344
344
345 def setparents(self, p1, p2=None):
345 def setparents(self, p1, p2=None):
346 """Set dirstate parents to p1 and p2.
346 """Set dirstate parents to p1 and p2.
347
347
348 When moving from two parents to one, "merged" entries a
348 When moving from two parents to one, "merged" entries a
349 adjusted to normal and previous copy records discarded and
349 adjusted to normal and previous copy records discarded and
350 returned by the call.
350 returned by the call.
351
351
352 See localrepo.setparents()
352 See localrepo.setparents()
353 """
353 """
354 if p2 is None:
354 if p2 is None:
355 p2 = self._nodeconstants.nullid
355 p2 = self._nodeconstants.nullid
356 if self._parentwriters == 0:
356 if self._parentwriters == 0:
357 raise ValueError(
357 raise ValueError(
358 b"cannot set dirstate parent outside of "
358 b"cannot set dirstate parent outside of "
359 b"dirstate.parentchange context manager"
359 b"dirstate.parentchange context manager"
360 )
360 )
361
361
362 self._dirty = True
362 self._dirty = True
363 oldp2 = self._pl[1]
363 oldp2 = self._pl[1]
364 if self._origpl is None:
364 if self._origpl is None:
365 self._origpl = self._pl
365 self._origpl = self._pl
366 self._map.setparents(p1, p2)
366 self._map.setparents(p1, p2)
367 copies = {}
367 copies = {}
368 if (
368 if (
369 oldp2 != self._nodeconstants.nullid
369 oldp2 != self._nodeconstants.nullid
370 and p2 == self._nodeconstants.nullid
370 and p2 == self._nodeconstants.nullid
371 ):
371 ):
372 candidatefiles = self._map.non_normal_or_other_parent_paths()
372 candidatefiles = self._map.non_normal_or_other_parent_paths()
373
373
374 for f in candidatefiles:
374 for f in candidatefiles:
375 s = self._map.get(f)
375 s = self._map.get(f)
376 if s is None:
376 if s is None:
377 continue
377 continue
378
378
379 # Discard "merged" markers when moving away from a merge state
379 # Discard "merged" markers when moving away from a merge state
380 if s.merged:
380 if s.merged:
381 source = self._map.copymap.get(f)
381 source = self._map.copymap.get(f)
382 if source:
382 if source:
383 copies[f] = source
383 copies[f] = source
384 self.normallookup(f)
384 self.normallookup(f)
385 # Also fix up otherparent markers
385 # Also fix up otherparent markers
386 elif s.from_p2:
386 elif s.from_p2:
387 source = self._map.copymap.get(f)
387 source = self._map.copymap.get(f)
388 if source:
388 if source:
389 copies[f] = source
389 copies[f] = source
390 self.add(f)
390 self.add(f)
391 return copies
391 return copies
392
392
393 def setbranch(self, branch):
393 def setbranch(self, branch):
394 self.__class__._branch.set(self, encoding.fromlocal(branch))
394 self.__class__._branch.set(self, encoding.fromlocal(branch))
395 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
395 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
396 try:
396 try:
397 f.write(self._branch + b'\n')
397 f.write(self._branch + b'\n')
398 f.close()
398 f.close()
399
399
400 # make sure filecache has the correct stat info for _branch after
400 # make sure filecache has the correct stat info for _branch after
401 # replacing the underlying file
401 # replacing the underlying file
402 ce = self._filecache[b'_branch']
402 ce = self._filecache[b'_branch']
403 if ce:
403 if ce:
404 ce.refresh()
404 ce.refresh()
405 except: # re-raises
405 except: # re-raises
406 f.discard()
406 f.discard()
407 raise
407 raise
408
408
409 def invalidate(self):
409 def invalidate(self):
410 """Causes the next access to reread the dirstate.
410 """Causes the next access to reread the dirstate.
411
411
412 This is different from localrepo.invalidatedirstate() because it always
412 This is different from localrepo.invalidatedirstate() because it always
413 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
413 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
414 check whether the dirstate has changed before rereading it."""
414 check whether the dirstate has changed before rereading it."""
415
415
416 for a in ("_map", "_branch", "_ignore"):
416 for a in ("_map", "_branch", "_ignore"):
417 if a in self.__dict__:
417 if a in self.__dict__:
418 delattr(self, a)
418 delattr(self, a)
419 self._lastnormaltime = 0
419 self._lastnormaltime = 0
420 self._dirty = False
420 self._dirty = False
421 self._updatedfiles.clear()
421 self._updatedfiles.clear()
422 self._parentwriters = 0
422 self._parentwriters = 0
423 self._origpl = None
423 self._origpl = None
424
424
425 def copy(self, source, dest):
425 def copy(self, source, dest):
426 """Mark dest as a copy of source. Unmark dest if source is None."""
426 """Mark dest as a copy of source. Unmark dest if source is None."""
427 if source == dest:
427 if source == dest:
428 return
428 return
429 self._dirty = True
429 self._dirty = True
430 if source is not None:
430 if source is not None:
431 self._map.copymap[dest] = source
431 self._map.copymap[dest] = source
432 self._updatedfiles.add(source)
432 self._updatedfiles.add(source)
433 self._updatedfiles.add(dest)
433 self._updatedfiles.add(dest)
434 elif self._map.copymap.pop(dest, None):
434 elif self._map.copymap.pop(dest, None):
435 self._updatedfiles.add(dest)
435 self._updatedfiles.add(dest)
436
436
437 def copied(self, file):
437 def copied(self, file):
438 return self._map.copymap.get(file, None)
438 return self._map.copymap.get(file, None)
439
439
440 def copies(self):
440 def copies(self):
441 return self._map.copymap
441 return self._map.copymap
442
442
443 def _addpath(
443 def _addpath(
444 self,
444 self,
445 f,
445 f,
- 446 state,
+ 446 state=None,
- 447 mode,
+ 447 mode=0,
448 size=None,
448 size=None,
449 mtime=None,
449 mtime=None,
+ 450 added=False,
450 from_p2=False,
451 from_p2=False,
451 possibly_dirty=False,
452 possibly_dirty=False,
452 ):
453 ):
453 entry = self._map.get(f)
454 entry = self._map.get(f)
- 454 if state == b'a' or entry is not None and entry.removed:
+ 455 if added or entry is not None and entry.removed:
455 scmutil.checkfilename(f)
456 scmutil.checkfilename(f)
456 if self._map.hastrackeddir(f):
457 if self._map.hastrackeddir(f):
457 msg = _(b'directory %r already in dirstate')
458 msg = _(b'directory %r already in dirstate')
458 msg %= pycompat.bytestr(f)
459 msg %= pycompat.bytestr(f)
459 raise error.Abort(msg)
460 raise error.Abort(msg)
460 # shadows
461 # shadows
461 for d in pathutil.finddirs(f):
462 for d in pathutil.finddirs(f):
462 if self._map.hastrackeddir(d):
463 if self._map.hastrackeddir(d):
463 break
464 break
464 entry = self._map.get(d)
465 entry = self._map.get(d)
465 if entry is not None and not entry.removed:
466 if entry is not None and not entry.removed:
466 msg = _(b'file %r in dirstate clashes with %r')
467 msg = _(b'file %r in dirstate clashes with %r')
467 msg %= (pycompat.bytestr(d), pycompat.bytestr(f))
468 msg %= (pycompat.bytestr(d), pycompat.bytestr(f))
468 raise error.Abort(msg)
469 raise error.Abort(msg)
469 self._dirty = True
470 self._dirty = True
470 self._updatedfiles.add(f)
471 self._updatedfiles.add(f)
471 self._map.addfile(
472 self._map.addfile(
472 f,
473 f,
473 state=state,
474 state=state,
474 mode=mode,
475 mode=mode,
475 size=size,
476 size=size,
476 mtime=mtime,
477 mtime=mtime,
+ 478 added=added,
477 from_p2=from_p2,
479 from_p2=from_p2,
478 possibly_dirty=possibly_dirty,
480 possibly_dirty=possibly_dirty,
479 )
481 )
480
482
481 def normal(self, f, parentfiledata=None):
483 def normal(self, f, parentfiledata=None):
482 """Mark a file normal and clean.
484 """Mark a file normal and clean.
483
485
484 parentfiledata: (mode, size, mtime) of the clean file
486 parentfiledata: (mode, size, mtime) of the clean file
485
487
486 parentfiledata should be computed from memory (for mode,
488 parentfiledata should be computed from memory (for mode,
487 size), as or close as possible from the point where we
489 size), as or close as possible from the point where we
488 determined the file was clean, to limit the risk of the
490 determined the file was clean, to limit the risk of the
489 file having been changed by an external process between the
491 file having been changed by an external process between the
490 moment where the file was determined to be clean and now."""
492 moment where the file was determined to be clean and now."""
491 if parentfiledata:
493 if parentfiledata:
492 (mode, size, mtime) = parentfiledata
494 (mode, size, mtime) = parentfiledata
493 else:
495 else:
494 s = os.lstat(self._join(f))
496 s = os.lstat(self._join(f))
495 mode = s.st_mode
497 mode = s.st_mode
496 size = s.st_size
498 size = s.st_size
497 mtime = s[stat.ST_MTIME]
499 mtime = s[stat.ST_MTIME]
498 self._addpath(f, b'n', mode, size, mtime)
500 self._addpath(f, b'n', mode, size, mtime)
499 self._map.copymap.pop(f, None)
501 self._map.copymap.pop(f, None)
500 if f in self._map.nonnormalset:
502 if f in self._map.nonnormalset:
501 self._map.nonnormalset.remove(f)
503 self._map.nonnormalset.remove(f)
502 if mtime > self._lastnormaltime:
504 if mtime > self._lastnormaltime:
503 # Remember the most recent modification timeslot for status(),
505 # Remember the most recent modification timeslot for status(),
504 # to make sure we won't miss future size-preserving file content
506 # to make sure we won't miss future size-preserving file content
505 # modifications that happen within the same timeslot.
507 # modifications that happen within the same timeslot.
506 self._lastnormaltime = mtime
508 self._lastnormaltime = mtime
507
509
508 def normallookup(self, f):
510 def normallookup(self, f):
509 '''Mark a file normal, but possibly dirty.'''
511 '''Mark a file normal, but possibly dirty.'''
510 if self.in_merge:
512 if self.in_merge:
511 # if there is a merge going on and the file was either
513 # if there is a merge going on and the file was either
512 # "merged" or coming from other parent (-2) before
514 # "merged" or coming from other parent (-2) before
513 # being removed, restore that state.
515 # being removed, restore that state.
514 entry = self._map.get(f)
516 entry = self._map.get(f)
515 if entry is not None:
517 if entry is not None:
516 # XXX this should probably be dealt with at a lower level
518 # XXX this should probably be dealt with at a lower level
517 # (see `merged_removed` and `from_p2_removed`)
519 # (see `merged_removed` and `from_p2_removed`)
518 if entry.merged_removed or entry.from_p2_removed:
520 if entry.merged_removed or entry.from_p2_removed:
519 source = self._map.copymap.get(f)
521 source = self._map.copymap.get(f)
520 if entry.merged_removed:
522 if entry.merged_removed:
521 self.merge(f)
523 self.merge(f)
522 elif entry.from_p2_removed:
524 elif entry.from_p2_removed:
523 self.otherparent(f)
525 self.otherparent(f)
524 if source is not None:
526 if source is not None:
525 self.copy(source, f)
527 self.copy(source, f)
526 return
528 return
527 elif entry.merged or entry.from_p2:
529 elif entry.merged or entry.from_p2:
528 return
530 return
529 self._addpath(f, b'n', 0, possibly_dirty=True)
531 self._addpath(f, b'n', 0, possibly_dirty=True)
530 self._map.copymap.pop(f, None)
532 self._map.copymap.pop(f, None)
531
533
532 def otherparent(self, f):
534 def otherparent(self, f):
533 '''Mark as coming from the other parent, always dirty.'''
535 '''Mark as coming from the other parent, always dirty.'''
534 if not self.in_merge:
536 if not self.in_merge:
535 msg = _(b"setting %r to other parent only allowed in merges") % f
537 msg = _(b"setting %r to other parent only allowed in merges") % f
536 raise error.Abort(msg)
538 raise error.Abort(msg)
537 if f in self and self[f] == b'n':
539 if f in self and self[f] == b'n':
538 # merge-like
540 # merge-like
539 self._addpath(f, b'm', 0, from_p2=True)
541 self._addpath(f, b'm', 0, from_p2=True)
540 else:
542 else:
541 # add-like
543 # add-like
542 self._addpath(f, b'n', 0, from_p2=True)
544 self._addpath(f, b'n', 0, from_p2=True)
543 self._map.copymap.pop(f, None)
545 self._map.copymap.pop(f, None)
544
546
545 def add(self, f):
547 def add(self, f):
546 '''Mark a file added.'''
548 '''Mark a file added.'''
- 547 self._addpath(f, b'a', 0)
+ 549 self._addpath(f, added=True)
548 self._map.copymap.pop(f, None)
550 self._map.copymap.pop(f, None)
549
551
550 def remove(self, f):
552 def remove(self, f):
551 '''Mark a file removed.'''
553 '''Mark a file removed.'''
552 self._dirty = True
554 self._dirty = True
553 self._updatedfiles.add(f)
555 self._updatedfiles.add(f)
554 self._map.removefile(f, in_merge=self.in_merge)
556 self._map.removefile(f, in_merge=self.in_merge)
555
557
556 def merge(self, f):
558 def merge(self, f):
557 '''Mark a file merged.'''
559 '''Mark a file merged.'''
558 if not self.in_merge:
560 if not self.in_merge:
559 return self.normallookup(f)
561 return self.normallookup(f)
560 return self.otherparent(f)
562 return self.otherparent(f)
561
563
562 def drop(self, f):
564 def drop(self, f):
563 '''Drop a file from the dirstate'''
565 '''Drop a file from the dirstate'''
564 oldstate = self[f]
566 oldstate = self[f]
565 if self._map.dropfile(f, oldstate):
567 if self._map.dropfile(f, oldstate):
566 self._dirty = True
568 self._dirty = True
567 self._updatedfiles.add(f)
569 self._updatedfiles.add(f)
568 self._map.copymap.pop(f, None)
570 self._map.copymap.pop(f, None)
569
571
570 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
572 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
571 if exists is None:
573 if exists is None:
572 exists = os.path.lexists(os.path.join(self._root, path))
574 exists = os.path.lexists(os.path.join(self._root, path))
573 if not exists:
575 if not exists:
574 # Maybe a path component exists
576 # Maybe a path component exists
575 if not ignoremissing and b'/' in path:
577 if not ignoremissing and b'/' in path:
576 d, f = path.rsplit(b'/', 1)
578 d, f = path.rsplit(b'/', 1)
577 d = self._normalize(d, False, ignoremissing, None)
579 d = self._normalize(d, False, ignoremissing, None)
578 folded = d + b"/" + f
580 folded = d + b"/" + f
579 else:
581 else:
580 # No path components, preserve original case
582 # No path components, preserve original case
581 folded = path
583 folded = path
582 else:
584 else:
583 # recursively normalize leading directory components
585 # recursively normalize leading directory components
584 # against dirstate
586 # against dirstate
585 if b'/' in normed:
587 if b'/' in normed:
586 d, f = normed.rsplit(b'/', 1)
588 d, f = normed.rsplit(b'/', 1)
587 d = self._normalize(d, False, ignoremissing, True)
589 d = self._normalize(d, False, ignoremissing, True)
588 r = self._root + b"/" + d
590 r = self._root + b"/" + d
589 folded = d + b"/" + util.fspath(f, r)
591 folded = d + b"/" + util.fspath(f, r)
590 else:
592 else:
591 folded = util.fspath(normed, self._root)
593 folded = util.fspath(normed, self._root)
592 storemap[normed] = folded
594 storemap[normed] = folded
593
595
594 return folded
596 return folded
595
597
596 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
598 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
597 normed = util.normcase(path)
599 normed = util.normcase(path)
598 folded = self._map.filefoldmap.get(normed, None)
600 folded = self._map.filefoldmap.get(normed, None)
599 if folded is None:
601 if folded is None:
600 if isknown:
602 if isknown:
601 folded = path
603 folded = path
602 else:
604 else:
603 folded = self._discoverpath(
605 folded = self._discoverpath(
604 path, normed, ignoremissing, exists, self._map.filefoldmap
606 path, normed, ignoremissing, exists, self._map.filefoldmap
605 )
607 )
606 return folded
608 return folded
607
609
608 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
610 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
609 normed = util.normcase(path)
611 normed = util.normcase(path)
610 folded = self._map.filefoldmap.get(normed, None)
612 folded = self._map.filefoldmap.get(normed, None)
611 if folded is None:
613 if folded is None:
612 folded = self._map.dirfoldmap.get(normed, None)
614 folded = self._map.dirfoldmap.get(normed, None)
613 if folded is None:
615 if folded is None:
614 if isknown:
616 if isknown:
615 folded = path
617 folded = path
616 else:
618 else:
617 # store discovered result in dirfoldmap so that future
619 # store discovered result in dirfoldmap so that future
618 # normalizefile calls don't start matching directories
620 # normalizefile calls don't start matching directories
619 folded = self._discoverpath(
621 folded = self._discoverpath(
620 path, normed, ignoremissing, exists, self._map.dirfoldmap
622 path, normed, ignoremissing, exists, self._map.dirfoldmap
621 )
623 )
622 return folded
624 return folded
623
625
624 def normalize(self, path, isknown=False, ignoremissing=False):
626 def normalize(self, path, isknown=False, ignoremissing=False):
625 """
627 """
626 normalize the case of a pathname when on a casefolding filesystem
628 normalize the case of a pathname when on a casefolding filesystem
627
629
628 isknown specifies whether the filename came from walking the
630 isknown specifies whether the filename came from walking the
629 disk, to avoid extra filesystem access.
631 disk, to avoid extra filesystem access.
630
632
631 If ignoremissing is True, missing paths are returned
633 If ignoremissing is True, missing paths are returned
632 unchanged. Otherwise, we try harder to normalize possibly
634 unchanged. Otherwise, we try harder to normalize possibly
633 existing path components.
635 existing path components.
634
636
635 The normalized case is determined based on the following precedence:
637 The normalized case is determined based on the following precedence:
636
638
637 - version of name already stored in the dirstate
639 - version of name already stored in the dirstate
638 - version of name stored on disk
640 - version of name stored on disk
639 - version provided via command arguments
641 - version provided via command arguments
640 """
642 """
641
643
642 if self._checkcase:
644 if self._checkcase:
643 return self._normalize(path, isknown, ignoremissing)
645 return self._normalize(path, isknown, ignoremissing)
644 return path
646 return path
645
647
646 def clear(self):
648 def clear(self):
647 self._map.clear()
649 self._map.clear()
648 self._lastnormaltime = 0
650 self._lastnormaltime = 0
649 self._updatedfiles.clear()
651 self._updatedfiles.clear()
650 self._dirty = True
652 self._dirty = True
651
653
652 def rebuild(self, parent, allfiles, changedfiles=None):
654 def rebuild(self, parent, allfiles, changedfiles=None):
653 if changedfiles is None:
655 if changedfiles is None:
654 # Rebuild entire dirstate
656 # Rebuild entire dirstate
655 to_lookup = allfiles
657 to_lookup = allfiles
656 to_drop = []
658 to_drop = []
657 lastnormaltime = self._lastnormaltime
659 lastnormaltime = self._lastnormaltime
658 self.clear()
660 self.clear()
659 self._lastnormaltime = lastnormaltime
661 self._lastnormaltime = lastnormaltime
660 elif len(changedfiles) < 10:
662 elif len(changedfiles) < 10:
661 # Avoid turning allfiles into a set, which can be expensive if it's
663 # Avoid turning allfiles into a set, which can be expensive if it's
662 # large.
664 # large.
663 to_lookup = []
665 to_lookup = []
664 to_drop = []
666 to_drop = []
665 for f in changedfiles:
667 for f in changedfiles:
666 if f in allfiles:
668 if f in allfiles:
667 to_lookup.append(f)
669 to_lookup.append(f)
668 else:
670 else:
669 to_drop.append(f)
671 to_drop.append(f)
670 else:
672 else:
671 changedfilesset = set(changedfiles)
673 changedfilesset = set(changedfiles)
672 to_lookup = changedfilesset & set(allfiles)
674 to_lookup = changedfilesset & set(allfiles)
673 to_drop = changedfilesset - to_lookup
675 to_drop = changedfilesset - to_lookup
674
676
675 if self._origpl is None:
677 if self._origpl is None:
676 self._origpl = self._pl
678 self._origpl = self._pl
677 self._map.setparents(parent, self._nodeconstants.nullid)
679 self._map.setparents(parent, self._nodeconstants.nullid)
678
680
679 for f in to_lookup:
681 for f in to_lookup:
680 self.normallookup(f)
682 self.normallookup(f)
681 for f in to_drop:
683 for f in to_drop:
682 self.drop(f)
684 self.drop(f)
683
685
684 self._dirty = True
686 self._dirty = True
685
687
686 def identity(self):
688 def identity(self):
687 """Return identity of dirstate itself to detect changing in storage
689 """Return identity of dirstate itself to detect changing in storage
688
690
689 If identity of previous dirstate is equal to this, writing
691 If identity of previous dirstate is equal to this, writing
690 changes based on the former dirstate out can keep consistency.
692 changes based on the former dirstate out can keep consistency.
691 """
693 """
692 return self._map.identity
694 return self._map.identity
693
695
694 def write(self, tr):
696 def write(self, tr):
695 if not self._dirty:
697 if not self._dirty:
696 return
698 return
697
699
698 filename = self._filename
700 filename = self._filename
699 if tr:
701 if tr:
700 # 'dirstate.write()' is not only for writing in-memory
702 # 'dirstate.write()' is not only for writing in-memory
701 # changes out, but also for dropping ambiguous timestamp.
703 # changes out, but also for dropping ambiguous timestamp.
702 # delayed writing re-raise "ambiguous timestamp issue".
704 # delayed writing re-raise "ambiguous timestamp issue".
703 # See also the wiki page below for detail:
705 # See also the wiki page below for detail:
704 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
706 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
705
707
706 # emulate dropping timestamp in 'parsers.pack_dirstate'
708 # emulate dropping timestamp in 'parsers.pack_dirstate'
707 now = _getfsnow(self._opener)
709 now = _getfsnow(self._opener)
708 self._map.clearambiguoustimes(self._updatedfiles, now)
710 self._map.clearambiguoustimes(self._updatedfiles, now)
709
711
710 # emulate that all 'dirstate.normal' results are written out
712 # emulate that all 'dirstate.normal' results are written out
711 self._lastnormaltime = 0
713 self._lastnormaltime = 0
712 self._updatedfiles.clear()
714 self._updatedfiles.clear()
713
715
714 # delay writing in-memory changes out
716 # delay writing in-memory changes out
715 tr.addfilegenerator(
717 tr.addfilegenerator(
716 b'dirstate',
718 b'dirstate',
717 (self._filename,),
719 (self._filename,),
718 self._writedirstate,
720 self._writedirstate,
719 location=b'plain',
721 location=b'plain',
720 )
722 )
721 return
723 return
722
724
723 st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
725 st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
724 self._writedirstate(st)
726 self._writedirstate(st)
725
727
726 def addparentchangecallback(self, category, callback):
728 def addparentchangecallback(self, category, callback):
727 """add a callback to be called when the wd parents are changed
729 """add a callback to be called when the wd parents are changed
728
730
729 Callback will be called with the following arguments:
731 Callback will be called with the following arguments:
730 dirstate, (oldp1, oldp2), (newp1, newp2)
732 dirstate, (oldp1, oldp2), (newp1, newp2)
731
733
732 Category is a unique identifier to allow overwriting an old callback
734 Category is a unique identifier to allow overwriting an old callback
733 with a newer callback.
735 with a newer callback.
734 """
736 """
735 self._plchangecallbacks[category] = callback
737 self._plchangecallbacks[category] = callback
736
738
737 def _writedirstate(self, st):
739 def _writedirstate(self, st):
738 # notify callbacks about parents change
740 # notify callbacks about parents change
739 if self._origpl is not None and self._origpl != self._pl:
741 if self._origpl is not None and self._origpl != self._pl:
740 for c, callback in sorted(
742 for c, callback in sorted(
741 pycompat.iteritems(self._plchangecallbacks)
743 pycompat.iteritems(self._plchangecallbacks)
742 ):
744 ):
743 callback(self, self._origpl, self._pl)
745 callback(self, self._origpl, self._pl)
744 self._origpl = None
746 self._origpl = None
745 # use the modification time of the newly created temporary file as the
747 # use the modification time of the newly created temporary file as the
746 # filesystem's notion of 'now'
748 # filesystem's notion of 'now'
747 now = util.fstat(st)[stat.ST_MTIME] & _rangemask
749 now = util.fstat(st)[stat.ST_MTIME] & _rangemask
748
750
749 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
751 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
750 # timestamp of each entry in dirstate, because of 'now > mtime'
752 # timestamp of each entry in dirstate, because of 'now > mtime'
751 delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
753 delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
752 if delaywrite > 0:
754 if delaywrite > 0:
753 # do we have any files to delay for?
755 # do we have any files to delay for?
754 for f, e in pycompat.iteritems(self._map):
756 for f, e in pycompat.iteritems(self._map):
755 if e.state == b'n' and e[3] == now:
757 if e.state == b'n' and e[3] == now:
756 import time # to avoid useless import
758 import time # to avoid useless import
757
759
758 # rather than sleep n seconds, sleep until the next
760 # rather than sleep n seconds, sleep until the next
759 # multiple of n seconds
761 # multiple of n seconds
760 clock = time.time()
762 clock = time.time()
761 start = int(clock) - (int(clock) % delaywrite)
763 start = int(clock) - (int(clock) % delaywrite)
762 end = start + delaywrite
764 end = start + delaywrite
763 time.sleep(end - clock)
765 time.sleep(end - clock)
764 now = end # trust our estimate that the end is near now
766 now = end # trust our estimate that the end is near now
765 break
767 break
766
768
767 self._map.write(st, now)
769 self._map.write(st, now)
768 self._lastnormaltime = 0
770 self._lastnormaltime = 0
769 self._dirty = False
771 self._dirty = False
770
772
771 def _dirignore(self, f):
773 def _dirignore(self, f):
772 if self._ignore(f):
774 if self._ignore(f):
773 return True
775 return True
774 for p in pathutil.finddirs(f):
776 for p in pathutil.finddirs(f):
775 if self._ignore(p):
777 if self._ignore(p):
776 return True
778 return True
777 return False
779 return False
778
780
779 def _ignorefiles(self):
781 def _ignorefiles(self):
780 files = []
782 files = []
781 if os.path.exists(self._join(b'.hgignore')):
783 if os.path.exists(self._join(b'.hgignore')):
782 files.append(self._join(b'.hgignore'))
784 files.append(self._join(b'.hgignore'))
783 for name, path in self._ui.configitems(b"ui"):
785 for name, path in self._ui.configitems(b"ui"):
784 if name == b'ignore' or name.startswith(b'ignore.'):
786 if name == b'ignore' or name.startswith(b'ignore.'):
785 # we need to use os.path.join here rather than self._join
787 # we need to use os.path.join here rather than self._join
786 # because path is arbitrary and user-specified
788 # because path is arbitrary and user-specified
787 files.append(os.path.join(self._rootdir, util.expandpath(path)))
789 files.append(os.path.join(self._rootdir, util.expandpath(path)))
788 return files
790 return files
789
791
790 def _ignorefileandline(self, f):
792 def _ignorefileandline(self, f):
791 files = collections.deque(self._ignorefiles())
793 files = collections.deque(self._ignorefiles())
792 visited = set()
794 visited = set()
793 while files:
795 while files:
794 i = files.popleft()
796 i = files.popleft()
795 patterns = matchmod.readpatternfile(
797 patterns = matchmod.readpatternfile(
796 i, self._ui.warn, sourceinfo=True
798 i, self._ui.warn, sourceinfo=True
797 )
799 )
798 for pattern, lineno, line in patterns:
800 for pattern, lineno, line in patterns:
799 kind, p = matchmod._patsplit(pattern, b'glob')
801 kind, p = matchmod._patsplit(pattern, b'glob')
800 if kind == b"subinclude":
802 if kind == b"subinclude":
801 if p not in visited:
803 if p not in visited:
802 files.append(p)
804 files.append(p)
803 continue
805 continue
804 m = matchmod.match(
806 m = matchmod.match(
805 self._root, b'', [], [pattern], warn=self._ui.warn
807 self._root, b'', [], [pattern], warn=self._ui.warn
806 )
808 )
807 if m(f):
809 if m(f):
808 return (i, lineno, line)
810 return (i, lineno, line)
809 visited.add(i)
811 visited.add(i)
810 return (None, -1, b"")
812 return (None, -1, b"")
811
813
812 def _walkexplicit(self, match, subrepos):
814 def _walkexplicit(self, match, subrepos):
813 """Get stat data about the files explicitly specified by match.
815 """Get stat data about the files explicitly specified by match.
814
816
815 Return a triple (results, dirsfound, dirsnotfound).
817 Return a triple (results, dirsfound, dirsnotfound).
816 - results is a mapping from filename to stat result. It also contains
818 - results is a mapping from filename to stat result. It also contains
817 listings mapping subrepos and .hg to None.
819 listings mapping subrepos and .hg to None.
818 - dirsfound is a list of files found to be directories.
820 - dirsfound is a list of files found to be directories.
819 - dirsnotfound is a list of files that the dirstate thinks are
821 - dirsnotfound is a list of files that the dirstate thinks are
820 directories and that were not found."""
822 directories and that were not found."""
821
823
822 def badtype(mode):
824 def badtype(mode):
823 kind = _(b'unknown')
825 kind = _(b'unknown')
824 if stat.S_ISCHR(mode):
826 if stat.S_ISCHR(mode):
825 kind = _(b'character device')
827 kind = _(b'character device')
826 elif stat.S_ISBLK(mode):
828 elif stat.S_ISBLK(mode):
827 kind = _(b'block device')
829 kind = _(b'block device')
828 elif stat.S_ISFIFO(mode):
830 elif stat.S_ISFIFO(mode):
829 kind = _(b'fifo')
831 kind = _(b'fifo')
830 elif stat.S_ISSOCK(mode):
832 elif stat.S_ISSOCK(mode):
831 kind = _(b'socket')
833 kind = _(b'socket')
832 elif stat.S_ISDIR(mode):
834 elif stat.S_ISDIR(mode):
833 kind = _(b'directory')
835 kind = _(b'directory')
834 return _(b'unsupported file type (type is %s)') % kind
836 return _(b'unsupported file type (type is %s)') % kind
835
837
836 badfn = match.bad
838 badfn = match.bad
837 dmap = self._map
839 dmap = self._map
838 lstat = os.lstat
840 lstat = os.lstat
839 getkind = stat.S_IFMT
841 getkind = stat.S_IFMT
840 dirkind = stat.S_IFDIR
842 dirkind = stat.S_IFDIR
841 regkind = stat.S_IFREG
843 regkind = stat.S_IFREG
842 lnkkind = stat.S_IFLNK
844 lnkkind = stat.S_IFLNK
843 join = self._join
845 join = self._join
844 dirsfound = []
846 dirsfound = []
845 foundadd = dirsfound.append
847 foundadd = dirsfound.append
846 dirsnotfound = []
848 dirsnotfound = []
847 notfoundadd = dirsnotfound.append
849 notfoundadd = dirsnotfound.append
848
850
849 if not match.isexact() and self._checkcase:
851 if not match.isexact() and self._checkcase:
850 normalize = self._normalize
852 normalize = self._normalize
851 else:
853 else:
852 normalize = None
854 normalize = None
853
855
854 files = sorted(match.files())
856 files = sorted(match.files())
855 subrepos.sort()
857 subrepos.sort()
856 i, j = 0, 0
858 i, j = 0, 0
857 while i < len(files) and j < len(subrepos):
859 while i < len(files) and j < len(subrepos):
858 subpath = subrepos[j] + b"/"
860 subpath = subrepos[j] + b"/"
859 if files[i] < subpath:
861 if files[i] < subpath:
860 i += 1
862 i += 1
861 continue
863 continue
862 while i < len(files) and files[i].startswith(subpath):
864 while i < len(files) and files[i].startswith(subpath):
863 del files[i]
865 del files[i]
864 j += 1
866 j += 1
865
867
866 if not files or b'' in files:
868 if not files or b'' in files:
867 files = [b'']
869 files = [b'']
868 # constructing the foldmap is expensive, so don't do it for the
870 # constructing the foldmap is expensive, so don't do it for the
869 # common case where files is ['']
871 # common case where files is ['']
870 normalize = None
872 normalize = None
871 results = dict.fromkeys(subrepos)
873 results = dict.fromkeys(subrepos)
872 results[b'.hg'] = None
874 results[b'.hg'] = None
873
875
874 for ff in files:
876 for ff in files:
875 if normalize:
877 if normalize:
876 nf = normalize(ff, False, True)
878 nf = normalize(ff, False, True)
877 else:
879 else:
878 nf = ff
880 nf = ff
879 if nf in results:
881 if nf in results:
880 continue
882 continue
881
883
882 try:
884 try:
883 st = lstat(join(nf))
885 st = lstat(join(nf))
884 kind = getkind(st.st_mode)
886 kind = getkind(st.st_mode)
885 if kind == dirkind:
887 if kind == dirkind:
886 if nf in dmap:
888 if nf in dmap:
887 # file replaced by dir on disk but still in dirstate
889 # file replaced by dir on disk but still in dirstate
888 results[nf] = None
890 results[nf] = None
889 foundadd((nf, ff))
891 foundadd((nf, ff))
890 elif kind == regkind or kind == lnkkind:
892 elif kind == regkind or kind == lnkkind:
891 results[nf] = st
893 results[nf] = st
892 else:
894 else:
893 badfn(ff, badtype(kind))
895 badfn(ff, badtype(kind))
894 if nf in dmap:
896 if nf in dmap:
895 results[nf] = None
897 results[nf] = None
896 except OSError as inst: # nf not found on disk - it is dirstate only
898 except OSError as inst: # nf not found on disk - it is dirstate only
897 if nf in dmap: # does it exactly match a missing file?
899 if nf in dmap: # does it exactly match a missing file?
898 results[nf] = None
900 results[nf] = None
899 else: # does it match a missing directory?
901 else: # does it match a missing directory?
900 if self._map.hasdir(nf):
902 if self._map.hasdir(nf):
901 notfoundadd(nf)
903 notfoundadd(nf)
902 else:
904 else:
903 badfn(ff, encoding.strtolocal(inst.strerror))
905 badfn(ff, encoding.strtolocal(inst.strerror))
904
906
905 # match.files() may contain explicitly-specified paths that shouldn't
907 # match.files() may contain explicitly-specified paths that shouldn't
906 # be taken; drop them from the list of files found. dirsfound/notfound
908 # be taken; drop them from the list of files found. dirsfound/notfound
907 # aren't filtered here because they will be tested later.
909 # aren't filtered here because they will be tested later.
908 if match.anypats():
910 if match.anypats():
909 for f in list(results):
911 for f in list(results):
910 if f == b'.hg' or f in subrepos:
912 if f == b'.hg' or f in subrepos:
911 # keep sentinel to disable further out-of-repo walks
913 # keep sentinel to disable further out-of-repo walks
912 continue
914 continue
913 if not match(f):
915 if not match(f):
914 del results[f]
916 del results[f]
915
917
916 # Case insensitive filesystems cannot rely on lstat() failing to detect
918 # Case insensitive filesystems cannot rely on lstat() failing to detect
917 # a case-only rename. Prune the stat object for any file that does not
919 # a case-only rename. Prune the stat object for any file that does not
918 # match the case in the filesystem, if there are multiple files that
920 # match the case in the filesystem, if there are multiple files that
919 # normalize to the same path.
921 # normalize to the same path.
920 if match.isexact() and self._checkcase:
922 if match.isexact() and self._checkcase:
921 normed = {}
923 normed = {}
922
924
923 for f, st in pycompat.iteritems(results):
925 for f, st in pycompat.iteritems(results):
924 if st is None:
926 if st is None:
925 continue
927 continue
926
928
927 nc = util.normcase(f)
929 nc = util.normcase(f)
928 paths = normed.get(nc)
930 paths = normed.get(nc)
929
931
930 if paths is None:
932 if paths is None:
931 paths = set()
933 paths = set()
932 normed[nc] = paths
934 normed[nc] = paths
933
935
934 paths.add(f)
936 paths.add(f)
935
937
936 for norm, paths in pycompat.iteritems(normed):
938 for norm, paths in pycompat.iteritems(normed):
937 if len(paths) > 1:
939 if len(paths) > 1:
938 for path in paths:
940 for path in paths:
939 folded = self._discoverpath(
941 folded = self._discoverpath(
940 path, norm, True, None, self._map.dirfoldmap
942 path, norm, True, None, self._map.dirfoldmap
941 )
943 )
942 if path != folded:
944 if path != folded:
943 results[path] = None
945 results[path] = None
944
946
945 return results, dirsfound, dirsnotfound
947 return results, dirsfound, dirsnotfound
946
948
947 def walk(self, match, subrepos, unknown, ignored, full=True):
949 def walk(self, match, subrepos, unknown, ignored, full=True):
948 """
950 """
949 Walk recursively through the directory tree, finding all files
951 Walk recursively through the directory tree, finding all files
950 matched by match.
952 matched by match.
951
953
952 If full is False, maybe skip some known-clean files.
954 If full is False, maybe skip some known-clean files.
953
955
954 Return a dict mapping filename to stat-like object (either
956 Return a dict mapping filename to stat-like object (either
955 mercurial.osutil.stat instance or return value of os.stat()).
957 mercurial.osutil.stat instance or return value of os.stat()).
956
958
957 """
959 """
958 # full is a flag that extensions that hook into walk can use -- this
960 # full is a flag that extensions that hook into walk can use -- this
959 # implementation doesn't use it at all. This satisfies the contract
961 # implementation doesn't use it at all. This satisfies the contract
960 # because we only guarantee a "maybe".
962 # because we only guarantee a "maybe".
961
963
962 if ignored:
964 if ignored:
963 ignore = util.never
965 ignore = util.never
964 dirignore = util.never
966 dirignore = util.never
965 elif unknown:
967 elif unknown:
966 ignore = self._ignore
968 ignore = self._ignore
967 dirignore = self._dirignore
969 dirignore = self._dirignore
968 else:
970 else:
969 # if not unknown and not ignored, drop dir recursion and step 2
971 # if not unknown and not ignored, drop dir recursion and step 2
970 ignore = util.always
972 ignore = util.always
971 dirignore = util.always
973 dirignore = util.always
972
974
973 matchfn = match.matchfn
975 matchfn = match.matchfn
974 matchalways = match.always()
976 matchalways = match.always()
975 matchtdir = match.traversedir
977 matchtdir = match.traversedir
976 dmap = self._map
978 dmap = self._map
977 listdir = util.listdir
979 listdir = util.listdir
978 lstat = os.lstat
980 lstat = os.lstat
979 dirkind = stat.S_IFDIR
981 dirkind = stat.S_IFDIR
980 regkind = stat.S_IFREG
982 regkind = stat.S_IFREG
981 lnkkind = stat.S_IFLNK
983 lnkkind = stat.S_IFLNK
982 join = self._join
984 join = self._join
983
985
984 exact = skipstep3 = False
986 exact = skipstep3 = False
985 if match.isexact(): # match.exact
987 if match.isexact(): # match.exact
986 exact = True
988 exact = True
987 dirignore = util.always # skip step 2
989 dirignore = util.always # skip step 2
988 elif match.prefix(): # match.match, no patterns
990 elif match.prefix(): # match.match, no patterns
989 skipstep3 = True
991 skipstep3 = True
990
992
991 if not exact and self._checkcase:
993 if not exact and self._checkcase:
992 normalize = self._normalize
994 normalize = self._normalize
993 normalizefile = self._normalizefile
995 normalizefile = self._normalizefile
994 skipstep3 = False
996 skipstep3 = False
995 else:
997 else:
996 normalize = self._normalize
998 normalize = self._normalize
997 normalizefile = None
999 normalizefile = None
998
1000
999 # step 1: find all explicit files
1001 # step 1: find all explicit files
1000 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1002 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1001 if matchtdir:
1003 if matchtdir:
1002 for d in work:
1004 for d in work:
1003 matchtdir(d[0])
1005 matchtdir(d[0])
1004 for d in dirsnotfound:
1006 for d in dirsnotfound:
1005 matchtdir(d)
1007 matchtdir(d)
1006
1008
1007 skipstep3 = skipstep3 and not (work or dirsnotfound)
1009 skipstep3 = skipstep3 and not (work or dirsnotfound)
1008 work = [d for d in work if not dirignore(d[0])]
1010 work = [d for d in work if not dirignore(d[0])]
1009
1011
1010 # step 2: visit subdirectories
1012 # step 2: visit subdirectories
1011 def traverse(work, alreadynormed):
1013 def traverse(work, alreadynormed):
1012 wadd = work.append
1014 wadd = work.append
1013 while work:
1015 while work:
1014 tracing.counter('dirstate.walk work', len(work))
1016 tracing.counter('dirstate.walk work', len(work))
1015 nd = work.pop()
1017 nd = work.pop()
1016 visitentries = match.visitchildrenset(nd)
1018 visitentries = match.visitchildrenset(nd)
1017 if not visitentries:
1019 if not visitentries:
1018 continue
1020 continue
1019 if visitentries == b'this' or visitentries == b'all':
1021 if visitentries == b'this' or visitentries == b'all':
1020 visitentries = None
1022 visitentries = None
1021 skip = None
1023 skip = None
1022 if nd != b'':
1024 if nd != b'':
1023 skip = b'.hg'
1025 skip = b'.hg'
1024 try:
1026 try:
1025 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1027 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1026 entries = listdir(join(nd), stat=True, skip=skip)
1028 entries = listdir(join(nd), stat=True, skip=skip)
1027 except OSError as inst:
1029 except OSError as inst:
1028 if inst.errno in (errno.EACCES, errno.ENOENT):
1030 if inst.errno in (errno.EACCES, errno.ENOENT):
1029 match.bad(
1031 match.bad(
1030 self.pathto(nd), encoding.strtolocal(inst.strerror)
1032 self.pathto(nd), encoding.strtolocal(inst.strerror)
1031 )
1033 )
1032 continue
1034 continue
1033 raise
1035 raise
1034 for f, kind, st in entries:
1036 for f, kind, st in entries:
1035 # Some matchers may return files in the visitentries set,
1037 # Some matchers may return files in the visitentries set,
1036 # instead of 'this', if the matcher explicitly mentions them
1038 # instead of 'this', if the matcher explicitly mentions them
1037 # and is not an exactmatcher. This is acceptable; we do not
1039 # and is not an exactmatcher. This is acceptable; we do not
1038 # make any hard assumptions about file-or-directory below
1040 # make any hard assumptions about file-or-directory below
1039 # based on the presence of `f` in visitentries. If
1041 # based on the presence of `f` in visitentries. If
1040 # visitchildrenset returned a set, we can always skip the
1042 # visitchildrenset returned a set, we can always skip the
1041 # entries *not* in the set it provided regardless of whether
1043 # entries *not* in the set it provided regardless of whether
1042 # they're actually a file or a directory.
1044 # they're actually a file or a directory.
1043 if visitentries and f not in visitentries:
1045 if visitentries and f not in visitentries:
1044 continue
1046 continue
1045 if normalizefile:
1047 if normalizefile:
1046 # even though f might be a directory, we're only
1048 # even though f might be a directory, we're only
1047 # interested in comparing it to files currently in the
1049 # interested in comparing it to files currently in the
1048 # dmap -- therefore normalizefile is enough
1050 # dmap -- therefore normalizefile is enough
1049 nf = normalizefile(
1051 nf = normalizefile(
1050 nd and (nd + b"/" + f) or f, True, True
1052 nd and (nd + b"/" + f) or f, True, True
1051 )
1053 )
1052 else:
1054 else:
1053 nf = nd and (nd + b"/" + f) or f
1055 nf = nd and (nd + b"/" + f) or f
1054 if nf not in results:
1056 if nf not in results:
1055 if kind == dirkind:
1057 if kind == dirkind:
1056 if not ignore(nf):
1058 if not ignore(nf):
1057 if matchtdir:
1059 if matchtdir:
1058 matchtdir(nf)
1060 matchtdir(nf)
1059 wadd(nf)
1061 wadd(nf)
1060 if nf in dmap and (matchalways or matchfn(nf)):
1062 if nf in dmap and (matchalways or matchfn(nf)):
1061 results[nf] = None
1063 results[nf] = None
1062 elif kind == regkind or kind == lnkkind:
1064 elif kind == regkind or kind == lnkkind:
1063 if nf in dmap:
1065 if nf in dmap:
1064 if matchalways or matchfn(nf):
1066 if matchalways or matchfn(nf):
1065 results[nf] = st
1067 results[nf] = st
1066 elif (matchalways or matchfn(nf)) and not ignore(
1068 elif (matchalways or matchfn(nf)) and not ignore(
1067 nf
1069 nf
1068 ):
1070 ):
1069 # unknown file -- normalize if necessary
1071 # unknown file -- normalize if necessary
1070 if not alreadynormed:
1072 if not alreadynormed:
1071 nf = normalize(nf, False, True)
1073 nf = normalize(nf, False, True)
1072 results[nf] = st
1074 results[nf] = st
1073 elif nf in dmap and (matchalways or matchfn(nf)):
1075 elif nf in dmap and (matchalways or matchfn(nf)):
1074 results[nf] = None
1076 results[nf] = None
1075
1077
1076 for nd, d in work:
1078 for nd, d in work:
1077 # alreadynormed means that processwork doesn't have to do any
1079 # alreadynormed means that processwork doesn't have to do any
1078 # expensive directory normalization
1080 # expensive directory normalization
1079 alreadynormed = not normalize or nd == d
1081 alreadynormed = not normalize or nd == d
1080 traverse([d], alreadynormed)
1082 traverse([d], alreadynormed)
1081
1083
1082 for s in subrepos:
1084 for s in subrepos:
1083 del results[s]
1085 del results[s]
1084 del results[b'.hg']
1086 del results[b'.hg']
1085
1087
1086 # step 3: visit remaining files from dmap
1088 # step 3: visit remaining files from dmap
1087 if not skipstep3 and not exact:
1089 if not skipstep3 and not exact:
1088 # If a dmap file is not in results yet, it was either
1090 # If a dmap file is not in results yet, it was either
1089 # a) not matching matchfn b) ignored, c) missing, or d) under a
1091 # a) not matching matchfn b) ignored, c) missing, or d) under a
1090 # symlink directory.
1092 # symlink directory.
1091 if not results and matchalways:
1093 if not results and matchalways:
1092 visit = [f for f in dmap]
1094 visit = [f for f in dmap]
1093 else:
1095 else:
1094 visit = [f for f in dmap if f not in results and matchfn(f)]
1096 visit = [f for f in dmap if f not in results and matchfn(f)]
1095 visit.sort()
1097 visit.sort()
1096
1098
1097 if unknown:
1099 if unknown:
1098 # unknown == True means we walked all dirs under the roots
1100 # unknown == True means we walked all dirs under the roots
1099 # that weren't ignored, and everything that matched was stat'ed
1101 # that weren't ignored, and everything that matched was stat'ed
1100 # and is already in results.
1102 # and is already in results.
1101 # The rest must thus be ignored or under a symlink.
1103 # The rest must thus be ignored or under a symlink.
1102 audit_path = pathutil.pathauditor(self._root, cached=True)
1104 audit_path = pathutil.pathauditor(self._root, cached=True)
1103
1105
1104 for nf in iter(visit):
1106 for nf in iter(visit):
1105 # If a stat for the same file was already added with a
1107 # If a stat for the same file was already added with a
1106 # different case, don't add one for this, since that would
1108 # different case, don't add one for this, since that would
1107 # make it appear as if the file exists under both names
1109 # make it appear as if the file exists under both names
1108 # on disk.
1110 # on disk.
1109 if (
1111 if (
1110 normalizefile
1112 normalizefile
1111 and normalizefile(nf, True, True) in results
1113 and normalizefile(nf, True, True) in results
1112 ):
1114 ):
1113 results[nf] = None
1115 results[nf] = None
1114 # Report ignored items in the dmap as long as they are not
1116 # Report ignored items in the dmap as long as they are not
1115 # under a symlink directory.
1117 # under a symlink directory.
1116 elif audit_path.check(nf):
1118 elif audit_path.check(nf):
1117 try:
1119 try:
1118 results[nf] = lstat(join(nf))
1120 results[nf] = lstat(join(nf))
1119 # file was just ignored, no links, and exists
1121 # file was just ignored, no links, and exists
1120 except OSError:
1122 except OSError:
1121 # file doesn't exist
1123 # file doesn't exist
1122 results[nf] = None
1124 results[nf] = None
1123 else:
1125 else:
1124 # It's either missing or under a symlink directory
1126 # It's either missing or under a symlink directory
1125 # which we in this case report as missing
1127 # which we in this case report as missing
1126 results[nf] = None
1128 results[nf] = None
1127 else:
1129 else:
1128 # We may not have walked the full directory tree above,
1130 # We may not have walked the full directory tree above,
1129 # so stat and check everything we missed.
1131 # so stat and check everything we missed.
1130 iv = iter(visit)
1132 iv = iter(visit)
1131 for st in util.statfiles([join(i) for i in visit]):
1133 for st in util.statfiles([join(i) for i in visit]):
1132 results[next(iv)] = st
1134 results[next(iv)] = st
1133 return results
1135 return results
1134
1136
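A minimal, self-contained sketch of the result shape that walk() above describes: a stack-based traversal returning a dict of relative path to os.lstat result, skipping the '.hg' directory. The helper name toy_walk is hypothetical, and the sketch deliberately ignores Mercurial's matchers, ignore rules, subrepos and case normalization, which the real implementation handles.

import os
import stat

def toy_walk(root):
    """Return {relative path: os.stat_result} for regular files under root."""
    results = {}
    work = ['']
    while work:
        nd = work.pop()
        absdir = os.path.join(root, nd) if nd else root
        for name in os.listdir(absdir):
            if nd == '' and name == '.hg':
                continue  # do not descend into the repository metadata
            rel = os.path.join(nd, name) if nd else name
            st = os.lstat(os.path.join(root, rel))
            if stat.S_ISDIR(st.st_mode):
                work.append(rel)      # directories go back on the work stack
            else:
                results[rel] = st     # files map to their lstat result
    return results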
1135 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1137 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1136 # Force Rayon (Rust parallelism library) to respect the number of
1138 # Force Rayon (Rust parallelism library) to respect the number of
1137 # workers. This is a temporary workaround until Rust code knows
1139 # workers. This is a temporary workaround until Rust code knows
1138 # how to read the config file.
1140 # how to read the config file.
1139 numcpus = self._ui.configint(b"worker", b"numcpus")
1141 numcpus = self._ui.configint(b"worker", b"numcpus")
1140 if numcpus is not None:
1142 if numcpus is not None:
1141 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1143 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1142
1144
1143 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1145 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1144 if not workers_enabled:
1146 if not workers_enabled:
1145 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1147 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1146
1148
1147 (
1149 (
1148 lookup,
1150 lookup,
1149 modified,
1151 modified,
1150 added,
1152 added,
1151 removed,
1153 removed,
1152 deleted,
1154 deleted,
1153 clean,
1155 clean,
1154 ignored,
1156 ignored,
1155 unknown,
1157 unknown,
1156 warnings,
1158 warnings,
1157 bad,
1159 bad,
1158 traversed,
1160 traversed,
1159 dirty,
1161 dirty,
1160 ) = rustmod.status(
1162 ) = rustmod.status(
1161 self._map._rustmap,
1163 self._map._rustmap,
1162 matcher,
1164 matcher,
1163 self._rootdir,
1165 self._rootdir,
1164 self._ignorefiles(),
1166 self._ignorefiles(),
1165 self._checkexec,
1167 self._checkexec,
1166 self._lastnormaltime,
1168 self._lastnormaltime,
1167 bool(list_clean),
1169 bool(list_clean),
1168 bool(list_ignored),
1170 bool(list_ignored),
1169 bool(list_unknown),
1171 bool(list_unknown),
1170 bool(matcher.traversedir),
1172 bool(matcher.traversedir),
1171 )
1173 )
1172
1174
1173 self._dirty |= dirty
1175 self._dirty |= dirty
1174
1176
1175 if matcher.traversedir:
1177 if matcher.traversedir:
1176 for dir in traversed:
1178 for dir in traversed:
1177 matcher.traversedir(dir)
1179 matcher.traversedir(dir)
1178
1180
1179 if self._ui.warn:
1181 if self._ui.warn:
1180 for item in warnings:
1182 for item in warnings:
1181 if isinstance(item, tuple):
1183 if isinstance(item, tuple):
1182 file_path, syntax = item
1184 file_path, syntax = item
1183 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1185 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1184 file_path,
1186 file_path,
1185 syntax,
1187 syntax,
1186 )
1188 )
1187 self._ui.warn(msg)
1189 self._ui.warn(msg)
1188 else:
1190 else:
1189 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1191 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1190 self._ui.warn(
1192 self._ui.warn(
1191 msg
1193 msg
1192 % (
1194 % (
1193 pathutil.canonpath(
1195 pathutil.canonpath(
1194 self._rootdir, self._rootdir, item
1196 self._rootdir, self._rootdir, item
1195 ),
1197 ),
1196 b"No such file or directory",
1198 b"No such file or directory",
1197 )
1199 )
1198 )
1200 )
1199
1201
1200 for (fn, message) in bad:
1202 for (fn, message) in bad:
1201 matcher.bad(fn, encoding.strtolocal(message))
1203 matcher.bad(fn, encoding.strtolocal(message))
1202
1204
1203 status = scmutil.status(
1205 status = scmutil.status(
1204 modified=modified,
1206 modified=modified,
1205 added=added,
1207 added=added,
1206 removed=removed,
1208 removed=removed,
1207 deleted=deleted,
1209 deleted=deleted,
1208 unknown=unknown,
1210 unknown=unknown,
1209 ignored=ignored,
1211 ignored=ignored,
1210 clean=clean,
1212 clean=clean,
1211 )
1213 )
1212 return (lookup, status)
1214 return (lookup, status)
1213
1215
1214 def status(self, match, subrepos, ignored, clean, unknown):
1216 def status(self, match, subrepos, ignored, clean, unknown):
1215 """Determine the status of the working copy relative to the
1217 """Determine the status of the working copy relative to the
1216 dirstate and return a pair of (unsure, status), where status is of type
1218 dirstate and return a pair of (unsure, status), where status is of type
1217 scmutil.status and:
1219 scmutil.status and:
1218
1220
1219 unsure:
1221 unsure:
1220 files that might have been modified since the dirstate was
1222 files that might have been modified since the dirstate was
1221 written, but need to be read to be sure (size is the same
1223 written, but need to be read to be sure (size is the same
1222 but mtime differs)
1224 but mtime differs)
1223 status.modified:
1225 status.modified:
1224 files that have definitely been modified since the dirstate
1226 files that have definitely been modified since the dirstate
1225 was written (different size or mode)
1227 was written (different size or mode)
1226 status.clean:
1228 status.clean:
1227 files that have definitely not been modified since the
1229 files that have definitely not been modified since the
1228 dirstate was written
1230 dirstate was written
1229 """
1231 """
1230 listignored, listclean, listunknown = ignored, clean, unknown
1232 listignored, listclean, listunknown = ignored, clean, unknown
1231 lookup, modified, added, unknown, ignored = [], [], [], [], []
1233 lookup, modified, added, unknown, ignored = [], [], [], [], []
1232 removed, deleted, clean = [], [], []
1234 removed, deleted, clean = [], [], []
1233
1235
1234 dmap = self._map
1236 dmap = self._map
1235 dmap.preload()
1237 dmap.preload()
1236
1238
1237 use_rust = True
1239 use_rust = True
1238
1240
1239 allowed_matchers = (
1241 allowed_matchers = (
1240 matchmod.alwaysmatcher,
1242 matchmod.alwaysmatcher,
1241 matchmod.exactmatcher,
1243 matchmod.exactmatcher,
1242 matchmod.includematcher,
1244 matchmod.includematcher,
1243 )
1245 )
1244
1246
1245 if rustmod is None:
1247 if rustmod is None:
1246 use_rust = False
1248 use_rust = False
1247 elif self._checkcase:
1249 elif self._checkcase:
1248 # Case-insensitive filesystems are not handled yet
1250 # Case-insensitive filesystems are not handled yet
1249 use_rust = False
1251 use_rust = False
1250 elif subrepos:
1252 elif subrepos:
1251 use_rust = False
1253 use_rust = False
1252 elif sparse.enabled:
1254 elif sparse.enabled:
1253 use_rust = False
1255 use_rust = False
1254 elif not isinstance(match, allowed_matchers):
1256 elif not isinstance(match, allowed_matchers):
1255 # Some matchers have yet to be implemented
1257 # Some matchers have yet to be implemented
1256 use_rust = False
1258 use_rust = False
1257
1259
1258 if use_rust:
1260 if use_rust:
1259 try:
1261 try:
1260 return self._rust_status(
1262 return self._rust_status(
1261 match, listclean, listignored, listunknown
1263 match, listclean, listignored, listunknown
1262 )
1264 )
1263 except rustmod.FallbackError:
1265 except rustmod.FallbackError:
1264 pass
1266 pass
1265
1267
1266 def noop(f):
1268 def noop(f):
1267 pass
1269 pass
1268
1270
1269 dcontains = dmap.__contains__
1271 dcontains = dmap.__contains__
1270 dget = dmap.__getitem__
1272 dget = dmap.__getitem__
1271 ladd = lookup.append # aka "unsure"
1273 ladd = lookup.append # aka "unsure"
1272 madd = modified.append
1274 madd = modified.append
1273 aadd = added.append
1275 aadd = added.append
1274 uadd = unknown.append if listunknown else noop
1276 uadd = unknown.append if listunknown else noop
1275 iadd = ignored.append if listignored else noop
1277 iadd = ignored.append if listignored else noop
1276 radd = removed.append
1278 radd = removed.append
1277 dadd = deleted.append
1279 dadd = deleted.append
1278 cadd = clean.append if listclean else noop
1280 cadd = clean.append if listclean else noop
1279 mexact = match.exact
1281 mexact = match.exact
1280 dirignore = self._dirignore
1282 dirignore = self._dirignore
1281 checkexec = self._checkexec
1283 checkexec = self._checkexec
1282 copymap = self._map.copymap
1284 copymap = self._map.copymap
1283 lastnormaltime = self._lastnormaltime
1285 lastnormaltime = self._lastnormaltime
1284
1286
1285 # We need to do full walks when either
1287 # We need to do full walks when either
1286 # - we're listing all clean files, or
1288 # - we're listing all clean files, or
1287 # - match.traversedir does something, because match.traversedir should
1289 # - match.traversedir does something, because match.traversedir should
1288 # be called for every dir in the working dir
1290 # be called for every dir in the working dir
1289 full = listclean or match.traversedir is not None
1291 full = listclean or match.traversedir is not None
1290 for fn, st in pycompat.iteritems(
1292 for fn, st in pycompat.iteritems(
1291 self.walk(match, subrepos, listunknown, listignored, full=full)
1293 self.walk(match, subrepos, listunknown, listignored, full=full)
1292 ):
1294 ):
1293 if not dcontains(fn):
1295 if not dcontains(fn):
1294 if (listignored or mexact(fn)) and dirignore(fn):
1296 if (listignored or mexact(fn)) and dirignore(fn):
1295 if listignored:
1297 if listignored:
1296 iadd(fn)
1298 iadd(fn)
1297 else:
1299 else:
1298 uadd(fn)
1300 uadd(fn)
1299 continue
1301 continue
1300
1302
1301 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1303 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1302 # written like that for performance reasons. dmap[fn] is not a
1304 # written like that for performance reasons. dmap[fn] is not a
1303 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1305 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1304 # opcode has fast paths when the value to be unpacked is a tuple or
1306 # opcode has fast paths when the value to be unpacked is a tuple or
1305 # a list, but falls back to creating a full-fledged iterator in
1307 # a list, but falls back to creating a full-fledged iterator in
1306 # general. That is much slower than simply accessing and storing the
1308 # general. That is much slower than simply accessing and storing the
1307 # tuple members one by one.
1309 # tuple members one by one.
1308 t = dget(fn)
1310 t = dget(fn)
1309 state = t.state
1311 state = t.state
1310 mode = t[1]
1312 mode = t[1]
1311 size = t[2]
1313 size = t[2]
1312 time = t[3]
1314 time = t[3]
1313
1315
1314 if not st and state in b"nma":
1316 if not st and state in b"nma":
1315 dadd(fn)
1317 dadd(fn)
1316 elif state == b'n':
1318 elif state == b'n':
1317 if (
1319 if (
1318 size >= 0
1320 size >= 0
1319 and (
1321 and (
1320 (size != st.st_size and size != st.st_size & _rangemask)
1322 (size != st.st_size and size != st.st_size & _rangemask)
1321 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1323 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1322 )
1324 )
1323 or t.from_p2
1325 or t.from_p2
1324 or fn in copymap
1326 or fn in copymap
1325 ):
1327 ):
1326 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1328 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1327 # issue6456: Size returned may be longer due to
1329 # issue6456: Size returned may be longer due to
1328 # encryption on EXT-4 fscrypt, undecided.
1330 # encryption on EXT-4 fscrypt, undecided.
1329 ladd(fn)
1331 ladd(fn)
1330 else:
1332 else:
1331 madd(fn)
1333 madd(fn)
1332 elif (
1334 elif (
1333 time != st[stat.ST_MTIME]
1335 time != st[stat.ST_MTIME]
1334 and time != st[stat.ST_MTIME] & _rangemask
1336 and time != st[stat.ST_MTIME] & _rangemask
1335 ):
1337 ):
1336 ladd(fn)
1338 ladd(fn)
1337 elif st[stat.ST_MTIME] == lastnormaltime:
1339 elif st[stat.ST_MTIME] == lastnormaltime:
1338 # fn may have just been marked as normal and it may have
1340 # fn may have just been marked as normal and it may have
1339 # changed in the same second without changing its size.
1341 # changed in the same second without changing its size.
1340 # This can happen if we quickly do multiple commits.
1342 # This can happen if we quickly do multiple commits.
1341 # Force lookup, so we don't miss such a racy file change.
1343 # Force lookup, so we don't miss such a racy file change.
1342 ladd(fn)
1344 ladd(fn)
1343 elif listclean:
1345 elif listclean:
1344 cadd(fn)
1346 cadd(fn)
1345 elif t.merged:
1347 elif t.merged:
1346 madd(fn)
1348 madd(fn)
1347 elif state == b'a':
1349 elif state == b'a':
1348 aadd(fn)
1350 aadd(fn)
1349 elif t.removed:
1351 elif t.removed:
1350 radd(fn)
1352 radd(fn)
1351 status = scmutil.status(
1353 status = scmutil.status(
1352 modified, added, removed, deleted, unknown, ignored, clean
1354 modified, added, removed, deleted, unknown, ignored, clean
1353 )
1355 )
1354 return (lookup, status)
1356 return (lookup, status)
1355
1357
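The split between "unsure", "definitely modified" and "clean" described in the status() docstring comes down to comparing the cached (size, mtime) against what lstat reports. A simplified, illustrative restatement of the classification in the loop above (classify is a hypothetical helper; the real loop also handles the exec bit, symlinks, copies and merge state, which this sketch ignores):

def classify(entry_size, entry_mtime, st_size, st_mtime, lastnormaltime):
    if entry_size != st_size:
        return 'modified'   # size changed: definitely modified
    if entry_mtime != st_mtime:
        return 'unsure'     # same size, different mtime: content must be re-read
    if st_mtime == lastnormaltime:
        return 'unsure'     # written in the same second it was cached: racy
    return 'clean'

print(classify(10, 100, 10, 100, lastnormaltime=99))   # clean
print(classify(10, 100, 12, 100, lastnormaltime=99))   # modified
print(classify(10, 100, 10, 101, lastnormaltime=99))   # unsure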
1356 def matches(self, match):
1358 def matches(self, match):
1357 """
1359 """
1358 return files in the dirstate (in whatever state) filtered by match
1360 return files in the dirstate (in whatever state) filtered by match
1359 """
1361 """
1360 dmap = self._map
1362 dmap = self._map
1361 if rustmod is not None:
1363 if rustmod is not None:
1362 dmap = self._map._rustmap
1364 dmap = self._map._rustmap
1363
1365
1364 if match.always():
1366 if match.always():
1365 return dmap.keys()
1367 return dmap.keys()
1366 files = match.files()
1368 files = match.files()
1367 if match.isexact():
1369 if match.isexact():
1368 # fast path -- filter the other way around, since typically files is
1370 # fast path -- filter the other way around, since typically files is
1369 # much smaller than dmap
1371 # much smaller than dmap
1370 return [f for f in files if f in dmap]
1372 return [f for f in files if f in dmap]
1371 if match.prefix() and all(fn in dmap for fn in files):
1373 if match.prefix() and all(fn in dmap for fn in files):
1372 # fast path -- all the values are known to be files, so just return
1374 # fast path -- all the values are known to be files, so just return
1373 # that
1375 # that
1374 return list(files)
1376 return list(files)
1375 return [f for f in dmap if match(f)]
1377 return [f for f in dmap if match(f)]
1376
1378
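Both fast paths in matches() above exploit the relative sizes of the two collections: an exact matcher filters the small requested file list against the large map, while the general case has to scan every tracked file. An illustrative comparison with toy data (the helper names are hypothetical):

tracked = {'a.txt': None, 'b.txt': None, 'sub/c.txt': None}

def matches_exact(files, tracked):
    # O(len(files)) membership tests against the big map
    return [f for f in files if f in tracked]

def matches_scan(predicate, tracked):
    # fallback: O(len(tracked)) predicate calls
    return [f for f in tracked if predicate(f)]

print(matches_exact(['a.txt', 'missing.txt'], tracked))        # ['a.txt']
print(matches_scan(lambda f: f.startswith('sub/'), tracked))   # ['sub/c.txt']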
1377 def _actualfilename(self, tr):
1379 def _actualfilename(self, tr):
1378 if tr:
1380 if tr:
1379 return self._pendingfilename
1381 return self._pendingfilename
1380 else:
1382 else:
1381 return self._filename
1383 return self._filename
1382
1384
1383 def savebackup(self, tr, backupname):
1385 def savebackup(self, tr, backupname):
1384 '''Save current dirstate into backup file'''
1386 '''Save current dirstate into backup file'''
1385 filename = self._actualfilename(tr)
1387 filename = self._actualfilename(tr)
1386 assert backupname != filename
1388 assert backupname != filename
1387
1389
1388 # use '_writedirstate' instead of 'write' to write changes certainly,
1390 # use '_writedirstate' instead of 'write' to write changes certainly,
1389 # because the latter omits writing out if transaction is running.
1391 # because the latter omits writing out if transaction is running.
1390 # output file will be used to create backup of dirstate at this point.
1392 # output file will be used to create backup of dirstate at this point.
1391 if self._dirty or not self._opener.exists(filename):
1393 if self._dirty or not self._opener.exists(filename):
1392 self._writedirstate(
1394 self._writedirstate(
1393 self._opener(filename, b"w", atomictemp=True, checkambig=True)
1395 self._opener(filename, b"w", atomictemp=True, checkambig=True)
1394 )
1396 )
1395
1397
1396 if tr:
1398 if tr:
1397 # ensure that subsequent tr.writepending returns True for
1399 # ensure that subsequent tr.writepending returns True for
1398 # changes written out above, even if dirstate is never
1400 # changes written out above, even if dirstate is never
1399 # changed after this
1401 # changed after this
1400 tr.addfilegenerator(
1402 tr.addfilegenerator(
1401 b'dirstate',
1403 b'dirstate',
1402 (self._filename,),
1404 (self._filename,),
1403 self._writedirstate,
1405 self._writedirstate,
1404 location=b'plain',
1406 location=b'plain',
1405 )
1407 )
1406
1408
1407 # ensure that pending file written above is unlinked at
1409 # ensure that pending file written above is unlinked at
1408 # failure, even if tr.writepending isn't invoked until the
1410 # failure, even if tr.writepending isn't invoked until the
1409 # end of this transaction
1411 # end of this transaction
1410 tr.registertmp(filename, location=b'plain')
1412 tr.registertmp(filename, location=b'plain')
1411
1413
1412 self._opener.tryunlink(backupname)
1414 self._opener.tryunlink(backupname)
1413 # hardlink backup is okay because _writedirstate is always called
1415 # hardlink backup is okay because _writedirstate is always called
1414 # with an "atomictemp=True" file.
1416 # with an "atomictemp=True" file.
1415 util.copyfile(
1417 util.copyfile(
1416 self._opener.join(filename),
1418 self._opener.join(filename),
1417 self._opener.join(backupname),
1419 self._opener.join(backupname),
1418 hardlink=True,
1420 hardlink=True,
1419 )
1421 )
1420
1422
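The backup in savebackup() above is cheap because the dirstate is always rewritten through an atomictemp file, so the old inode is never modified in place and a hardlinked copy stays a valid snapshot. A rough standalone sketch of that pattern with plain os calls rather than Mercurial's vfs layer (backup_via_hardlink is a hypothetical helper):

import os

def backup_via_hardlink(path, backupname):
    # Remove any stale backup, then link: both names share one inode, so the
    # "copy" is O(1); it stays valid because later writes replace `path` with
    # a new inode instead of mutating the current one.
    try:
        os.unlink(backupname)
    except FileNotFoundError:
        pass
    os.link(path, backupname)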
1421 def restorebackup(self, tr, backupname):
1423 def restorebackup(self, tr, backupname):
1422 '''Restore dirstate by backup file'''
1424 '''Restore dirstate by backup file'''
1423 # this "invalidate()" prevents "wlock.release()" from writing
1425 # this "invalidate()" prevents "wlock.release()" from writing
1424 # changes of dirstate out after restoring from backup file
1426 # changes of dirstate out after restoring from backup file
1425 self.invalidate()
1427 self.invalidate()
1426 filename = self._actualfilename(tr)
1428 filename = self._actualfilename(tr)
1427 o = self._opener
1429 o = self._opener
1428 if util.samefile(o.join(backupname), o.join(filename)):
1430 if util.samefile(o.join(backupname), o.join(filename)):
1429 o.unlink(backupname)
1431 o.unlink(backupname)
1430 else:
1432 else:
1431 o.rename(backupname, filename, checkambig=True)
1433 o.rename(backupname, filename, checkambig=True)
1432
1434
1433 def clearbackup(self, tr, backupname):
1435 def clearbackup(self, tr, backupname):
1434 '''Clear backup file'''
1436 '''Clear backup file'''
1435 self._opener.unlink(backupname)
1437 self._opener.unlink(backupname)
@@ -1,667 +1,673 b''
1 # dirstatemap.py
1 # dirstatemap.py
2 #
2 #
3 # This software may be used and distributed according to the terms of the
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2 or any later version.
4 # GNU General Public License version 2 or any later version.
5
5
6 from __future__ import absolute_import
6 from __future__ import absolute_import
7
7
8 import errno
8 import errno
9
9
10 from .i18n import _
10 from .i18n import _
11
11
12 from . import (
12 from . import (
13 error,
13 error,
14 pathutil,
14 pathutil,
15 policy,
15 policy,
16 pycompat,
16 pycompat,
17 txnutil,
17 txnutil,
18 util,
18 util,
19 )
19 )
20
20
21 parsers = policy.importmod('parsers')
21 parsers = policy.importmod('parsers')
22 rustmod = policy.importrust('dirstate')
22 rustmod = policy.importrust('dirstate')
23
23
24 propertycache = util.propertycache
24 propertycache = util.propertycache
25
25
26 dirstatetuple = parsers.dirstatetuple
26 dirstatetuple = parsers.dirstatetuple
27
27
28
28
29 # a special value used internally for `size` if the file comes from the other parent
29 # a special value used internally for `size` if the file comes from the other parent
30 FROM_P2 = -2
30 FROM_P2 = -2
31
31
32 # a special value used internally for `size` if the file is modified/merged/added
32 # a special value used internally for `size` if the file is modified/merged/added
33 NONNORMAL = -1
33 NONNORMAL = -1
34
34
35 # a special value used internally for `time` if the time is ambiguous
35 # a special value used internally for `time` if the time is ambiguous
36 AMBIGUOUS_TIME = -1
36 AMBIGUOUS_TIME = -1
37
37
38 rangemask = 0x7FFFFFFF
38 rangemask = 0x7FFFFFFF
39
39
40
40
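rangemask keeps size and mtime within 31 bits, presumably so they fit the signed 32-bit fields of the on-disk dirstate format without colliding with the negative sentinels above; the status code compares values both raw and masked. A tiny illustration (fold is a hypothetical helper mirroring `value & rangemask`):

RANGEMASK = 0x7FFFFFFF  # same value as rangemask above

def fold(value):
    return value & RANGEMASK

big_file_size = 5 * 1024 ** 3        # 5 GiB does not fit in 31 bits
print(fold(big_file_size))           # 1073741824
print(fold(123) == 123)              # small values pass through unchanged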
41 class dirstatemap(object):
41 class dirstatemap(object):
42 """Map encapsulating the dirstate's contents.
42 """Map encapsulating the dirstate's contents.
43
43
44 The dirstate contains the following state:
44 The dirstate contains the following state:
45
45
46 - `identity` is the identity of the dirstate file, which can be used to
46 - `identity` is the identity of the dirstate file, which can be used to
47 detect when changes have occurred to the dirstate file.
47 detect when changes have occurred to the dirstate file.
48
48
49 - `parents` is a pair containing the parents of the working copy. The
49 - `parents` is a pair containing the parents of the working copy. The
50 parents are updated by calling `setparents`.
50 parents are updated by calling `setparents`.
51
51
52 - the state map maps filenames to tuples of (state, mode, size, mtime),
52 - the state map maps filenames to tuples of (state, mode, size, mtime),
53 where state is a single character representing 'normal', 'added',
53 where state is a single character representing 'normal', 'added',
54 'removed', or 'merged'. It is read by treating the dirstate as a
54 'removed', or 'merged'. It is read by treating the dirstate as a
55 dict. File state is updated by calling the `addfile`, `removefile` and
55 dict. File state is updated by calling the `addfile`, `removefile` and
56 `dropfile` methods.
56 `dropfile` methods.
57
57
58 - `copymap` maps destination filenames to their source filename.
58 - `copymap` maps destination filenames to their source filename.
59
59
60 The dirstate also provides the following views onto the state:
60 The dirstate also provides the following views onto the state:
61
61
62 - `nonnormalset` is a set of the filenames that have state other
62 - `nonnormalset` is a set of the filenames that have state other
63 than 'normal', or are normal but have an mtime of -1 ('normallookup').
63 than 'normal', or are normal but have an mtime of -1 ('normallookup').
64
64
65 - `otherparentset` is a set of the filenames that are marked as coming
65 - `otherparentset` is a set of the filenames that are marked as coming
66 from the second parent when the dirstate is currently being merged.
66 from the second parent when the dirstate is currently being merged.
67
67
68 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
68 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
69 form that they appear as in the dirstate.
69 form that they appear as in the dirstate.
70
70
71 - `dirfoldmap` is a dict mapping normalized directory names to the
71 - `dirfoldmap` is a dict mapping normalized directory names to the
72 denormalized form that they appear as in the dirstate.
72 denormalized form that they appear as in the dirstate.
73 """
73 """
74
74
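A minimal, purely illustrative model of the structures this docstring describes -- a flat state map plus the derived nonnormalset and a copymap -- using plain Python dicts and strings rather than Mercurial's types:

toy_map = {
    'a.txt': ('n', 0o644, 12, 1700000000),   # normal and clean
    'b.txt': ('n', 0o644, 34, -1),           # normal but mtime unknown ("normallookup")
    'c.txt': ('a', 0o644, -1, -1),           # added
    'd.txt': ('r', 0, 0, 0),                 # removed
}
toy_copymap = {'c.txt': 'a.txt'}             # c.txt records a.txt as its copy source

nonnormalset = {
    f for f, (state, mode, size, mtime) in toy_map.items()
    if state != 'n' or mtime == -1
}
print(sorted(nonnormalset))                  # ['b.txt', 'c.txt', 'd.txt']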
75 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
75 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
76 self._ui = ui
76 self._ui = ui
77 self._opener = opener
77 self._opener = opener
78 self._root = root
78 self._root = root
79 self._filename = b'dirstate'
79 self._filename = b'dirstate'
80 self._nodelen = 20
80 self._nodelen = 20
81 self._nodeconstants = nodeconstants
81 self._nodeconstants = nodeconstants
82 assert (
82 assert (
83 not use_dirstate_v2
83 not use_dirstate_v2
84 ), "should have detected unsupported requirement"
84 ), "should have detected unsupported requirement"
85
85
86 self._parents = None
86 self._parents = None
87 self._dirtyparents = False
87 self._dirtyparents = False
88
88
89 # for consistent view between _pl() and _read() invocations
89 # for consistent view between _pl() and _read() invocations
90 self._pendingmode = None
90 self._pendingmode = None
91
91
92 @propertycache
92 @propertycache
93 def _map(self):
93 def _map(self):
94 self._map = {}
94 self._map = {}
95 self.read()
95 self.read()
96 return self._map
96 return self._map
97
97
98 @propertycache
98 @propertycache
99 def copymap(self):
99 def copymap(self):
100 self.copymap = {}
100 self.copymap = {}
101 self._map
101 self._map
102 return self.copymap
102 return self.copymap
103
103
104 def directories(self):
104 def directories(self):
105 # Rust / dirstate-v2 only
105 # Rust / dirstate-v2 only
106 return []
106 return []
107
107
108 def clear(self):
108 def clear(self):
109 self._map.clear()
109 self._map.clear()
110 self.copymap.clear()
110 self.copymap.clear()
111 self.setparents(self._nodeconstants.nullid, self._nodeconstants.nullid)
111 self.setparents(self._nodeconstants.nullid, self._nodeconstants.nullid)
112 util.clearcachedproperty(self, b"_dirs")
112 util.clearcachedproperty(self, b"_dirs")
113 util.clearcachedproperty(self, b"_alldirs")
113 util.clearcachedproperty(self, b"_alldirs")
114 util.clearcachedproperty(self, b"filefoldmap")
114 util.clearcachedproperty(self, b"filefoldmap")
115 util.clearcachedproperty(self, b"dirfoldmap")
115 util.clearcachedproperty(self, b"dirfoldmap")
116 util.clearcachedproperty(self, b"nonnormalset")
116 util.clearcachedproperty(self, b"nonnormalset")
117 util.clearcachedproperty(self, b"otherparentset")
117 util.clearcachedproperty(self, b"otherparentset")
118
118
119 def items(self):
119 def items(self):
120 return pycompat.iteritems(self._map)
120 return pycompat.iteritems(self._map)
121
121
122 # forward for python2,3 compat
122 # forward for python2,3 compat
123 iteritems = items
123 iteritems = items
124
124
125 def __len__(self):
125 def __len__(self):
126 return len(self._map)
126 return len(self._map)
127
127
128 def __iter__(self):
128 def __iter__(self):
129 return iter(self._map)
129 return iter(self._map)
130
130
131 def get(self, key, default=None):
131 def get(self, key, default=None):
132 return self._map.get(key, default)
132 return self._map.get(key, default)
133
133
134 def __contains__(self, key):
134 def __contains__(self, key):
135 return key in self._map
135 return key in self._map
136
136
137 def __getitem__(self, key):
137 def __getitem__(self, key):
138 return self._map[key]
138 return self._map[key]
139
139
140 def keys(self):
140 def keys(self):
141 return self._map.keys()
141 return self._map.keys()
142
142
143 def preload(self):
143 def preload(self):
144 """Loads the underlying data, if it's not already loaded"""
144 """Loads the underlying data, if it's not already loaded"""
145 self._map
145 self._map
146
146
147 def addfile(
147 def addfile(
148 self,
148 self,
149 f,
149 f,
150 state,
150 state=None,
151 mode,
151 mode=0,
152 size=None,
152 size=None,
153 mtime=None,
153 mtime=None,
154 added=False,
154 from_p2=False,
155 from_p2=False,
155 possibly_dirty=False,
156 possibly_dirty=False,
156 ):
157 ):
157 """Add a tracked file to the dirstate."""
158 """Add a tracked file to the dirstate."""
158 if state == b'a':
159 if added:
159 assert not possibly_dirty
160 assert not possibly_dirty
160 assert not from_p2
161 assert not from_p2
162 state = b'a'
161 size = NONNORMAL
163 size = NONNORMAL
162 mtime = AMBIGUOUS_TIME
164 mtime = AMBIGUOUS_TIME
163 elif from_p2:
165 elif from_p2:
164 assert not possibly_dirty
166 assert not possibly_dirty
165 size = FROM_P2
167 size = FROM_P2
166 mtime = AMBIGUOUS_TIME
168 mtime = AMBIGUOUS_TIME
167 elif possibly_dirty:
169 elif possibly_dirty:
168 size = NONNORMAL
170 size = NONNORMAL
169 mtime = AMBIGUOUS_TIME
171 mtime = AMBIGUOUS_TIME
170 else:
172 else:
173 assert state != b'a'
171 assert size != FROM_P2
174 assert size != FROM_P2
172 assert size != NONNORMAL
175 assert size != NONNORMAL
173 size = size & rangemask
176 size = size & rangemask
174 mtime = mtime & rangemask
177 mtime = mtime & rangemask
178 assert state is not None
175 assert size is not None
179 assert size is not None
176 assert mtime is not None
180 assert mtime is not None
177 old_entry = self.get(f)
181 old_entry = self.get(f)
178 if (
182 if (
179 old_entry is None or old_entry.removed
183 old_entry is None or old_entry.removed
180 ) and "_dirs" in self.__dict__:
184 ) and "_dirs" in self.__dict__:
181 self._dirs.addpath(f)
185 self._dirs.addpath(f)
182 if old_entry is None and "_alldirs" in self.__dict__:
186 if old_entry is None and "_alldirs" in self.__dict__:
183 self._alldirs.addpath(f)
187 self._alldirs.addpath(f)
184 self._map[f] = dirstatetuple(state, mode, size, mtime)
188 self._map[f] = dirstatetuple(state, mode, size, mtime)
185 if state != b'n' or mtime == AMBIGUOUS_TIME:
189 if state != b'n' or mtime == AMBIGUOUS_TIME:
186 self.nonnormalset.add(f)
190 self.nonnormalset.add(f)
187 if size == FROM_P2:
191 if size == FROM_P2:
188 self.otherparentset.add(f)
192 self.otherparentset.add(f)
189
193
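The branches of addfile() above reduce to choosing which (size, mtime) pair gets stored for the entry, which is what the new `added` flag feeds into. A condensed standalone restatement using the sentinel constants defined above (pick_size_mtime is a hypothetical helper; the real method additionally validates its inputs with assertions):

NONNORMAL = -1
FROM_P2 = -2
AMBIGUOUS_TIME = -1
RANGEMASK = 0x7FFFFFFF

def pick_size_mtime(size, mtime, added=False, from_p2=False, possibly_dirty=False):
    if added or from_p2 or possibly_dirty:
        return (FROM_P2 if from_p2 else NONNORMAL), AMBIGUOUS_TIME
    return size & RANGEMASK, mtime & RANGEMASK

print(pick_size_mtime(123, 1700000000))            # (123, 1700000000)
print(pick_size_mtime(None, None, added=True))     # (-1, -1)
print(pick_size_mtime(None, None, from_p2=True))   # (-2, -1)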
190 def removefile(self, f, in_merge=False):
194 def removefile(self, f, in_merge=False):
191 """
195 """
192 Mark a file as removed in the dirstate.
196 Mark a file as removed in the dirstate.
193
197
194 The `size` parameter is used to store sentinel values that indicate
198 The `size` parameter is used to store sentinel values that indicate
195 the file's previous state. In the future, we should refactor this
199 the file's previous state. In the future, we should refactor this
196 to be more explicit about what that state is.
200 to be more explicit about what that state is.
197 """
201 """
198 entry = self.get(f)
202 entry = self.get(f)
199 size = 0
203 size = 0
200 if in_merge:
204 if in_merge:
201 # XXX we should not be able to have 'm' state and 'FROM_P2' if not
205 # XXX we should not be able to have 'm' state and 'FROM_P2' if not
202 # during a merge. So I (marmoute) am not sure we need the
206 # during a merge. So I (marmoute) am not sure we need the
203 # conditional at all. Double-checking this with an assert
207 # conditional at all. Double-checking this with an assert
204 # would be nice.
208 # would be nice.
205 if entry is not None:
209 if entry is not None:
206 # backup the previous state
210 # backup the previous state
207 if entry.merged: # merge
211 if entry.merged: # merge
208 size = NONNORMAL
212 size = NONNORMAL
209 elif entry[0] == b'n' and entry.from_p2:
213 elif entry[0] == b'n' and entry.from_p2:
210 size = FROM_P2
214 size = FROM_P2
211 self.otherparentset.add(f)
215 self.otherparentset.add(f)
212 if size == 0:
216 if size == 0:
213 self.copymap.pop(f, None)
217 self.copymap.pop(f, None)
214
218
215 if entry is not None and entry[0] != b'r' and "_dirs" in self.__dict__:
219 if entry is not None and entry[0] != b'r' and "_dirs" in self.__dict__:
216 self._dirs.delpath(f)
220 self._dirs.delpath(f)
217 if entry is None and "_alldirs" in self.__dict__:
221 if entry is None and "_alldirs" in self.__dict__:
218 self._alldirs.addpath(f)
222 self._alldirs.addpath(f)
219 if "filefoldmap" in self.__dict__:
223 if "filefoldmap" in self.__dict__:
220 normed = util.normcase(f)
224 normed = util.normcase(f)
221 self.filefoldmap.pop(normed, None)
225 self.filefoldmap.pop(normed, None)
222 self._map[f] = dirstatetuple(b'r', 0, size, 0)
226 self._map[f] = dirstatetuple(b'r', 0, size, 0)
223 self.nonnormalset.add(f)
227 self.nonnormalset.add(f)
224
228
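When a removal happens during a merge, the sentinel stored in `size` encodes what the entry used to be, as the docstring above notes. A compact restatement of that choice (removed_size is a hypothetical helper; states are given as plain characters):

NONNORMAL = -1
FROM_P2 = -2

def removed_size(prev_state, prev_from_p2, in_merge):
    if not in_merge or prev_state is None:
        return 0
    if prev_state == 'm':                   # previously merged
        return NONNORMAL
    if prev_state == 'n' and prev_from_p2:  # previously taken from p2
        return FROM_P2
    return 0

print(removed_size('m', False, in_merge=True))    # -1
print(removed_size('n', True, in_merge=True))     # -2
print(removed_size('n', False, in_merge=False))   # 0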
225 def dropfile(self, f, oldstate):
229 def dropfile(self, f, oldstate):
226 """
230 """
227 Remove a file from the dirstate. Returns True if the file was
231 Remove a file from the dirstate. Returns True if the file was
228 previously recorded.
232 previously recorded.
229 """
233 """
230 exists = self._map.pop(f, None) is not None
234 exists = self._map.pop(f, None) is not None
231 if exists:
235 if exists:
232 if oldstate != b"r" and "_dirs" in self.__dict__:
236 if oldstate != b"r" and "_dirs" in self.__dict__:
233 self._dirs.delpath(f)
237 self._dirs.delpath(f)
234 if "_alldirs" in self.__dict__:
238 if "_alldirs" in self.__dict__:
235 self._alldirs.delpath(f)
239 self._alldirs.delpath(f)
236 if "filefoldmap" in self.__dict__:
240 if "filefoldmap" in self.__dict__:
237 normed = util.normcase(f)
241 normed = util.normcase(f)
238 self.filefoldmap.pop(normed, None)
242 self.filefoldmap.pop(normed, None)
239 self.nonnormalset.discard(f)
243 self.nonnormalset.discard(f)
240 return exists
244 return exists
241
245
242 def clearambiguoustimes(self, files, now):
246 def clearambiguoustimes(self, files, now):
243 for f in files:
247 for f in files:
244 e = self.get(f)
248 e = self.get(f)
245 if e is not None and e[0] == b'n' and e[3] == now:
249 if e is not None and e[0] == b'n' and e[3] == now:
246 self._map[f] = dirstatetuple(e[0], e[1], e[2], AMBIGUOUS_TIME)
250 self._map[f] = dirstatetuple(e[0], e[1], e[2], AMBIGUOUS_TIME)
247 self.nonnormalset.add(f)
251 self.nonnormalset.add(f)
248
252
249 def nonnormalentries(self):
253 def nonnormalentries(self):
250 '''Compute the nonnormal dirstate entries from the dmap'''
254 '''Compute the nonnormal dirstate entries from the dmap'''
251 try:
255 try:
252 return parsers.nonnormalotherparententries(self._map)
256 return parsers.nonnormalotherparententries(self._map)
253 except AttributeError:
257 except AttributeError:
254 nonnorm = set()
258 nonnorm = set()
255 otherparent = set()
259 otherparent = set()
256 for fname, e in pycompat.iteritems(self._map):
260 for fname, e in pycompat.iteritems(self._map):
257 if e[0] != b'n' or e[3] == AMBIGUOUS_TIME:
261 if e[0] != b'n' or e[3] == AMBIGUOUS_TIME:
258 nonnorm.add(fname)
262 nonnorm.add(fname)
259 if e[0] == b'n' and e[2] == FROM_P2:
263 if e[0] == b'n' and e[2] == FROM_P2:
260 otherparent.add(fname)
264 otherparent.add(fname)
261 return nonnorm, otherparent
265 return nonnorm, otherparent
262
266
263 @propertycache
267 @propertycache
264 def filefoldmap(self):
268 def filefoldmap(self):
265 """Returns a dictionary mapping normalized case paths to their
269 """Returns a dictionary mapping normalized case paths to their
266 non-normalized versions.
270 non-normalized versions.
267 """
271 """
268 try:
272 try:
269 makefilefoldmap = parsers.make_file_foldmap
273 makefilefoldmap = parsers.make_file_foldmap
270 except AttributeError:
274 except AttributeError:
271 pass
275 pass
272 else:
276 else:
273 return makefilefoldmap(
277 return makefilefoldmap(
274 self._map, util.normcasespec, util.normcasefallback
278 self._map, util.normcasespec, util.normcasefallback
275 )
279 )
276
280
277 f = {}
281 f = {}
278 normcase = util.normcase
282 normcase = util.normcase
279 for name, s in pycompat.iteritems(self._map):
283 for name, s in pycompat.iteritems(self._map):
280 if s[0] != b'r':
284 if s[0] != b'r':
281 f[normcase(name)] = name
285 f[normcase(name)] = name
282 f[b'.'] = b'.' # prevents useless util.fspath() invocation
286 f[b'.'] = b'.' # prevents useless util.fspath() invocation
283 return f
287 return f
284
288
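filefoldmap is essentially a case-folded index over the tracked (non-removed) names. A toy version using lower-casing as the normalization, whereas util.normcase is platform dependent:

tracked = ['README.txt', 'src/Main.py']
filefoldmap = {name.lower(): name for name in tracked}
print(filefoldmap['readme.txt'])   # 'README.txt'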
285 def hastrackeddir(self, d):
289 def hastrackeddir(self, d):
286 """
290 """
287 Returns True if the dirstate contains a tracked (not removed) file
291 Returns True if the dirstate contains a tracked (not removed) file
288 in this directory.
292 in this directory.
289 """
293 """
290 return d in self._dirs
294 return d in self._dirs
291
295
292 def hasdir(self, d):
296 def hasdir(self, d):
293 """
297 """
294 Returns True if the dirstate contains a file (tracked or removed)
298 Returns True if the dirstate contains a file (tracked or removed)
295 in this directory.
299 in this directory.
296 """
300 """
297 return d in self._alldirs
301 return d in self._alldirs
298
302
299 @propertycache
303 @propertycache
300 def _dirs(self):
304 def _dirs(self):
301 return pathutil.dirs(self._map, b'r')
305 return pathutil.dirs(self._map, b'r')
302
306
303 @propertycache
307 @propertycache
304 def _alldirs(self):
308 def _alldirs(self):
305 return pathutil.dirs(self._map)
309 return pathutil.dirs(self._map)
306
310
307 def _opendirstatefile(self):
311 def _opendirstatefile(self):
308 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
312 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
309 if self._pendingmode is not None and self._pendingmode != mode:
313 if self._pendingmode is not None and self._pendingmode != mode:
310 fp.close()
314 fp.close()
311 raise error.Abort(
315 raise error.Abort(
312 _(b'working directory state may be changed parallelly')
316 _(b'working directory state may be changed parallelly')
313 )
317 )
314 self._pendingmode = mode
318 self._pendingmode = mode
315 return fp
319 return fp
316
320
317 def parents(self):
321 def parents(self):
318 if not self._parents:
322 if not self._parents:
319 try:
323 try:
320 fp = self._opendirstatefile()
324 fp = self._opendirstatefile()
321 st = fp.read(2 * self._nodelen)
325 st = fp.read(2 * self._nodelen)
322 fp.close()
326 fp.close()
323 except IOError as err:
327 except IOError as err:
324 if err.errno != errno.ENOENT:
328 if err.errno != errno.ENOENT:
325 raise
329 raise
326 # File doesn't exist, so the current state is empty
330 # File doesn't exist, so the current state is empty
327 st = b''
331 st = b''
328
332
329 l = len(st)
333 l = len(st)
330 if l == self._nodelen * 2:
334 if l == self._nodelen * 2:
331 self._parents = (
335 self._parents = (
332 st[: self._nodelen],
336 st[: self._nodelen],
333 st[self._nodelen : 2 * self._nodelen],
337 st[self._nodelen : 2 * self._nodelen],
334 )
338 )
335 elif l == 0:
339 elif l == 0:
336 self._parents = (
340 self._parents = (
337 self._nodeconstants.nullid,
341 self._nodeconstants.nullid,
338 self._nodeconstants.nullid,
342 self._nodeconstants.nullid,
339 )
343 )
340 else:
344 else:
341 raise error.Abort(
345 raise error.Abort(
342 _(b'working directory state appears damaged!')
346 _(b'working directory state appears damaged!')
343 )
347 )
344
348
345 return self._parents
349 return self._parents
346
350
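parents() above only needs the fixed-size header of the dirstate file: the first 2 * nodelen bytes are the two parent node ids, and an empty file means both parents are null. A standalone sketch (split_parents is a hypothetical helper; the null id is assumed here to be 20 zero bytes):

NODELEN = 20

def split_parents(header):
    if len(header) == 0:
        return b'\x00' * NODELEN, b'\x00' * NODELEN   # empty file: null parents
    if len(header) != 2 * NODELEN:
        raise ValueError('working directory state appears damaged')
    return header[:NODELEN], header[NODELEN:2 * NODELEN]

p1, p2 = split_parents(b'\x11' * NODELEN + b'\x22' * NODELEN)
print(p1 == b'\x11' * NODELEN, p2 == b'\x22' * NODELEN)   # True True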
347 def setparents(self, p1, p2):
351 def setparents(self, p1, p2):
348 self._parents = (p1, p2)
352 self._parents = (p1, p2)
349 self._dirtyparents = True
353 self._dirtyparents = True
350
354
351 def read(self):
355 def read(self):
352 # ignore HG_PENDING because identity is used only for writing
356 # ignore HG_PENDING because identity is used only for writing
353 self.identity = util.filestat.frompath(
357 self.identity = util.filestat.frompath(
354 self._opener.join(self._filename)
358 self._opener.join(self._filename)
355 )
359 )
356
360
357 try:
361 try:
358 fp = self._opendirstatefile()
362 fp = self._opendirstatefile()
359 try:
363 try:
360 st = fp.read()
364 st = fp.read()
361 finally:
365 finally:
362 fp.close()
366 fp.close()
363 except IOError as err:
367 except IOError as err:
364 if err.errno != errno.ENOENT:
368 if err.errno != errno.ENOENT:
365 raise
369 raise
366 return
370 return
367 if not st:
371 if not st:
368 return
372 return
369
373
370 if util.safehasattr(parsers, b'dict_new_presized'):
374 if util.safehasattr(parsers, b'dict_new_presized'):
371 # Make an estimate of the number of files in the dirstate based on
375 # Make an estimate of the number of files in the dirstate based on
372 # its size. This trades wasting some memory for avoiding costly
376 # its size. This trades wasting some memory for avoiding costly
373 # resizes. Each entry has a prefix of 17 bytes followed by one or
377 # resizes. Each entry has a prefix of 17 bytes followed by one or
374 # two path names. Studies on various large-scale real-world repositories
378 # two path names. Studies on various large-scale real-world repositories
375 # found 54 bytes a reasonable upper limit for the average path names.
379 # found 54 bytes a reasonable upper limit for the average path names.
376 # Copy entries are ignored for the sake of this estimate.
380 # Copy entries are ignored for the sake of this estimate.
377 self._map = parsers.dict_new_presized(len(st) // 71)
381 self._map = parsers.dict_new_presized(len(st) // 71)
378
382
379 # Python's garbage collector triggers a GC each time a certain number
383 # Python's garbage collector triggers a GC each time a certain number
380 # of container objects (the number being defined by
384 # of container objects (the number being defined by
381 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
385 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
382 # for each file in the dirstate. The C version then immediately marks
386 # for each file in the dirstate. The C version then immediately marks
383 # them as not to be tracked by the collector. However, this has no
387 # them as not to be tracked by the collector. However, this has no
384 # effect on when GCs are triggered, only on what objects the GC looks
388 # effect on when GCs are triggered, only on what objects the GC looks
385 # into. This means that O(number of files) GCs are unavoidable.
389 # into. This means that O(number of files) GCs are unavoidable.
386 # Depending on when in the process's lifetime the dirstate is parsed,
390 # Depending on when in the process's lifetime the dirstate is parsed,
387 # this can get very expensive. As a workaround, disable GC while
391 # this can get very expensive. As a workaround, disable GC while
388 # parsing the dirstate.
392 # parsing the dirstate.
389 #
393 #
390 # (we cannot decorate the function directly since it is in a C module)
394 # (we cannot decorate the function directly since it is in a C module)
391 parse_dirstate = util.nogc(parsers.parse_dirstate)
395 parse_dirstate = util.nogc(parsers.parse_dirstate)
392 p = parse_dirstate(self._map, self.copymap, st)
396 p = parse_dirstate(self._map, self.copymap, st)
393 if not self._dirtyparents:
397 if not self._dirtyparents:
394 self.setparents(*p)
398 self.setparents(*p)
395
399
396 # Avoid excess attribute lookups by fast pathing certain checks
400 # Avoid excess attribute lookups by fast pathing certain checks
397 self.__contains__ = self._map.__contains__
401 self.__contains__ = self._map.__contains__
398 self.__getitem__ = self._map.__getitem__
402 self.__getitem__ = self._map.__getitem__
399 self.get = self._map.get
403 self.get = self._map.get
400
404
401 def write(self, st, now):
405 def write(self, st, now):
402 st.write(
406 st.write(
403 parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
407 parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
404 )
408 )
405 st.close()
409 st.close()
406 self._dirtyparents = False
410 self._dirtyparents = False
407 self.nonnormalset, self.otherparentset = self.nonnormalentries()
411 self.nonnormalset, self.otherparentset = self.nonnormalentries()
408
412
409 @propertycache
413 @propertycache
410 def nonnormalset(self):
414 def nonnormalset(self):
411 nonnorm, otherparents = self.nonnormalentries()
415 nonnorm, otherparents = self.nonnormalentries()
412 self.otherparentset = otherparents
416 self.otherparentset = otherparents
413 return nonnorm
417 return nonnorm
414
418
415 @propertycache
419 @propertycache
416 def otherparentset(self):
420 def otherparentset(self):
417 nonnorm, otherparents = self.nonnormalentries()
421 nonnorm, otherparents = self.nonnormalentries()
418 self.nonnormalset = nonnorm
422 self.nonnormalset = nonnorm
419 return otherparents
423 return otherparents
420
424
421 def non_normal_or_other_parent_paths(self):
425 def non_normal_or_other_parent_paths(self):
422 return self.nonnormalset.union(self.otherparentset)
426 return self.nonnormalset.union(self.otherparentset)
423
427
424 @propertycache
428 @propertycache
425 def identity(self):
429 def identity(self):
426 self._map
430 self._map
427 return self.identity
431 return self.identity
428
432
429 @propertycache
433 @propertycache
430 def dirfoldmap(self):
434 def dirfoldmap(self):
431 f = {}
435 f = {}
432 normcase = util.normcase
436 normcase = util.normcase
433 for name in self._dirs:
437 for name in self._dirs:
434 f[normcase(name)] = name
438 f[normcase(name)] = name
435 return f
439 return f
436
440
437
441
438 if rustmod is not None:
442 if rustmod is not None:
439
443
440 class dirstatemap(object):
444 class dirstatemap(object):
441 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
445 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
442 self._use_dirstate_v2 = use_dirstate_v2
446 self._use_dirstate_v2 = use_dirstate_v2
443 self._nodeconstants = nodeconstants
447 self._nodeconstants = nodeconstants
444 self._ui = ui
448 self._ui = ui
445 self._opener = opener
449 self._opener = opener
446 self._root = root
450 self._root = root
447 self._filename = b'dirstate'
451 self._filename = b'dirstate'
448 self._nodelen = 20 # Also update Rust code when changing this!
452 self._nodelen = 20 # Also update Rust code when changing this!
449 self._parents = None
453 self._parents = None
450 self._dirtyparents = False
454 self._dirtyparents = False
451
455
452 # for consistent view between _pl() and _read() invocations
456 # for consistent view between _pl() and _read() invocations
453 self._pendingmode = None
457 self._pendingmode = None
454
458
455 self._use_dirstate_tree = self._ui.configbool(
459 self._use_dirstate_tree = self._ui.configbool(
456 b"experimental",
460 b"experimental",
457 b"dirstate-tree.in-memory",
461 b"dirstate-tree.in-memory",
458 False,
462 False,
459 )
463 )
460
464
461 def addfile(
465 def addfile(
462 self,
466 self,
463 f,
467 f,
464 state,
468 state=None,
465 mode,
469 mode=0,
466 size=None,
470 size=None,
467 mtime=None,
471 mtime=None,
472 added=False,
468 from_p2=False,
473 from_p2=False,
469 possibly_dirty=False,
474 possibly_dirty=False,
470 ):
475 ):
471 return self._rustmap.addfile(
476 return self._rustmap.addfile(
472 f,
477 f,
473 state,
478 state,
474 mode,
479 mode,
475 size,
480 size,
476 mtime,
481 mtime,
482 added,
477 from_p2,
483 from_p2,
478 possibly_dirty,
484 possibly_dirty,
479 )
485 )
480
486
481 def removefile(self, *args, **kwargs):
487 def removefile(self, *args, **kwargs):
482 return self._rustmap.removefile(*args, **kwargs)
488 return self._rustmap.removefile(*args, **kwargs)
483
489
484 def dropfile(self, *args, **kwargs):
490 def dropfile(self, *args, **kwargs):
485 return self._rustmap.dropfile(*args, **kwargs)
491 return self._rustmap.dropfile(*args, **kwargs)
486
492
487 def clearambiguoustimes(self, *args, **kwargs):
493 def clearambiguoustimes(self, *args, **kwargs):
488 return self._rustmap.clearambiguoustimes(*args, **kwargs)
494 return self._rustmap.clearambiguoustimes(*args, **kwargs)
489
495
490 def nonnormalentries(self):
496 def nonnormalentries(self):
491 return self._rustmap.nonnormalentries()
497 return self._rustmap.nonnormalentries()
492
498
493 def get(self, *args, **kwargs):
499 def get(self, *args, **kwargs):
494 return self._rustmap.get(*args, **kwargs)
500 return self._rustmap.get(*args, **kwargs)
495
501
496 @property
502 @property
497 def copymap(self):
503 def copymap(self):
498 return self._rustmap.copymap()
504 return self._rustmap.copymap()
499
505
500 def directories(self):
506 def directories(self):
501 return self._rustmap.directories()
507 return self._rustmap.directories()
502
508
503 def preload(self):
509 def preload(self):
504 self._rustmap
510 self._rustmap
505
511
506 def clear(self):
512 def clear(self):
507 self._rustmap.clear()
513 self._rustmap.clear()
508 self.setparents(
514 self.setparents(
509 self._nodeconstants.nullid, self._nodeconstants.nullid
515 self._nodeconstants.nullid, self._nodeconstants.nullid
510 )
516 )
511 util.clearcachedproperty(self, b"_dirs")
517 util.clearcachedproperty(self, b"_dirs")
512 util.clearcachedproperty(self, b"_alldirs")
518 util.clearcachedproperty(self, b"_alldirs")
513 util.clearcachedproperty(self, b"dirfoldmap")
519 util.clearcachedproperty(self, b"dirfoldmap")
514
520
515 def items(self):
521 def items(self):
516 return self._rustmap.items()
522 return self._rustmap.items()
517
523
518 def keys(self):
524 def keys(self):
519 return iter(self._rustmap)
525 return iter(self._rustmap)
520
526
521 def __contains__(self, key):
527 def __contains__(self, key):
522 return key in self._rustmap
528 return key in self._rustmap
523
529
524 def __getitem__(self, item):
530 def __getitem__(self, item):
525 return self._rustmap[item]
531 return self._rustmap[item]
526
532
527 def __len__(self):
533 def __len__(self):
528 return len(self._rustmap)
534 return len(self._rustmap)
529
535
530 def __iter__(self):
536 def __iter__(self):
531 return iter(self._rustmap)
537 return iter(self._rustmap)
532
538
533 # forward for python2,3 compat
539 # forward for python2,3 compat
534 iteritems = items
540 iteritems = items
535
541
536 def _opendirstatefile(self):
542 def _opendirstatefile(self):
537 fp, mode = txnutil.trypending(
543 fp, mode = txnutil.trypending(
538 self._root, self._opener, self._filename
544 self._root, self._opener, self._filename
539 )
545 )
540 if self._pendingmode is not None and self._pendingmode != mode:
546 if self._pendingmode is not None and self._pendingmode != mode:
541 fp.close()
547 fp.close()
542 raise error.Abort(
548 raise error.Abort(
543 _(b'working directory state may be changed in parallel')
549 _(b'working directory state may be changed in parallel')
544 )
550 )
545 self._pendingmode = mode
551 self._pendingmode = mode
546 return fp
552 return fp
547
553
548 def setparents(self, p1, p2):
554 def setparents(self, p1, p2):
549 self._parents = (p1, p2)
555 self._parents = (p1, p2)
550 self._dirtyparents = True
556 self._dirtyparents = True
551
557
552 def parents(self):
558 def parents(self):
553 if not self._parents:
559 if not self._parents:
554 if self._use_dirstate_v2:
560 if self._use_dirstate_v2:
555 offset = len(rustmod.V2_FORMAT_MARKER)
561 offset = len(rustmod.V2_FORMAT_MARKER)
556 else:
562 else:
557 offset = 0
563 offset = 0
558 read_len = offset + self._nodelen * 2
564 read_len = offset + self._nodelen * 2
559 try:
565 try:
560 fp = self._opendirstatefile()
566 fp = self._opendirstatefile()
561 st = fp.read(read_len)
567 st = fp.read(read_len)
562 fp.close()
568 fp.close()
563 except IOError as err:
569 except IOError as err:
564 if err.errno != errno.ENOENT:
570 if err.errno != errno.ENOENT:
565 raise
571 raise
566 # File doesn't exist, so the current state is empty
572 # File doesn't exist, so the current state is empty
567 st = b''
573 st = b''
568
574
569 l = len(st)
575 l = len(st)
570 if l == read_len:
576 if l == read_len:
571 st = st[offset:]
577 st = st[offset:]
572 self._parents = (
578 self._parents = (
573 st[: self._nodelen],
579 st[: self._nodelen],
574 st[self._nodelen : 2 * self._nodelen],
580 st[self._nodelen : 2 * self._nodelen],
575 )
581 )
576 elif l == 0:
582 elif l == 0:
577 self._parents = (
583 self._parents = (
578 self._nodeconstants.nullid,
584 self._nodeconstants.nullid,
579 self._nodeconstants.nullid,
585 self._nodeconstants.nullid,
580 )
586 )
581 else:
587 else:
582 raise error.Abort(
588 raise error.Abort(
583 _(b'working directory state appears damaged!')
589 _(b'working directory state appears damaged!')
584 )
590 )
585
591
586 return self._parents
592 return self._parents
587
593
588 @propertycache
594 @propertycache
589 def _rustmap(self):
595 def _rustmap(self):
590 """
596 """
591 Fills the DirstateMap when called.
597 Fills the DirstateMap when called.
592 """
598 """
593 # ignore HG_PENDING because identity is used only for writing
599 # ignore HG_PENDING because identity is used only for writing
594 self.identity = util.filestat.frompath(
600 self.identity = util.filestat.frompath(
595 self._opener.join(self._filename)
601 self._opener.join(self._filename)
596 )
602 )
597
603
598 try:
604 try:
599 fp = self._opendirstatefile()
605 fp = self._opendirstatefile()
600 try:
606 try:
601 st = fp.read()
607 st = fp.read()
602 finally:
608 finally:
603 fp.close()
609 fp.close()
604 except IOError as err:
610 except IOError as err:
605 if err.errno != errno.ENOENT:
611 if err.errno != errno.ENOENT:
606 raise
612 raise
607 st = b''
613 st = b''
608
614
609 self._rustmap, parents = rustmod.DirstateMap.new(
615 self._rustmap, parents = rustmod.DirstateMap.new(
610 self._use_dirstate_tree, self._use_dirstate_v2, st
616 self._use_dirstate_tree, self._use_dirstate_v2, st
611 )
617 )
612
618
613 if parents and not self._dirtyparents:
619 if parents and not self._dirtyparents:
614 self.setparents(*parents)
620 self.setparents(*parents)
615
621
616 self.__contains__ = self._rustmap.__contains__
622 self.__contains__ = self._rustmap.__contains__
617 self.__getitem__ = self._rustmap.__getitem__
623 self.__getitem__ = self._rustmap.__getitem__
618 self.get = self._rustmap.get
624 self.get = self._rustmap.get
619 return self._rustmap
625 return self._rustmap
620
626
621 def write(self, st, now):
627 def write(self, st, now):
622 parents = self.parents()
628 parents = self.parents()
623 packed = self._rustmap.write(
629 packed = self._rustmap.write(
624 self._use_dirstate_v2, parents[0], parents[1], now
630 self._use_dirstate_v2, parents[0], parents[1], now
625 )
631 )
626 st.write(packed)
632 st.write(packed)
627 st.close()
633 st.close()
628 self._dirtyparents = False
634 self._dirtyparents = False
629
635
630 @propertycache
636 @propertycache
631 def filefoldmap(self):
637 def filefoldmap(self):
632 """Returns a dictionary mapping normalized case paths to their
638 """Returns a dictionary mapping normalized case paths to their
633 non-normalized versions.
639 non-normalized versions.
634 """
640 """
635 return self._rustmap.filefoldmapasdict()
641 return self._rustmap.filefoldmapasdict()
636
642
637 def hastrackeddir(self, d):
643 def hastrackeddir(self, d):
638 return self._rustmap.hastrackeddir(d)
644 return self._rustmap.hastrackeddir(d)
639
645
640 def hasdir(self, d):
646 def hasdir(self, d):
641 return self._rustmap.hasdir(d)
647 return self._rustmap.hasdir(d)
642
648
643 @propertycache
649 @propertycache
644 def identity(self):
650 def identity(self):
645 self._rustmap
651 self._rustmap
646 return self.identity
652 return self.identity
647
653
648 @property
654 @property
649 def nonnormalset(self):
655 def nonnormalset(self):
650 nonnorm = self._rustmap.non_normal_entries()
656 nonnorm = self._rustmap.non_normal_entries()
651 return nonnorm
657 return nonnorm
652
658
653 @propertycache
659 @propertycache
654 def otherparentset(self):
660 def otherparentset(self):
655 otherparents = self._rustmap.other_parent_entries()
661 otherparents = self._rustmap.other_parent_entries()
656 return otherparents
662 return otherparents
657
663
658 def non_normal_or_other_parent_paths(self):
664 def non_normal_or_other_parent_paths(self):
659 return self._rustmap.non_normal_or_other_parent_paths()
665 return self._rustmap.non_normal_or_other_parent_paths()
660
666
661 @propertycache
667 @propertycache
662 def dirfoldmap(self):
668 def dirfoldmap(self):
663 f = {}
669 f = {}
664 normcase = util.normcase
670 normcase = util.normcase
665 for name, _pseudo_entry in self.directories():
671 for name, _pseudo_entry in self.directories():
666 f[normcase(name)] = name
672 f[normcase(name)] = name
667 return f
673 return f
@@ -1,463 +1,466 b''
1 // dirstate_map.rs
1 // dirstate_map.rs
2 //
2 //
3 // Copyright 2019 Raphaël Gomès <rgomes@octobus.net>
3 // Copyright 2019 Raphaël Gomès <rgomes@octobus.net>
4 //
4 //
5 // This software may be used and distributed according to the terms of the
5 // This software may be used and distributed according to the terms of the
6 // GNU General Public License version 2 or any later version.
6 // GNU General Public License version 2 or any later version.
7
7
8 use crate::dirstate::parsers::Timestamp;
8 use crate::dirstate::parsers::Timestamp;
9 use crate::{
9 use crate::{
10 dirstate::EntryState,
10 dirstate::EntryState,
11 dirstate::MTIME_UNSET,
11 dirstate::MTIME_UNSET,
12 dirstate::SIZE_FROM_OTHER_PARENT,
12 dirstate::SIZE_FROM_OTHER_PARENT,
13 dirstate::SIZE_NON_NORMAL,
13 dirstate::SIZE_NON_NORMAL,
14 dirstate::V1_RANGEMASK,
14 dirstate::V1_RANGEMASK,
15 pack_dirstate, parse_dirstate,
15 pack_dirstate, parse_dirstate,
16 utils::hg_path::{HgPath, HgPathBuf},
16 utils::hg_path::{HgPath, HgPathBuf},
17 CopyMap, DirsMultiset, DirstateEntry, DirstateError, DirstateParents,
17 CopyMap, DirsMultiset, DirstateEntry, DirstateError, DirstateParents,
18 StateMap,
18 StateMap,
19 };
19 };
20 use micro_timer::timed;
20 use micro_timer::timed;
21 use std::collections::HashSet;
21 use std::collections::HashSet;
22 use std::iter::FromIterator;
22 use std::iter::FromIterator;
23 use std::ops::Deref;
23 use std::ops::Deref;
24
24
25 #[derive(Default)]
25 #[derive(Default)]
26 pub struct DirstateMap {
26 pub struct DirstateMap {
27 state_map: StateMap,
27 state_map: StateMap,
28 pub copy_map: CopyMap,
28 pub copy_map: CopyMap,
29 pub dirs: Option<DirsMultiset>,
29 pub dirs: Option<DirsMultiset>,
30 pub all_dirs: Option<DirsMultiset>,
30 pub all_dirs: Option<DirsMultiset>,
31 non_normal_set: Option<HashSet<HgPathBuf>>,
31 non_normal_set: Option<HashSet<HgPathBuf>>,
32 other_parent_set: Option<HashSet<HgPathBuf>>,
32 other_parent_set: Option<HashSet<HgPathBuf>>,
33 }
33 }
34
34
35 /// Should only really be used in python interface code, for clarity
35 /// Should only really be used in python interface code, for clarity
36 impl Deref for DirstateMap {
36 impl Deref for DirstateMap {
37 type Target = StateMap;
37 type Target = StateMap;
38
38
39 fn deref(&self) -> &Self::Target {
39 fn deref(&self) -> &Self::Target {
40 &self.state_map
40 &self.state_map
41 }
41 }
42 }
42 }
43
43
44 impl FromIterator<(HgPathBuf, DirstateEntry)> for DirstateMap {
44 impl FromIterator<(HgPathBuf, DirstateEntry)> for DirstateMap {
45 fn from_iter<I: IntoIterator<Item = (HgPathBuf, DirstateEntry)>>(
45 fn from_iter<I: IntoIterator<Item = (HgPathBuf, DirstateEntry)>>(
46 iter: I,
46 iter: I,
47 ) -> Self {
47 ) -> Self {
48 Self {
48 Self {
49 state_map: iter.into_iter().collect(),
49 state_map: iter.into_iter().collect(),
50 ..Self::default()
50 ..Self::default()
51 }
51 }
52 }
52 }
53 }
53 }
54
54
55 impl DirstateMap {
55 impl DirstateMap {
56 pub fn new() -> Self {
56 pub fn new() -> Self {
57 Self::default()
57 Self::default()
58 }
58 }
59
59
60 pub fn clear(&mut self) {
60 pub fn clear(&mut self) {
61 self.state_map = StateMap::default();
61 self.state_map = StateMap::default();
62 self.copy_map.clear();
62 self.copy_map.clear();
63 self.non_normal_set = None;
63 self.non_normal_set = None;
64 self.other_parent_set = None;
64 self.other_parent_set = None;
65 }
65 }
66
66
67 /// Add a tracked file to the dirstate
67 /// Add a tracked file to the dirstate
68 pub fn add_file(
68 pub fn add_file(
69 &mut self,
69 &mut self,
70 filename: &HgPath,
70 filename: &HgPath,
71 entry: DirstateEntry,
71 entry: DirstateEntry,
72 // XXX once the dust settles this should probably become an enum
72 // XXX once the dust settles this should probably become an enum
73 added: bool,
73 from_p2: bool,
74 from_p2: bool,
74 possibly_dirty: bool,
75 possibly_dirty: bool,
75 ) -> Result<(), DirstateError> {
76 ) -> Result<(), DirstateError> {
76 let mut entry = entry;
77 let mut entry = entry;
77 if entry.state == EntryState::Added {
78 if added {
78 assert!(!possibly_dirty);
79 assert!(!possibly_dirty);
79 assert!(!from_p2);
80 assert!(!from_p2);
81 entry.state = EntryState::Added;
80 entry.size = SIZE_NON_NORMAL;
82 entry.size = SIZE_NON_NORMAL;
81 entry.mtime = MTIME_UNSET;
83 entry.mtime = MTIME_UNSET;
82 } else if from_p2 {
84 } else if from_p2 {
83 assert!(!possibly_dirty);
85 assert!(!possibly_dirty);
84 entry.size = SIZE_FROM_OTHER_PARENT;
86 entry.size = SIZE_FROM_OTHER_PARENT;
85 entry.mtime = MTIME_UNSET;
87 entry.mtime = MTIME_UNSET;
86 } else if possibly_dirty {
88 } else if possibly_dirty {
87 entry.size = SIZE_NON_NORMAL;
89 entry.size = SIZE_NON_NORMAL;
88 entry.mtime = MTIME_UNSET;
90 entry.mtime = MTIME_UNSET;
89 } else {
91 } else {
90 entry.size = entry.size & V1_RANGEMASK;
92 entry.size = entry.size & V1_RANGEMASK;
91 entry.mtime = entry.mtime & V1_RANGEMASK;
93 entry.mtime = entry.mtime & V1_RANGEMASK;
92 }
94 }
93 let old_state = match self.get(filename) {
95 let old_state = match self.get(filename) {
94 Some(e) => e.state,
96 Some(e) => e.state,
95 None => EntryState::Unknown,
97 None => EntryState::Unknown,
96 };
98 };
97 if old_state == EntryState::Unknown || old_state == EntryState::Removed
99 if old_state == EntryState::Unknown || old_state == EntryState::Removed
98 {
100 {
99 if let Some(ref mut dirs) = self.dirs {
101 if let Some(ref mut dirs) = self.dirs {
100 dirs.add_path(filename)?;
102 dirs.add_path(filename)?;
101 }
103 }
102 }
104 }
103 if old_state == EntryState::Unknown {
105 if old_state == EntryState::Unknown {
104 if let Some(ref mut all_dirs) = self.all_dirs {
106 if let Some(ref mut all_dirs) = self.all_dirs {
105 all_dirs.add_path(filename)?;
107 all_dirs.add_path(filename)?;
106 }
108 }
107 }
109 }
108 self.state_map.insert(filename.to_owned(), entry.to_owned());
110 self.state_map.insert(filename.to_owned(), entry.to_owned());
109
111
110 if entry.is_non_normal() {
112 if entry.is_non_normal() {
111 self.get_non_normal_other_parent_entries()
113 self.get_non_normal_other_parent_entries()
112 .0
114 .0
113 .insert(filename.to_owned());
115 .insert(filename.to_owned());
114 }
116 }
115
117
116 if entry.is_from_other_parent() {
118 if entry.is_from_other_parent() {
117 self.get_non_normal_other_parent_entries()
119 self.get_non_normal_other_parent_entries()
118 .1
120 .1
119 .insert(filename.to_owned());
121 .insert(filename.to_owned());
120 }
122 }
121 Ok(())
123 Ok(())
122 }
124 }
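A minimal usage sketch of the new signature, mirroring the test further down in this file (the call site and values are hypothetical). When `added` is true, `add_file` forces the state to `Added` and overwrites `size` and `mtime` itself, so it must not be combined with `from_p2` or `possibly_dirty`:

    let mut map = DirstateMap::new();
    map.add_file(
        HgPath::new(b"newly-added"),
        DirstateEntry {
            state: EntryState::Normal, // overridden to Added because `added` is true
            mode: 0o644,
            size: 0,
            mtime: 0,
        },
        true,  // added
        false, // from_p2
        false, // possibly_dirty
    )
    .unwrap();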
123
125
124 /// Mark a file as removed in the dirstate.
126 /// Mark a file as removed in the dirstate.
125 ///
127 ///
126 /// The `size` parameter is used to store sentinel values that indicate
128 /// The `size` parameter is used to store sentinel values that indicate
127 /// the file's previous state. In the future, we should refactor this
129 /// the file's previous state. In the future, we should refactor this
128 /// to be more explicit about what that state is.
130 /// to be more explicit about what that state is.
129 pub fn remove_file(
131 pub fn remove_file(
130 &mut self,
132 &mut self,
131 filename: &HgPath,
133 filename: &HgPath,
132 in_merge: bool,
134 in_merge: bool,
133 ) -> Result<(), DirstateError> {
135 ) -> Result<(), DirstateError> {
134 let old_entry_opt = self.get(filename);
136 let old_entry_opt = self.get(filename);
135 let old_state = match old_entry_opt {
137 let old_state = match old_entry_opt {
136 Some(e) => e.state,
138 Some(e) => e.state,
137 None => EntryState::Unknown,
139 None => EntryState::Unknown,
138 };
140 };
139 let mut size = 0;
141 let mut size = 0;
140 if in_merge {
142 if in_merge {
141 // XXX we should not be able to have 'm' state and 'FROM_P2' if not
143 // XXX we should not be able to have 'm' state and 'FROM_P2' if not
142 // during a merge. So I (marmoute) am not sure we need the
144 // during a merge. So I (marmoute) am not sure we need the
144 // conditional at all. Double-checking this with an assert
146 // conditional at all. Double-checking this with an assert
144 // would be nice.
146 // would be nice.
145 if let Some(old_entry) = old_entry_opt {
147 if let Some(old_entry) = old_entry_opt {
146 // backup the previous state
148 // backup the previous state
147 if old_entry.state == EntryState::Merged {
149 if old_entry.state == EntryState::Merged {
148 size = SIZE_NON_NORMAL;
150 size = SIZE_NON_NORMAL;
149 } else if old_entry.state == EntryState::Normal
151 } else if old_entry.state == EntryState::Normal
150 && old_entry.size == SIZE_FROM_OTHER_PARENT
152 && old_entry.size == SIZE_FROM_OTHER_PARENT
151 {
153 {
152 // other parent
154 // other parent
153 size = SIZE_FROM_OTHER_PARENT;
155 size = SIZE_FROM_OTHER_PARENT;
154 self.get_non_normal_other_parent_entries()
156 self.get_non_normal_other_parent_entries()
155 .1
157 .1
156 .insert(filename.to_owned());
158 .insert(filename.to_owned());
157 }
159 }
158 }
160 }
159 }
161 }
160 if old_state != EntryState::Unknown && old_state != EntryState::Removed
162 if old_state != EntryState::Unknown && old_state != EntryState::Removed
161 {
163 {
162 if let Some(ref mut dirs) = self.dirs {
164 if let Some(ref mut dirs) = self.dirs {
163 dirs.delete_path(filename)?;
165 dirs.delete_path(filename)?;
164 }
166 }
165 }
167 }
166 if old_state == EntryState::Unknown {
168 if old_state == EntryState::Unknown {
167 if let Some(ref mut all_dirs) = self.all_dirs {
169 if let Some(ref mut all_dirs) = self.all_dirs {
168 all_dirs.add_path(filename)?;
170 all_dirs.add_path(filename)?;
169 }
171 }
170 }
172 }
171 if size == 0 {
173 if size == 0 {
172 self.copy_map.remove(filename);
174 self.copy_map.remove(filename);
173 }
175 }
174
176
175 self.state_map.insert(
177 self.state_map.insert(
176 filename.to_owned(),
178 filename.to_owned(),
177 DirstateEntry {
179 DirstateEntry {
178 state: EntryState::Removed,
180 state: EntryState::Removed,
179 mode: 0,
181 mode: 0,
180 size,
182 size,
181 mtime: 0,
183 mtime: 0,
182 },
184 },
183 );
185 );
184 self.get_non_normal_other_parent_entries()
186 self.get_non_normal_other_parent_entries()
185 .0
187 .0
186 .insert(filename.to_owned());
188 .insert(filename.to_owned());
187 Ok(())
189 Ok(())
188 }
190 }
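A hedged sketch of the sentinel behaviour described above, assuming `f` was previously recorded with state `Merged` (hypothetical call site):

    // Removing a merged file during a merge keeps SIZE_NON_NORMAL in `size`
    // so the previous state can be recovered later.
    map.remove_file(HgPath::new(b"f"), true).unwrap();
    let entry = map.get(HgPath::new(b"f")).unwrap();
    assert!(entry.state == EntryState::Removed);
    assert_eq!(entry.size, SIZE_NON_NORMAL);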
189
191
190 /// Remove a file from the dirstate.
192 /// Remove a file from the dirstate.
191 /// Returns `true` if the file was previously recorded.
193 /// Returns `true` if the file was previously recorded.
192 pub fn drop_file(
194 pub fn drop_file(
193 &mut self,
195 &mut self,
194 filename: &HgPath,
196 filename: &HgPath,
195 old_state: EntryState,
197 old_state: EntryState,
196 ) -> Result<bool, DirstateError> {
198 ) -> Result<bool, DirstateError> {
197 let exists = self.state_map.remove(filename).is_some();
199 let exists = self.state_map.remove(filename).is_some();
198
200
199 if exists {
201 if exists {
200 if old_state != EntryState::Removed {
202 if old_state != EntryState::Removed {
201 if let Some(ref mut dirs) = self.dirs {
203 if let Some(ref mut dirs) = self.dirs {
202 dirs.delete_path(filename)?;
204 dirs.delete_path(filename)?;
203 }
205 }
204 }
206 }
205 if let Some(ref mut all_dirs) = self.all_dirs {
207 if let Some(ref mut all_dirs) = self.all_dirs {
206 all_dirs.delete_path(filename)?;
208 all_dirs.delete_path(filename)?;
207 }
209 }
208 }
210 }
209 self.get_non_normal_other_parent_entries()
211 self.get_non_normal_other_parent_entries()
210 .0
212 .0
211 .remove(filename);
213 .remove(filename);
212
214
213 Ok(exists)
215 Ok(exists)
214 }
216 }
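For completeness, a hypothetical call of `drop_file`; the caller passes in the old state it already knows rather than having the map look it up:

    // Assumes "f" was actually recorded before the call, so `true` comes back.
    let existed = map
        .drop_file(HgPath::new(b"f"), EntryState::Removed)
        .unwrap();
    assert!(existed);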
215
217
216 pub fn clear_ambiguous_times(
218 pub fn clear_ambiguous_times(
217 &mut self,
219 &mut self,
218 filenames: Vec<HgPathBuf>,
220 filenames: Vec<HgPathBuf>,
219 now: i32,
221 now: i32,
220 ) {
222 ) {
221 for filename in filenames {
223 for filename in filenames {
222 if let Some(entry) = self.state_map.get_mut(&filename) {
224 if let Some(entry) = self.state_map.get_mut(&filename) {
223 if entry.clear_ambiguous_mtime(now) {
225 if entry.clear_ambiguous_mtime(now) {
224 self.get_non_normal_other_parent_entries()
226 self.get_non_normal_other_parent_entries()
225 .0
227 .0
226 .insert(filename.to_owned());
228 .insert(filename.to_owned());
227 }
229 }
228 }
230 }
229 }
231 }
230 }
232 }
231
233
232 pub fn non_normal_entries_remove(&mut self, key: impl AsRef<HgPath>) {
234 pub fn non_normal_entries_remove(&mut self, key: impl AsRef<HgPath>) {
233 self.get_non_normal_other_parent_entries()
235 self.get_non_normal_other_parent_entries()
234 .0
236 .0
235 .remove(key.as_ref());
237 .remove(key.as_ref());
236 }
238 }
237
239
238 pub fn non_normal_entries_union(
240 pub fn non_normal_entries_union(
239 &mut self,
241 &mut self,
240 other: HashSet<HgPathBuf>,
242 other: HashSet<HgPathBuf>,
241 ) -> Vec<HgPathBuf> {
243 ) -> Vec<HgPathBuf> {
242 self.get_non_normal_other_parent_entries()
244 self.get_non_normal_other_parent_entries()
243 .0
245 .0
244 .union(&other)
246 .union(&other)
245 .map(ToOwned::to_owned)
247 .map(ToOwned::to_owned)
246 .collect()
248 .collect()
247 }
249 }
248
250
249 pub fn get_non_normal_other_parent_entries(
251 pub fn get_non_normal_other_parent_entries(
250 &mut self,
252 &mut self,
251 ) -> (&mut HashSet<HgPathBuf>, &mut HashSet<HgPathBuf>) {
253 ) -> (&mut HashSet<HgPathBuf>, &mut HashSet<HgPathBuf>) {
252 self.set_non_normal_other_parent_entries(false);
254 self.set_non_normal_other_parent_entries(false);
253 (
255 (
254 self.non_normal_set.as_mut().unwrap(),
256 self.non_normal_set.as_mut().unwrap(),
255 self.other_parent_set.as_mut().unwrap(),
257 self.other_parent_set.as_mut().unwrap(),
256 )
258 )
257 }
259 }
258
260
259 /// Useful to get immutable references to those sets in contexts where
261 /// Useful to get immutable references to those sets in contexts where
260 /// you only have an immutable reference to the `DirstateMap`, like when
262 /// you only have an immutable reference to the `DirstateMap`, like when
261 /// sharing references with Python.
263 /// sharing references with Python.
262 ///
264 ///
263 /// TODO, get rid of this along with the other "setter/getter" stuff when
265 /// TODO, get rid of this along with the other "setter/getter" stuff when
264 /// a nice typestate plan is defined.
266 /// a nice typestate plan is defined.
265 ///
267 ///
266 /// # Panics
268 /// # Panics
267 ///
269 ///
268 /// Will panic if either set is `None`.
270 /// Will panic if either set is `None`.
269 pub fn get_non_normal_other_parent_entries_panic(
271 pub fn get_non_normal_other_parent_entries_panic(
270 &self,
272 &self,
271 ) -> (&HashSet<HgPathBuf>, &HashSet<HgPathBuf>) {
273 ) -> (&HashSet<HgPathBuf>, &HashSet<HgPathBuf>) {
272 (
274 (
273 self.non_normal_set.as_ref().unwrap(),
275 self.non_normal_set.as_ref().unwrap(),
274 self.other_parent_set.as_ref().unwrap(),
276 self.other_parent_set.as_ref().unwrap(),
275 )
277 )
276 }
278 }
277
279
278 pub fn set_non_normal_other_parent_entries(&mut self, force: bool) {
280 pub fn set_non_normal_other_parent_entries(&mut self, force: bool) {
279 if !force
281 if !force
280 && self.non_normal_set.is_some()
282 && self.non_normal_set.is_some()
281 && self.other_parent_set.is_some()
283 && self.other_parent_set.is_some()
282 {
284 {
283 return;
285 return;
284 }
286 }
285 let mut non_normal = HashSet::new();
287 let mut non_normal = HashSet::new();
286 let mut other_parent = HashSet::new();
288 let mut other_parent = HashSet::new();
287
289
288 for (filename, entry) in self.state_map.iter() {
290 for (filename, entry) in self.state_map.iter() {
289 if entry.is_non_normal() {
291 if entry.is_non_normal() {
290 non_normal.insert(filename.to_owned());
292 non_normal.insert(filename.to_owned());
291 }
293 }
292 if entry.is_from_other_parent() {
294 if entry.is_from_other_parent() {
293 other_parent.insert(filename.to_owned());
295 other_parent.insert(filename.to_owned());
294 }
296 }
295 }
297 }
296 self.non_normal_set = Some(non_normal);
298 self.non_normal_set = Some(non_normal);
297 self.other_parent_set = Some(other_parent);
299 self.other_parent_set = Some(other_parent);
298 }
300 }
299
301
300 /// Both of these setters and their uses appear to be the simplest way to
302 /// Both of these setters and their uses appear to be the simplest way to
301 /// emulate a Python lazy property, but the result is ugly and unidiomatic.
303 /// emulate a Python lazy property, but the result is ugly and unidiomatic.
302 /// TODO One day, rewriting this struct using the typestate might be a
304 /// TODO One day, rewriting this struct using the typestate might be a
303 /// good idea.
305 /// good idea.
304 pub fn set_all_dirs(&mut self) -> Result<(), DirstateError> {
306 pub fn set_all_dirs(&mut self) -> Result<(), DirstateError> {
305 if self.all_dirs.is_none() {
307 if self.all_dirs.is_none() {
306 self.all_dirs = Some(DirsMultiset::from_dirstate(
308 self.all_dirs = Some(DirsMultiset::from_dirstate(
307 self.state_map.iter().map(|(k, v)| Ok((k, *v))),
309 self.state_map.iter().map(|(k, v)| Ok((k, *v))),
308 None,
310 None,
309 )?);
311 )?);
310 }
312 }
311 Ok(())
313 Ok(())
312 }
314 }
313
315
314 pub fn set_dirs(&mut self) -> Result<(), DirstateError> {
316 pub fn set_dirs(&mut self) -> Result<(), DirstateError> {
315 if self.dirs.is_none() {
317 if self.dirs.is_none() {
316 self.dirs = Some(DirsMultiset::from_dirstate(
318 self.dirs = Some(DirsMultiset::from_dirstate(
317 self.state_map.iter().map(|(k, v)| Ok((k, *v))),
319 self.state_map.iter().map(|(k, v)| Ok((k, *v))),
318 Some(EntryState::Removed),
320 Some(EntryState::Removed),
319 )?);
321 )?);
320 }
322 }
321 Ok(())
323 Ok(())
322 }
324 }
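For comparison, a sketch of the lazy-initialization pattern these two setters emulate, built on `Option::get_or_insert_with`. It is illustrative only and not a drop-in replacement, since the real code also needs to propagate errors from `DirsMultiset::from_dirstate` and keep other fields mutably accessible:

    // Generic illustration, not taken from this crate.
    struct LazyCache<T> {
        value: Option<T>,
    }

    impl<T> LazyCache<T> {
        fn get_or_init(&mut self, init: impl FnOnce() -> T) -> &mut T {
            // Compute the value on first access, then reuse it.
            self.value.get_or_insert_with(init)
        }
    }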
323
325
324 pub fn has_tracked_dir(
326 pub fn has_tracked_dir(
325 &mut self,
327 &mut self,
326 directory: &HgPath,
328 directory: &HgPath,
327 ) -> Result<bool, DirstateError> {
329 ) -> Result<bool, DirstateError> {
328 self.set_dirs()?;
330 self.set_dirs()?;
329 Ok(self.dirs.as_ref().unwrap().contains(directory))
331 Ok(self.dirs.as_ref().unwrap().contains(directory))
330 }
332 }
331
333
332 pub fn has_dir(
334 pub fn has_dir(
333 &mut self,
335 &mut self,
334 directory: &HgPath,
336 directory: &HgPath,
335 ) -> Result<bool, DirstateError> {
337 ) -> Result<bool, DirstateError> {
336 self.set_all_dirs()?;
338 self.set_all_dirs()?;
337 Ok(self.all_dirs.as_ref().unwrap().contains(directory))
339 Ok(self.all_dirs.as_ref().unwrap().contains(directory))
338 }
340 }
339
341
340 #[timed]
342 #[timed]
341 pub fn read(
343 pub fn read(
342 &mut self,
344 &mut self,
343 file_contents: &[u8],
345 file_contents: &[u8],
344 ) -> Result<Option<DirstateParents>, DirstateError> {
346 ) -> Result<Option<DirstateParents>, DirstateError> {
345 if file_contents.is_empty() {
347 if file_contents.is_empty() {
346 return Ok(None);
348 return Ok(None);
347 }
349 }
348
350
349 let (parents, entries, copies) = parse_dirstate(file_contents)?;
351 let (parents, entries, copies) = parse_dirstate(file_contents)?;
350 self.state_map.extend(
352 self.state_map.extend(
351 entries
353 entries
352 .into_iter()
354 .into_iter()
353 .map(|(path, entry)| (path.to_owned(), entry)),
355 .map(|(path, entry)| (path.to_owned(), entry)),
354 );
356 );
355 self.copy_map.extend(
357 self.copy_map.extend(
356 copies
358 copies
357 .into_iter()
359 .into_iter()
358 .map(|(path, copy)| (path.to_owned(), copy.to_owned())),
360 .map(|(path, copy)| (path.to_owned(), copy.to_owned())),
359 );
361 );
360 Ok(Some(parents.clone()))
362 Ok(Some(parents.clone()))
361 }
363 }
362
364
363 pub fn pack(
365 pub fn pack(
364 &mut self,
366 &mut self,
365 parents: DirstateParents,
367 parents: DirstateParents,
366 now: Timestamp,
368 now: Timestamp,
367 ) -> Result<Vec<u8>, DirstateError> {
369 ) -> Result<Vec<u8>, DirstateError> {
368 let packed =
370 let packed =
369 pack_dirstate(&mut self.state_map, &self.copy_map, parents, now)?;
371 pack_dirstate(&mut self.state_map, &self.copy_map, parents, now)?;
370
372
371 self.set_non_normal_other_parent_entries(true);
373 self.set_non_normal_other_parent_entries(true);
372 Ok(packed)
374 Ok(packed)
373 }
375 }
374 }
376 }
375
377
376 #[cfg(test)]
378 #[cfg(test)]
377 mod tests {
379 mod tests {
378 use super::*;
380 use super::*;
379
381
380 #[test]
382 #[test]
381 fn test_dirs_multiset() {
383 fn test_dirs_multiset() {
382 let mut map = DirstateMap::new();
384 let mut map = DirstateMap::new();
383 assert!(map.dirs.is_none());
385 assert!(map.dirs.is_none());
384 assert!(map.all_dirs.is_none());
386 assert!(map.all_dirs.is_none());
385
387
386 assert_eq!(map.has_dir(HgPath::new(b"nope")).unwrap(), false);
388 assert_eq!(map.has_dir(HgPath::new(b"nope")).unwrap(), false);
387 assert!(map.all_dirs.is_some());
389 assert!(map.all_dirs.is_some());
388 assert!(map.dirs.is_none());
390 assert!(map.dirs.is_none());
389
391
390 assert_eq!(map.has_tracked_dir(HgPath::new(b"nope")).unwrap(), false);
392 assert_eq!(map.has_tracked_dir(HgPath::new(b"nope")).unwrap(), false);
391 assert!(map.dirs.is_some());
393 assert!(map.dirs.is_some());
392 }
394 }
393
395
394 #[test]
396 #[test]
395 fn test_add_file() {
397 fn test_add_file() {
396 let mut map = DirstateMap::new();
398 let mut map = DirstateMap::new();
397
399
398 assert_eq!(0, map.len());
400 assert_eq!(0, map.len());
399
401
400 map.add_file(
402 map.add_file(
401 HgPath::new(b"meh"),
403 HgPath::new(b"meh"),
402 DirstateEntry {
404 DirstateEntry {
403 state: EntryState::Normal,
405 state: EntryState::Normal,
404 mode: 1337,
406 mode: 1337,
405 mtime: 1337,
407 mtime: 1337,
406 size: 1337,
408 size: 1337,
407 },
409 },
408 false,
410 false,
409 false,
411 false,
412 false,
410 )
413 )
411 .unwrap();
414 .unwrap();
412
415
413 assert_eq!(1, map.len());
416 assert_eq!(1, map.len());
414 assert_eq!(0, map.get_non_normal_other_parent_entries().0.len());
417 assert_eq!(0, map.get_non_normal_other_parent_entries().0.len());
415 assert_eq!(0, map.get_non_normal_other_parent_entries().1.len());
418 assert_eq!(0, map.get_non_normal_other_parent_entries().1.len());
416 }
419 }
417
420
418 #[test]
421 #[test]
419 fn test_non_normal_other_parent_entries() {
422 fn test_non_normal_other_parent_entries() {
420 let mut map: DirstateMap = [
423 let mut map: DirstateMap = [
421 (b"f1", (EntryState::Removed, 1337, 1337, 1337)),
424 (b"f1", (EntryState::Removed, 1337, 1337, 1337)),
422 (b"f2", (EntryState::Normal, 1337, 1337, -1)),
425 (b"f2", (EntryState::Normal, 1337, 1337, -1)),
423 (b"f3", (EntryState::Normal, 1337, 1337, 1337)),
426 (b"f3", (EntryState::Normal, 1337, 1337, 1337)),
424 (b"f4", (EntryState::Normal, 1337, -2, 1337)),
427 (b"f4", (EntryState::Normal, 1337, -2, 1337)),
425 (b"f5", (EntryState::Added, 1337, 1337, 1337)),
428 (b"f5", (EntryState::Added, 1337, 1337, 1337)),
426 (b"f6", (EntryState::Added, 1337, 1337, -1)),
429 (b"f6", (EntryState::Added, 1337, 1337, -1)),
427 (b"f7", (EntryState::Merged, 1337, 1337, -1)),
430 (b"f7", (EntryState::Merged, 1337, 1337, -1)),
428 (b"f8", (EntryState::Merged, 1337, 1337, 1337)),
431 (b"f8", (EntryState::Merged, 1337, 1337, 1337)),
429 (b"f9", (EntryState::Merged, 1337, -2, 1337)),
432 (b"f9", (EntryState::Merged, 1337, -2, 1337)),
430 (b"fa", (EntryState::Added, 1337, -2, 1337)),
433 (b"fa", (EntryState::Added, 1337, -2, 1337)),
431 (b"fb", (EntryState::Removed, 1337, -2, 1337)),
434 (b"fb", (EntryState::Removed, 1337, -2, 1337)),
432 ]
435 ]
433 .iter()
436 .iter()
434 .map(|(fname, (state, mode, size, mtime))| {
437 .map(|(fname, (state, mode, size, mtime))| {
435 (
438 (
436 HgPathBuf::from_bytes(fname.as_ref()),
439 HgPathBuf::from_bytes(fname.as_ref()),
437 DirstateEntry {
440 DirstateEntry {
438 state: *state,
441 state: *state,
439 mode: *mode,
442 mode: *mode,
440 size: *size,
443 size: *size,
441 mtime: *mtime,
444 mtime: *mtime,
442 },
445 },
443 )
446 )
444 })
447 })
445 .collect();
448 .collect();
446
449
447 let mut non_normal = [
450 let mut non_normal = [
448 b"f1", b"f2", b"f5", b"f6", b"f7", b"f8", b"f9", b"fa", b"fb",
451 b"f1", b"f2", b"f5", b"f6", b"f7", b"f8", b"f9", b"fa", b"fb",
449 ]
452 ]
450 .iter()
453 .iter()
451 .map(|x| HgPathBuf::from_bytes(x.as_ref()))
454 .map(|x| HgPathBuf::from_bytes(x.as_ref()))
452 .collect();
455 .collect();
453
456
454 let mut other_parent = HashSet::new();
457 let mut other_parent = HashSet::new();
455 other_parent.insert(HgPathBuf::from_bytes(b"f4"));
458 other_parent.insert(HgPathBuf::from_bytes(b"f4"));
456 let entries = map.get_non_normal_other_parent_entries();
459 let entries = map.get_non_normal_other_parent_entries();
457
460
458 assert_eq!(
461 assert_eq!(
459 (&mut non_normal, &mut other_parent),
462 (&mut non_normal, &mut other_parent),
460 (entries.0, entries.1)
463 (entries.0, entries.1)
461 );
464 );
462 }
465 }
463 }
466 }
@@ -1,1194 +1,1196 b''
1 use bytes_cast::BytesCast;
1 use bytes_cast::BytesCast;
2 use micro_timer::timed;
2 use micro_timer::timed;
3 use std::borrow::Cow;
3 use std::borrow::Cow;
4 use std::convert::TryInto;
4 use std::convert::TryInto;
5 use std::path::PathBuf;
5 use std::path::PathBuf;
6
6
7 use super::on_disk;
7 use super::on_disk;
8 use super::on_disk::DirstateV2ParseError;
8 use super::on_disk::DirstateV2ParseError;
9 use super::path_with_basename::WithBasename;
9 use super::path_with_basename::WithBasename;
10 use crate::dirstate::parsers::pack_entry;
10 use crate::dirstate::parsers::pack_entry;
11 use crate::dirstate::parsers::packed_entry_size;
11 use crate::dirstate::parsers::packed_entry_size;
12 use crate::dirstate::parsers::parse_dirstate_entries;
12 use crate::dirstate::parsers::parse_dirstate_entries;
13 use crate::dirstate::parsers::Timestamp;
13 use crate::dirstate::parsers::Timestamp;
14 use crate::dirstate::MTIME_UNSET;
14 use crate::dirstate::MTIME_UNSET;
15 use crate::dirstate::SIZE_FROM_OTHER_PARENT;
15 use crate::dirstate::SIZE_FROM_OTHER_PARENT;
16 use crate::dirstate::SIZE_NON_NORMAL;
16 use crate::dirstate::SIZE_NON_NORMAL;
17 use crate::dirstate::V1_RANGEMASK;
17 use crate::dirstate::V1_RANGEMASK;
18 use crate::matchers::Matcher;
18 use crate::matchers::Matcher;
19 use crate::utils::hg_path::{HgPath, HgPathBuf};
19 use crate::utils::hg_path::{HgPath, HgPathBuf};
20 use crate::CopyMapIter;
20 use crate::CopyMapIter;
21 use crate::DirstateEntry;
21 use crate::DirstateEntry;
22 use crate::DirstateError;
22 use crate::DirstateError;
23 use crate::DirstateParents;
23 use crate::DirstateParents;
24 use crate::DirstateStatus;
24 use crate::DirstateStatus;
25 use crate::EntryState;
25 use crate::EntryState;
26 use crate::FastHashMap;
26 use crate::FastHashMap;
27 use crate::PatternFileWarning;
27 use crate::PatternFileWarning;
28 use crate::StateMapIter;
28 use crate::StateMapIter;
29 use crate::StatusError;
29 use crate::StatusError;
30 use crate::StatusOptions;
30 use crate::StatusOptions;
31
31
32 pub struct DirstateMap<'on_disk> {
32 pub struct DirstateMap<'on_disk> {
33 /// Contents of the `.hg/dirstate` file
33 /// Contents of the `.hg/dirstate` file
34 pub(super) on_disk: &'on_disk [u8],
34 pub(super) on_disk: &'on_disk [u8],
35
35
36 pub(super) root: ChildNodes<'on_disk>,
36 pub(super) root: ChildNodes<'on_disk>,
37
37
38 /// Number of nodes anywhere in the tree that have `.entry.is_some()`.
38 /// Number of nodes anywhere in the tree that have `.entry.is_some()`.
39 pub(super) nodes_with_entry_count: u32,
39 pub(super) nodes_with_entry_count: u32,
40
40
41 /// Number of nodes anywhere in the tree that have
41 /// Number of nodes anywhere in the tree that have
42 /// `.copy_source.is_some()`.
42 /// `.copy_source.is_some()`.
43 pub(super) nodes_with_copy_source_count: u32,
43 pub(super) nodes_with_copy_source_count: u32,
44
44
45 /// See on_disk::Header
45 /// See on_disk::Header
46 pub(super) ignore_patterns_hash: on_disk::IgnorePatternsHash,
46 pub(super) ignore_patterns_hash: on_disk::IgnorePatternsHash,
47 }
47 }
48
48
49 /// Using a plain `HgPathBuf` of the full path from the repository root as a
49 /// Using a plain `HgPathBuf` of the full path from the repository root as a
50 /// map key would also work: all paths in a given map have the same parent
50 /// map key would also work: all paths in a given map have the same parent
51 /// path, so comparing full paths gives the same result as comparing base
51 /// path, so comparing full paths gives the same result as comparing base
52 /// names. However `HashMap` would waste time always re-hashing the same
52 /// names. However `HashMap` would waste time always re-hashing the same
53 /// string prefix.
53 /// string prefix.
54 pub(super) type NodeKey<'on_disk> = WithBasename<Cow<'on_disk, HgPath>>;
54 pub(super) type NodeKey<'on_disk> = WithBasename<Cow<'on_disk, HgPath>>;
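A small illustration of the point above (not taken from this crate): sibling entries share their directory prefix, so keying the per-directory map by base name means the hash only covers the part of the path that actually differs:

    let full_paths = ["some/deep/directory/a", "some/deep/directory/b"];
    let base_names: Vec<&str> = full_paths
        .iter()
        .map(|p| p.rsplit('/').next().unwrap()) // hash input per sibling: "a", "b"
        .collect();
    assert_eq!(base_names, ["a", "b"]);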
55
55
56 /// Similar to `&'tree Cow<'on_disk, HgPath>`, but can also be returned
56 /// Similar to `&'tree Cow<'on_disk, HgPath>`, but can also be returned
57 /// for on-disk nodes that don’t actually have a `Cow` to borrow.
57 /// for on-disk nodes that don’t actually have a `Cow` to borrow.
58 pub(super) enum BorrowedPath<'tree, 'on_disk> {
58 pub(super) enum BorrowedPath<'tree, 'on_disk> {
59 InMemory(&'tree HgPathBuf),
59 InMemory(&'tree HgPathBuf),
60 OnDisk(&'on_disk HgPath),
60 OnDisk(&'on_disk HgPath),
61 }
61 }
62
62
63 pub(super) enum ChildNodes<'on_disk> {
63 pub(super) enum ChildNodes<'on_disk> {
64 InMemory(FastHashMap<NodeKey<'on_disk>, Node<'on_disk>>),
64 InMemory(FastHashMap<NodeKey<'on_disk>, Node<'on_disk>>),
65 OnDisk(&'on_disk [on_disk::Node]),
65 OnDisk(&'on_disk [on_disk::Node]),
66 }
66 }
67
67
68 pub(super) enum ChildNodesRef<'tree, 'on_disk> {
68 pub(super) enum ChildNodesRef<'tree, 'on_disk> {
69 InMemory(&'tree FastHashMap<NodeKey<'on_disk>, Node<'on_disk>>),
69 InMemory(&'tree FastHashMap<NodeKey<'on_disk>, Node<'on_disk>>),
70 OnDisk(&'on_disk [on_disk::Node]),
70 OnDisk(&'on_disk [on_disk::Node]),
71 }
71 }
72
72
73 pub(super) enum NodeRef<'tree, 'on_disk> {
73 pub(super) enum NodeRef<'tree, 'on_disk> {
74 InMemory(&'tree NodeKey<'on_disk>, &'tree Node<'on_disk>),
74 InMemory(&'tree NodeKey<'on_disk>, &'tree Node<'on_disk>),
75 OnDisk(&'on_disk on_disk::Node),
75 OnDisk(&'on_disk on_disk::Node),
76 }
76 }
77
77
78 impl<'tree, 'on_disk> BorrowedPath<'tree, 'on_disk> {
78 impl<'tree, 'on_disk> BorrowedPath<'tree, 'on_disk> {
79 pub fn detach_from_tree(&self) -> Cow<'on_disk, HgPath> {
79 pub fn detach_from_tree(&self) -> Cow<'on_disk, HgPath> {
80 match *self {
80 match *self {
81 BorrowedPath::InMemory(in_memory) => Cow::Owned(in_memory.clone()),
81 BorrowedPath::InMemory(in_memory) => Cow::Owned(in_memory.clone()),
82 BorrowedPath::OnDisk(on_disk) => Cow::Borrowed(on_disk),
82 BorrowedPath::OnDisk(on_disk) => Cow::Borrowed(on_disk),
83 }
83 }
84 }
84 }
85 }
85 }
86
86
87 impl<'tree, 'on_disk> std::ops::Deref for BorrowedPath<'tree, 'on_disk> {
87 impl<'tree, 'on_disk> std::ops::Deref for BorrowedPath<'tree, 'on_disk> {
88 type Target = HgPath;
88 type Target = HgPath;
89
89
90 fn deref(&self) -> &HgPath {
90 fn deref(&self) -> &HgPath {
91 match *self {
91 match *self {
92 BorrowedPath::InMemory(in_memory) => in_memory,
92 BorrowedPath::InMemory(in_memory) => in_memory,
93 BorrowedPath::OnDisk(on_disk) => on_disk,
93 BorrowedPath::OnDisk(on_disk) => on_disk,
94 }
94 }
95 }
95 }
96 }
96 }
97
97
98 impl Default for ChildNodes<'_> {
98 impl Default for ChildNodes<'_> {
99 fn default() -> Self {
99 fn default() -> Self {
100 ChildNodes::InMemory(Default::default())
100 ChildNodes::InMemory(Default::default())
101 }
101 }
102 }
102 }
103
103
104 impl<'on_disk> ChildNodes<'on_disk> {
104 impl<'on_disk> ChildNodes<'on_disk> {
105 pub(super) fn as_ref<'tree>(
105 pub(super) fn as_ref<'tree>(
106 &'tree self,
106 &'tree self,
107 ) -> ChildNodesRef<'tree, 'on_disk> {
107 ) -> ChildNodesRef<'tree, 'on_disk> {
108 match self {
108 match self {
109 ChildNodes::InMemory(nodes) => ChildNodesRef::InMemory(nodes),
109 ChildNodes::InMemory(nodes) => ChildNodesRef::InMemory(nodes),
110 ChildNodes::OnDisk(nodes) => ChildNodesRef::OnDisk(nodes),
110 ChildNodes::OnDisk(nodes) => ChildNodesRef::OnDisk(nodes),
111 }
111 }
112 }
112 }
113
113
114 pub(super) fn is_empty(&self) -> bool {
114 pub(super) fn is_empty(&self) -> bool {
115 match self {
115 match self {
116 ChildNodes::InMemory(nodes) => nodes.is_empty(),
116 ChildNodes::InMemory(nodes) => nodes.is_empty(),
117 ChildNodes::OnDisk(nodes) => nodes.is_empty(),
117 ChildNodes::OnDisk(nodes) => nodes.is_empty(),
118 }
118 }
119 }
119 }
120
120
121 pub(super) fn make_mut(
121 pub(super) fn make_mut(
122 &mut self,
122 &mut self,
123 on_disk: &'on_disk [u8],
123 on_disk: &'on_disk [u8],
124 ) -> Result<
124 ) -> Result<
125 &mut FastHashMap<NodeKey<'on_disk>, Node<'on_disk>>,
125 &mut FastHashMap<NodeKey<'on_disk>, Node<'on_disk>>,
126 DirstateV2ParseError,
126 DirstateV2ParseError,
127 > {
127 > {
128 match self {
128 match self {
129 ChildNodes::InMemory(nodes) => Ok(nodes),
129 ChildNodes::InMemory(nodes) => Ok(nodes),
130 ChildNodes::OnDisk(nodes) => {
130 ChildNodes::OnDisk(nodes) => {
131 let nodes = nodes
131 let nodes = nodes
132 .iter()
132 .iter()
133 .map(|node| {
133 .map(|node| {
134 Ok((
134 Ok((
135 node.path(on_disk)?,
135 node.path(on_disk)?,
136 node.to_in_memory_node(on_disk)?,
136 node.to_in_memory_node(on_disk)?,
137 ))
137 ))
138 })
138 })
139 .collect::<Result<_, _>>()?;
139 .collect::<Result<_, _>>()?;
140 *self = ChildNodes::InMemory(nodes);
140 *self = ChildNodes::InMemory(nodes);
141 match self {
141 match self {
142 ChildNodes::InMemory(nodes) => Ok(nodes),
142 ChildNodes::InMemory(nodes) => Ok(nodes),
143 ChildNodes::OnDisk(_) => unreachable!(),
143 ChildNodes::OnDisk(_) => unreachable!(),
144 }
144 }
145 }
145 }
146 }
146 }
147 }
147 }
148 }
148 }
149
149
150 impl<'tree, 'on_disk> ChildNodesRef<'tree, 'on_disk> {
150 impl<'tree, 'on_disk> ChildNodesRef<'tree, 'on_disk> {
151 pub(super) fn get(
151 pub(super) fn get(
152 &self,
152 &self,
153 base_name: &HgPath,
153 base_name: &HgPath,
154 on_disk: &'on_disk [u8],
154 on_disk: &'on_disk [u8],
155 ) -> Result<Option<NodeRef<'tree, 'on_disk>>, DirstateV2ParseError> {
155 ) -> Result<Option<NodeRef<'tree, 'on_disk>>, DirstateV2ParseError> {
156 match self {
156 match self {
157 ChildNodesRef::InMemory(nodes) => Ok(nodes
157 ChildNodesRef::InMemory(nodes) => Ok(nodes
158 .get_key_value(base_name)
158 .get_key_value(base_name)
159 .map(|(k, v)| NodeRef::InMemory(k, v))),
159 .map(|(k, v)| NodeRef::InMemory(k, v))),
160 ChildNodesRef::OnDisk(nodes) => {
160 ChildNodesRef::OnDisk(nodes) => {
161 let mut parse_result = Ok(());
161 let mut parse_result = Ok(());
162 let search_result = nodes.binary_search_by(|node| {
162 let search_result = nodes.binary_search_by(|node| {
163 match node.base_name(on_disk) {
163 match node.base_name(on_disk) {
164 Ok(node_base_name) => node_base_name.cmp(base_name),
164 Ok(node_base_name) => node_base_name.cmp(base_name),
165 Err(e) => {
165 Err(e) => {
166 parse_result = Err(e);
166 parse_result = Err(e);
167 // Dummy comparison result, `search_result` won’t
167 // Dummy comparison result, `search_result` won’t
168 // be used since `parse_result` is an error
168 // be used since `parse_result` is an error
169 std::cmp::Ordering::Equal
169 std::cmp::Ordering::Equal
170 }
170 }
171 }
171 }
172 });
172 });
173 parse_result.map(|()| {
173 parse_result.map(|()| {
174 search_result.ok().map(|i| NodeRef::OnDisk(&nodes[i]))
174 search_result.ok().map(|i| NodeRef::OnDisk(&nodes[i]))
175 })
175 })
176 }
176 }
177 }
177 }
178 }
178 }
179
179
180 /// Iterate in undefined order
180 /// Iterate in undefined order
181 pub(super) fn iter(
181 pub(super) fn iter(
182 &self,
182 &self,
183 ) -> impl Iterator<Item = NodeRef<'tree, 'on_disk>> {
183 ) -> impl Iterator<Item = NodeRef<'tree, 'on_disk>> {
184 match self {
184 match self {
185 ChildNodesRef::InMemory(nodes) => itertools::Either::Left(
185 ChildNodesRef::InMemory(nodes) => itertools::Either::Left(
186 nodes.iter().map(|(k, v)| NodeRef::InMemory(k, v)),
186 nodes.iter().map(|(k, v)| NodeRef::InMemory(k, v)),
187 ),
187 ),
188 ChildNodesRef::OnDisk(nodes) => {
188 ChildNodesRef::OnDisk(nodes) => {
189 itertools::Either::Right(nodes.iter().map(NodeRef::OnDisk))
189 itertools::Either::Right(nodes.iter().map(NodeRef::OnDisk))
190 }
190 }
191 }
191 }
192 }
192 }
193
193
194 /// Iterate in parallel in undefined order
194 /// Iterate in parallel in undefined order
195 pub(super) fn par_iter(
195 pub(super) fn par_iter(
196 &self,
196 &self,
197 ) -> impl rayon::iter::ParallelIterator<Item = NodeRef<'tree, 'on_disk>>
197 ) -> impl rayon::iter::ParallelIterator<Item = NodeRef<'tree, 'on_disk>>
198 {
198 {
199 use rayon::prelude::*;
199 use rayon::prelude::*;
200 match self {
200 match self {
201 ChildNodesRef::InMemory(nodes) => rayon::iter::Either::Left(
201 ChildNodesRef::InMemory(nodes) => rayon::iter::Either::Left(
202 nodes.par_iter().map(|(k, v)| NodeRef::InMemory(k, v)),
202 nodes.par_iter().map(|(k, v)| NodeRef::InMemory(k, v)),
203 ),
203 ),
204 ChildNodesRef::OnDisk(nodes) => rayon::iter::Either::Right(
204 ChildNodesRef::OnDisk(nodes) => rayon::iter::Either::Right(
205 nodes.par_iter().map(NodeRef::OnDisk),
205 nodes.par_iter().map(NodeRef::OnDisk),
206 ),
206 ),
207 }
207 }
208 }
208 }
209
209
210 pub(super) fn sorted(&self) -> Vec<NodeRef<'tree, 'on_disk>> {
210 pub(super) fn sorted(&self) -> Vec<NodeRef<'tree, 'on_disk>> {
211 match self {
211 match self {
212 ChildNodesRef::InMemory(nodes) => {
212 ChildNodesRef::InMemory(nodes) => {
213 let mut vec: Vec<_> = nodes
213 let mut vec: Vec<_> = nodes
214 .iter()
214 .iter()
215 .map(|(k, v)| NodeRef::InMemory(k, v))
215 .map(|(k, v)| NodeRef::InMemory(k, v))
216 .collect();
216 .collect();
217 fn sort_key<'a>(node: &'a NodeRef) -> &'a HgPath {
217 fn sort_key<'a>(node: &'a NodeRef) -> &'a HgPath {
218 match node {
218 match node {
219 NodeRef::InMemory(path, _node) => path.base_name(),
219 NodeRef::InMemory(path, _node) => path.base_name(),
220 NodeRef::OnDisk(_) => unreachable!(),
220 NodeRef::OnDisk(_) => unreachable!(),
221 }
221 }
222 }
222 }
223 // `sort_unstable_by_key` doesn’t allow keys borrowing from the
223 // `sort_unstable_by_key` doesn’t allow keys borrowing from the
224 // value: https://github.com/rust-lang/rust/issues/34162
224 // value: https://github.com/rust-lang/rust/issues/34162
225 vec.sort_unstable_by(|a, b| sort_key(a).cmp(sort_key(b)));
225 vec.sort_unstable_by(|a, b| sort_key(a).cmp(sort_key(b)));
226 vec
226 vec
227 }
227 }
228 ChildNodesRef::OnDisk(nodes) => {
228 ChildNodesRef::OnDisk(nodes) => {
229 // Nodes on disk are already sorted
229 // Nodes on disk are already sorted
230 nodes.iter().map(NodeRef::OnDisk).collect()
230 nodes.iter().map(NodeRef::OnDisk).collect()
231 }
231 }
232 }
232 }
233 }
233 }
234 }
234 }
235
235
236 impl<'tree, 'on_disk> NodeRef<'tree, 'on_disk> {
236 impl<'tree, 'on_disk> NodeRef<'tree, 'on_disk> {
237 pub(super) fn full_path(
237 pub(super) fn full_path(
238 &self,
238 &self,
239 on_disk: &'on_disk [u8],
239 on_disk: &'on_disk [u8],
240 ) -> Result<&'tree HgPath, DirstateV2ParseError> {
240 ) -> Result<&'tree HgPath, DirstateV2ParseError> {
241 match self {
241 match self {
242 NodeRef::InMemory(path, _node) => Ok(path.full_path()),
242 NodeRef::InMemory(path, _node) => Ok(path.full_path()),
243 NodeRef::OnDisk(node) => node.full_path(on_disk),
243 NodeRef::OnDisk(node) => node.full_path(on_disk),
244 }
244 }
245 }
245 }
246
246
247 /// Returns a `BorrowedPath`, which can be turned into a `Cow<'on_disk,
247 /// Returns a `BorrowedPath`, which can be turned into a `Cow<'on_disk,
248 /// HgPath>` detached from `'tree`
248 /// HgPath>` detached from `'tree`
249 pub(super) fn full_path_borrowed(
249 pub(super) fn full_path_borrowed(
250 &self,
250 &self,
251 on_disk: &'on_disk [u8],
251 on_disk: &'on_disk [u8],
252 ) -> Result<BorrowedPath<'tree, 'on_disk>, DirstateV2ParseError> {
252 ) -> Result<BorrowedPath<'tree, 'on_disk>, DirstateV2ParseError> {
253 match self {
253 match self {
254 NodeRef::InMemory(path, _node) => match path.full_path() {
254 NodeRef::InMemory(path, _node) => match path.full_path() {
255 Cow::Borrowed(on_disk) => Ok(BorrowedPath::OnDisk(on_disk)),
255 Cow::Borrowed(on_disk) => Ok(BorrowedPath::OnDisk(on_disk)),
256 Cow::Owned(in_memory) => Ok(BorrowedPath::InMemory(in_memory)),
256 Cow::Owned(in_memory) => Ok(BorrowedPath::InMemory(in_memory)),
257 },
257 },
258 NodeRef::OnDisk(node) => {
258 NodeRef::OnDisk(node) => {
259 Ok(BorrowedPath::OnDisk(node.full_path(on_disk)?))
259 Ok(BorrowedPath::OnDisk(node.full_path(on_disk)?))
260 }
260 }
261 }
261 }
262 }
262 }
263
263
264 pub(super) fn base_name(
264 pub(super) fn base_name(
265 &self,
265 &self,
266 on_disk: &'on_disk [u8],
266 on_disk: &'on_disk [u8],
267 ) -> Result<&'tree HgPath, DirstateV2ParseError> {
267 ) -> Result<&'tree HgPath, DirstateV2ParseError> {
268 match self {
268 match self {
269 NodeRef::InMemory(path, _node) => Ok(path.base_name()),
269 NodeRef::InMemory(path, _node) => Ok(path.base_name()),
270 NodeRef::OnDisk(node) => node.base_name(on_disk),
270 NodeRef::OnDisk(node) => node.base_name(on_disk),
271 }
271 }
272 }
272 }
273
273
274 pub(super) fn children(
274 pub(super) fn children(
275 &self,
275 &self,
276 on_disk: &'on_disk [u8],
276 on_disk: &'on_disk [u8],
277 ) -> Result<ChildNodesRef<'tree, 'on_disk>, DirstateV2ParseError> {
277 ) -> Result<ChildNodesRef<'tree, 'on_disk>, DirstateV2ParseError> {
278 match self {
278 match self {
279 NodeRef::InMemory(_path, node) => Ok(node.children.as_ref()),
279 NodeRef::InMemory(_path, node) => Ok(node.children.as_ref()),
280 NodeRef::OnDisk(node) => {
280 NodeRef::OnDisk(node) => {
281 Ok(ChildNodesRef::OnDisk(node.children(on_disk)?))
281 Ok(ChildNodesRef::OnDisk(node.children(on_disk)?))
282 }
282 }
283 }
283 }
284 }
284 }
285
285
286 pub(super) fn has_copy_source(&self) -> bool {
286 pub(super) fn has_copy_source(&self) -> bool {
287 match self {
287 match self {
288 NodeRef::InMemory(_path, node) => node.copy_source.is_some(),
288 NodeRef::InMemory(_path, node) => node.copy_source.is_some(),
289 NodeRef::OnDisk(node) => node.has_copy_source(),
289 NodeRef::OnDisk(node) => node.has_copy_source(),
290 }
290 }
291 }
291 }
292
292
293 pub(super) fn copy_source(
293 pub(super) fn copy_source(
294 &self,
294 &self,
295 on_disk: &'on_disk [u8],
295 on_disk: &'on_disk [u8],
296 ) -> Result<Option<&'tree HgPath>, DirstateV2ParseError> {
296 ) -> Result<Option<&'tree HgPath>, DirstateV2ParseError> {
297 match self {
297 match self {
298 NodeRef::InMemory(_path, node) => {
298 NodeRef::InMemory(_path, node) => {
299 Ok(node.copy_source.as_ref().map(|s| &**s))
299 Ok(node.copy_source.as_ref().map(|s| &**s))
300 }
300 }
301 NodeRef::OnDisk(node) => node.copy_source(on_disk),
301 NodeRef::OnDisk(node) => node.copy_source(on_disk),
302 }
302 }
303 }
303 }
304
304
305 pub(super) fn entry(
305 pub(super) fn entry(
306 &self,
306 &self,
307 ) -> Result<Option<DirstateEntry>, DirstateV2ParseError> {
307 ) -> Result<Option<DirstateEntry>, DirstateV2ParseError> {
308 match self {
308 match self {
309 NodeRef::InMemory(_path, node) => {
309 NodeRef::InMemory(_path, node) => {
310 Ok(node.data.as_entry().copied())
310 Ok(node.data.as_entry().copied())
311 }
311 }
312 NodeRef::OnDisk(node) => node.entry(),
312 NodeRef::OnDisk(node) => node.entry(),
313 }
313 }
314 }
314 }
315
315
316 pub(super) fn state(
316 pub(super) fn state(
317 &self,
317 &self,
318 ) -> Result<Option<EntryState>, DirstateV2ParseError> {
318 ) -> Result<Option<EntryState>, DirstateV2ParseError> {
319 match self {
319 match self {
320 NodeRef::InMemory(_path, node) => {
320 NodeRef::InMemory(_path, node) => {
321 Ok(node.data.as_entry().map(|entry| entry.state))
321 Ok(node.data.as_entry().map(|entry| entry.state))
322 }
322 }
323 NodeRef::OnDisk(node) => node.state(),
323 NodeRef::OnDisk(node) => node.state(),
324 }
324 }
325 }
325 }
326
326
327 pub(super) fn cached_directory_mtime(
327 pub(super) fn cached_directory_mtime(
328 &self,
328 &self,
329 ) -> Option<&'tree on_disk::Timestamp> {
329 ) -> Option<&'tree on_disk::Timestamp> {
330 match self {
330 match self {
331 NodeRef::InMemory(_path, node) => match &node.data {
331 NodeRef::InMemory(_path, node) => match &node.data {
332 NodeData::CachedDirectory { mtime } => Some(mtime),
332 NodeData::CachedDirectory { mtime } => Some(mtime),
333 _ => None,
333 _ => None,
334 },
334 },
335 NodeRef::OnDisk(node) => node.cached_directory_mtime(),
335 NodeRef::OnDisk(node) => node.cached_directory_mtime(),
336 }
336 }
337 }
337 }
338
338
339 pub(super) fn descendants_with_entry_count(&self) -> u32 {
339 pub(super) fn descendants_with_entry_count(&self) -> u32 {
340 match self {
340 match self {
341 NodeRef::InMemory(_path, node) => {
341 NodeRef::InMemory(_path, node) => {
342 node.descendants_with_entry_count
342 node.descendants_with_entry_count
343 }
343 }
344 NodeRef::OnDisk(node) => node.descendants_with_entry_count.get(),
344 NodeRef::OnDisk(node) => node.descendants_with_entry_count.get(),
345 }
345 }
346 }
346 }
347
347
348 pub(super) fn tracked_descendants_count(&self) -> u32 {
348 pub(super) fn tracked_descendants_count(&self) -> u32 {
349 match self {
349 match self {
350 NodeRef::InMemory(_path, node) => node.tracked_descendants_count,
350 NodeRef::InMemory(_path, node) => node.tracked_descendants_count,
351 NodeRef::OnDisk(node) => node.tracked_descendants_count.get(),
351 NodeRef::OnDisk(node) => node.tracked_descendants_count.get(),
352 }
352 }
353 }
353 }
354 }
354 }
355
355
356 /// Represents a file or a directory
356 /// Represents a file or a directory
357 #[derive(Default)]
357 #[derive(Default)]
358 pub(super) struct Node<'on_disk> {
358 pub(super) struct Node<'on_disk> {
359 pub(super) data: NodeData,
359 pub(super) data: NodeData,
360
360
361 pub(super) copy_source: Option<Cow<'on_disk, HgPath>>,
361 pub(super) copy_source: Option<Cow<'on_disk, HgPath>>,
362
362
363 pub(super) children: ChildNodes<'on_disk>,
363 pub(super) children: ChildNodes<'on_disk>,
364
364
365 /// How many (non-inclusive) descendants of this node have an entry.
365 /// How many (non-inclusive) descendants of this node have an entry.
366 pub(super) descendants_with_entry_count: u32,
366 pub(super) descendants_with_entry_count: u32,
367
367
368 /// How many (non-inclusive) descendants of this node have an entry whose
368 /// How many (non-inclusive) descendants of this node have an entry whose
369 /// state is "tracked".
369 /// state is "tracked".
370 pub(super) tracked_descendants_count: u32,
370 pub(super) tracked_descendants_count: u32,
371 }
371 }
372
372
373 pub(super) enum NodeData {
373 pub(super) enum NodeData {
374 Entry(DirstateEntry),
374 Entry(DirstateEntry),
375 CachedDirectory { mtime: on_disk::Timestamp },
375 CachedDirectory { mtime: on_disk::Timestamp },
376 None,
376 None,
377 }
377 }
378
378
379 impl Default for NodeData {
379 impl Default for NodeData {
380 fn default() -> Self {
380 fn default() -> Self {
381 NodeData::None
381 NodeData::None
382 }
382 }
383 }
383 }
384
384
385 impl NodeData {
385 impl NodeData {
386 fn has_entry(&self) -> bool {
386 fn has_entry(&self) -> bool {
387 match self {
387 match self {
388 NodeData::Entry(_) => true,
388 NodeData::Entry(_) => true,
389 _ => false,
389 _ => false,
390 }
390 }
391 }
391 }
392
392
393 fn as_entry(&self) -> Option<&DirstateEntry> {
393 fn as_entry(&self) -> Option<&DirstateEntry> {
394 match self {
394 match self {
395 NodeData::Entry(entry) => Some(entry),
395 NodeData::Entry(entry) => Some(entry),
396 _ => None,
396 _ => None,
397 }
397 }
398 }
398 }
399 }
399 }
400
400
401 impl<'on_disk> DirstateMap<'on_disk> {
401 impl<'on_disk> DirstateMap<'on_disk> {
402 pub(super) fn empty(on_disk: &'on_disk [u8]) -> Self {
402 pub(super) fn empty(on_disk: &'on_disk [u8]) -> Self {
403 Self {
403 Self {
404 on_disk,
404 on_disk,
405 root: ChildNodes::default(),
405 root: ChildNodes::default(),
406 nodes_with_entry_count: 0,
406 nodes_with_entry_count: 0,
407 nodes_with_copy_source_count: 0,
407 nodes_with_copy_source_count: 0,
408 ignore_patterns_hash: [0; on_disk::IGNORE_PATTERNS_HASH_LEN],
408 ignore_patterns_hash: [0; on_disk::IGNORE_PATTERNS_HASH_LEN],
409 }
409 }
410 }
410 }
411
411
412 #[timed]
412 #[timed]
413 pub fn new_v2(
413 pub fn new_v2(
414 on_disk: &'on_disk [u8],
414 on_disk: &'on_disk [u8],
415 ) -> Result<(Self, Option<DirstateParents>), DirstateError> {
415 ) -> Result<(Self, Option<DirstateParents>), DirstateError> {
416 Ok(on_disk::read(on_disk)?)
416 Ok(on_disk::read(on_disk)?)
417 }
417 }
418
418
419 #[timed]
419 #[timed]
420 pub fn new_v1(
420 pub fn new_v1(
421 on_disk: &'on_disk [u8],
421 on_disk: &'on_disk [u8],
422 ) -> Result<(Self, Option<DirstateParents>), DirstateError> {
422 ) -> Result<(Self, Option<DirstateParents>), DirstateError> {
423 let mut map = Self::empty(on_disk);
423 let mut map = Self::empty(on_disk);
424 if map.on_disk.is_empty() {
424 if map.on_disk.is_empty() {
425 return Ok((map, None));
425 return Ok((map, None));
426 }
426 }
427
427
428 let parents = parse_dirstate_entries(
428 let parents = parse_dirstate_entries(
429 map.on_disk,
429 map.on_disk,
430 |path, entry, copy_source| {
430 |path, entry, copy_source| {
431 let tracked = entry.state.is_tracked();
431 let tracked = entry.state.is_tracked();
432 let node = Self::get_or_insert_node(
432 let node = Self::get_or_insert_node(
433 map.on_disk,
433 map.on_disk,
434 &mut map.root,
434 &mut map.root,
435 path,
435 path,
436 WithBasename::to_cow_borrowed,
436 WithBasename::to_cow_borrowed,
437 |ancestor| {
437 |ancestor| {
438 if tracked {
438 if tracked {
439 ancestor.tracked_descendants_count += 1
439 ancestor.tracked_descendants_count += 1
440 }
440 }
441 ancestor.descendants_with_entry_count += 1
441 ancestor.descendants_with_entry_count += 1
442 },
442 },
443 )?;
443 )?;
444 assert!(
444 assert!(
445 !node.data.has_entry(),
445 !node.data.has_entry(),
446 "duplicate dirstate entry in read"
446 "duplicate dirstate entry in read"
447 );
447 );
448 assert!(
448 assert!(
449 node.copy_source.is_none(),
449 node.copy_source.is_none(),
450 "duplicate dirstate entry in read"
450 "duplicate dirstate entry in read"
451 );
451 );
452 node.data = NodeData::Entry(*entry);
452 node.data = NodeData::Entry(*entry);
453 node.copy_source = copy_source.map(Cow::Borrowed);
453 node.copy_source = copy_source.map(Cow::Borrowed);
454 map.nodes_with_entry_count += 1;
454 map.nodes_with_entry_count += 1;
455 if copy_source.is_some() {
455 if copy_source.is_some() {
456 map.nodes_with_copy_source_count += 1
456 map.nodes_with_copy_source_count += 1
457 }
457 }
458 Ok(())
458 Ok(())
459 },
459 },
460 )?;
460 )?;
461 let parents = Some(parents.clone());
461 let parents = Some(parents.clone());
462
462
463 Ok((map, parents))
463 Ok((map, parents))
464 }
464 }
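// Editor's note: a hedged usage sketch, not part of this changeset. `bytes`
// stands for the raw contents of a dirstate-v1 file; the signature is the
// one defined just above.
//
// ```
// let (map, parents) = DirstateMap::new_v1(bytes)?;
// // `parents` is `None` only when `bytes` was empty.
// ```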
465
465
466 fn get_node<'tree>(
466 fn get_node<'tree>(
467 &'tree self,
467 &'tree self,
468 path: &HgPath,
468 path: &HgPath,
469 ) -> Result<Option<NodeRef<'tree, 'on_disk>>, DirstateV2ParseError> {
469 ) -> Result<Option<NodeRef<'tree, 'on_disk>>, DirstateV2ParseError> {
470 let mut children = self.root.as_ref();
470 let mut children = self.root.as_ref();
471 let mut components = path.components();
471 let mut components = path.components();
472 let mut component =
472 let mut component =
473 components.next().expect("expected at least one component");
473 components.next().expect("expected at least one component");
474 loop {
474 loop {
475 if let Some(child) = children.get(component, self.on_disk)? {
475 if let Some(child) = children.get(component, self.on_disk)? {
476 if let Some(next_component) = components.next() {
476 if let Some(next_component) = components.next() {
477 component = next_component;
477 component = next_component;
478 children = child.children(self.on_disk)?;
478 children = child.children(self.on_disk)?;
479 } else {
479 } else {
480 return Ok(Some(child));
480 return Ok(Some(child));
481 }
481 }
482 } else {
482 } else {
483 return Ok(None);
483 return Ok(None);
484 }
484 }
485 }
485 }
486 }
486 }
487
487
488 /// Returns a mutable reference to the node at `path` if it exists
488 /// Returns a mutable reference to the node at `path` if it exists
489 ///
489 ///
490 /// This takes `root` instead of `&mut self` so that callers can mutate
490 /// This takes `root` instead of `&mut self` so that callers can mutate
491 /// other fields while the returned borrow is still valid
491 /// other fields while the returned borrow is still valid
492 fn get_node_mut<'tree>(
492 fn get_node_mut<'tree>(
493 on_disk: &'on_disk [u8],
493 on_disk: &'on_disk [u8],
494 root: &'tree mut ChildNodes<'on_disk>,
494 root: &'tree mut ChildNodes<'on_disk>,
495 path: &HgPath,
495 path: &HgPath,
496 ) -> Result<Option<&'tree mut Node<'on_disk>>, DirstateV2ParseError> {
496 ) -> Result<Option<&'tree mut Node<'on_disk>>, DirstateV2ParseError> {
497 let mut children = root;
497 let mut children = root;
498 let mut components = path.components();
498 let mut components = path.components();
499 let mut component =
499 let mut component =
500 components.next().expect("expected at least one component");
500 components.next().expect("expected at least one component");
501 loop {
501 loop {
502 if let Some(child) = children.make_mut(on_disk)?.get_mut(component)
502 if let Some(child) = children.make_mut(on_disk)?.get_mut(component)
503 {
503 {
504 if let Some(next_component) = components.next() {
504 if let Some(next_component) = components.next() {
505 component = next_component;
505 component = next_component;
506 children = &mut child.children;
506 children = &mut child.children;
507 } else {
507 } else {
508 return Ok(Some(child));
508 return Ok(Some(child));
509 }
509 }
510 } else {
510 } else {
511 return Ok(None);
511 return Ok(None);
512 }
512 }
513 }
513 }
514 }
514 }
515
515
516 pub(super) fn get_or_insert<'tree, 'path>(
516 pub(super) fn get_or_insert<'tree, 'path>(
517 &'tree mut self,
517 &'tree mut self,
518 path: &HgPath,
518 path: &HgPath,
519 ) -> Result<&'tree mut Node<'on_disk>, DirstateV2ParseError> {
519 ) -> Result<&'tree mut Node<'on_disk>, DirstateV2ParseError> {
520 Self::get_or_insert_node(
520 Self::get_or_insert_node(
521 self.on_disk,
521 self.on_disk,
522 &mut self.root,
522 &mut self.root,
523 path,
523 path,
524 WithBasename::to_cow_owned,
524 WithBasename::to_cow_owned,
525 |_| {},
525 |_| {},
526 )
526 )
527 }
527 }
528
528
529 pub(super) fn get_or_insert_node<'tree, 'path>(
529 pub(super) fn get_or_insert_node<'tree, 'path>(
530 on_disk: &'on_disk [u8],
530 on_disk: &'on_disk [u8],
531 root: &'tree mut ChildNodes<'on_disk>,
531 root: &'tree mut ChildNodes<'on_disk>,
532 path: &'path HgPath,
532 path: &'path HgPath,
533 to_cow: impl Fn(
533 to_cow: impl Fn(
534 WithBasename<&'path HgPath>,
534 WithBasename<&'path HgPath>,
535 ) -> WithBasename<Cow<'on_disk, HgPath>>,
535 ) -> WithBasename<Cow<'on_disk, HgPath>>,
536 mut each_ancestor: impl FnMut(&mut Node),
536 mut each_ancestor: impl FnMut(&mut Node),
537 ) -> Result<&'tree mut Node<'on_disk>, DirstateV2ParseError> {
537 ) -> Result<&'tree mut Node<'on_disk>, DirstateV2ParseError> {
538 let mut child_nodes = root;
538 let mut child_nodes = root;
539 let mut inclusive_ancestor_paths =
539 let mut inclusive_ancestor_paths =
540 WithBasename::inclusive_ancestors_of(path);
540 WithBasename::inclusive_ancestors_of(path);
541 let mut ancestor_path = inclusive_ancestor_paths
541 let mut ancestor_path = inclusive_ancestor_paths
542 .next()
542 .next()
543 .expect("expected at least one inclusive ancestor");
543 .expect("expected at least one inclusive ancestor");
544 loop {
544 loop {
545 // TODO: can we avoid allocating an owned key in cases where the
545 // TODO: can we avoid allocating an owned key in cases where the
546 // map already contains that key, without introducing double
546 // map already contains that key, without introducing double
547 // lookup?
547 // lookup?
548 let child_node = child_nodes
548 let child_node = child_nodes
549 .make_mut(on_disk)?
549 .make_mut(on_disk)?
550 .entry(to_cow(ancestor_path))
550 .entry(to_cow(ancestor_path))
551 .or_default();
551 .or_default();
552 if let Some(next) = inclusive_ancestor_paths.next() {
552 if let Some(next) = inclusive_ancestor_paths.next() {
553 each_ancestor(child_node);
553 each_ancestor(child_node);
554 ancestor_path = next;
554 ancestor_path = next;
555 child_nodes = &mut child_node.children;
555 child_nodes = &mut child_node.children;
556 } else {
556 } else {
557 return Ok(child_node);
557 return Ok(child_node);
558 }
558 }
559 }
559 }
560 }
560 }
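// Editor's note: a sketch, not part of this changeset, of the `each_ancestor`
// callback pattern above. When inserting a tracked file, every ancestor
// directory node has its counters bumped while the returned leaf node is left
// for the caller to fill in; `on_disk`, `root` and `path` are illustrative.
//
// ```
// let node = Self::get_or_insert_node(
//     on_disk,
//     root,
//     path,
//     WithBasename::to_cow_owned,
//     |ancestor| {
//         ancestor.descendants_with_entry_count += 1;
//         ancestor.tracked_descendants_count += 1;
//     },
// )?;
// ```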
561
561
562 fn add_or_remove_file(
562 fn add_or_remove_file(
563 &mut self,
563 &mut self,
564 path: &HgPath,
564 path: &HgPath,
565 old_state: EntryState,
565 old_state: EntryState,
566 new_entry: DirstateEntry,
566 new_entry: DirstateEntry,
567 ) -> Result<(), DirstateV2ParseError> {
567 ) -> Result<(), DirstateV2ParseError> {
568 let had_entry = old_state != EntryState::Unknown;
568 let had_entry = old_state != EntryState::Unknown;
569 let tracked_count_increment =
569 let tracked_count_increment =
570 match (old_state.is_tracked(), new_entry.state.is_tracked()) {
570 match (old_state.is_tracked(), new_entry.state.is_tracked()) {
571 (false, true) => 1,
571 (false, true) => 1,
572 (true, false) => -1,
572 (true, false) => -1,
573 _ => 0,
573 _ => 0,
574 };
574 };
575
575
576 let node = Self::get_or_insert_node(
576 let node = Self::get_or_insert_node(
577 self.on_disk,
577 self.on_disk,
578 &mut self.root,
578 &mut self.root,
579 path,
579 path,
580 WithBasename::to_cow_owned,
580 WithBasename::to_cow_owned,
581 |ancestor| {
581 |ancestor| {
582 if !had_entry {
582 if !had_entry {
583 ancestor.descendants_with_entry_count += 1;
583 ancestor.descendants_with_entry_count += 1;
584 }
584 }
585
585
586 // We can’t use `+= increment` because the counter is unsigned,
586 // We can’t use `+= increment` because the counter is unsigned,
587 // and we want debug builds to detect accidental underflow
587 // and we want debug builds to detect accidental underflow
588 // through zero
588 // through zero
589 match tracked_count_increment {
589 match tracked_count_increment {
590 1 => ancestor.tracked_descendants_count += 1,
590 1 => ancestor.tracked_descendants_count += 1,
591 -1 => ancestor.tracked_descendants_count -= 1,
591 -1 => ancestor.tracked_descendants_count -= 1,
592 _ => {}
592 _ => {}
593 }
593 }
594 },
594 },
595 )?;
595 )?;
596 if !had_entry {
596 if !had_entry {
597 self.nodes_with_entry_count += 1
597 self.nodes_with_entry_count += 1
598 }
598 }
599 node.data = NodeData::Entry(new_entry);
599 node.data = NodeData::Entry(new_entry);
600 Ok(())
600 Ok(())
601 }
601 }
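// Editor's note: a small illustration, not part of this changeset, of the
// counter bookkeeping above; `was_tracked`, `is_tracked` and `counter` are
// illustrative names. The signed increment is applied through a match so
// that an unsigned underflow still panics in debug builds:
//
// ```
// let delta: i8 = match (was_tracked, is_tracked) {
//     (false, true) => 1,
//     (true, false) => -1,
//     _ => 0,
// };
// match delta {
//     1 => counter += 1,
//     -1 => counter -= 1, // debug builds catch underflow through zero here
//     _ => {}
// }
// ```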
602
602
603 fn iter_nodes<'tree>(
603 fn iter_nodes<'tree>(
604 &'tree self,
604 &'tree self,
605 ) -> impl Iterator<
605 ) -> impl Iterator<
606 Item = Result<NodeRef<'tree, 'on_disk>, DirstateV2ParseError>,
606 Item = Result<NodeRef<'tree, 'on_disk>, DirstateV2ParseError>,
607 > + 'tree {
607 > + 'tree {
608 // Depth first tree traversal.
608 // Depth first tree traversal.
609 //
609 //
610 // If we could afford internal iteration and recursion,
610 // If we could afford internal iteration and recursion,
611 // this would look like:
611 // this would look like:
612 //
612 //
613 // ```
613 // ```
614 // fn traverse_children(
614 // fn traverse_children(
615 // children: &ChildNodes,
615 // children: &ChildNodes,
616 // each: &mut impl FnMut(&Node),
616 // each: &mut impl FnMut(&Node),
617 // ) {
617 // ) {
618 // for child in children.values() {
618 // for child in children.values() {
619 // traverse_children(&child.children, each);
619 // traverse_children(&child.children, each);
620 // each(child);
620 // each(child);
621 // }
621 // }
622 // }
622 // }
623 // ```
623 // ```
624 //
624 //
625 // However we want an external iterator and therefore can’t use the
625 // However we want an external iterator and therefore can’t use the
626 // call stack. Use an explicit stack instead:
626 // call stack. Use an explicit stack instead:
627 let mut stack = Vec::new();
627 let mut stack = Vec::new();
628 let mut iter = self.root.as_ref().iter();
628 let mut iter = self.root.as_ref().iter();
629 std::iter::from_fn(move || {
629 std::iter::from_fn(move || {
630 while let Some(child_node) = iter.next() {
630 while let Some(child_node) = iter.next() {
631 let children = match child_node.children(self.on_disk) {
631 let children = match child_node.children(self.on_disk) {
632 Ok(children) => children,
632 Ok(children) => children,
633 Err(error) => return Some(Err(error)),
633 Err(error) => return Some(Err(error)),
634 };
634 };
635 // Pseudo-recursion
635 // Pseudo-recursion
636 let new_iter = children.iter();
636 let new_iter = children.iter();
637 let old_iter = std::mem::replace(&mut iter, new_iter);
637 let old_iter = std::mem::replace(&mut iter, new_iter);
638 stack.push((child_node, old_iter));
638 stack.push((child_node, old_iter));
639 }
639 }
640 // Found the end of a `children.iter()` iterator.
640 // Found the end of a `children.iter()` iterator.
641 if let Some((child_node, next_iter)) = stack.pop() {
641 if let Some((child_node, next_iter)) = stack.pop() {
642 // "Return" from pseudo-recursion by restoring state from the
642 // "Return" from pseudo-recursion by restoring state from the
643 // explicit stack
643 // explicit stack
644 iter = next_iter;
644 iter = next_iter;
645
645
646 Some(Ok(child_node))
646 Some(Ok(child_node))
647 } else {
647 } else {
648 // Reached the bottom of the stack, we’re done
648 // Reached the bottom of the stack, we’re done
649 None
649 None
650 }
650 }
651 })
651 })
652 }
652 }
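// Editor's note: a self-contained sketch, not part of this changeset, of the
// same "explicit stack instead of the call stack" idea on a plain nested
// structure. As in `iter_nodes`, parents are yielded after their children.
//
// ```
// struct Tree {
//     children: Vec<Tree>,
// }
//
// fn iter_post_order(root: &Tree) -> impl Iterator<Item = &Tree> + '_ {
//     let mut stack = Vec::new();
//     let mut iter = root.children.iter();
//     std::iter::from_fn(move || {
//         while let Some(child) = iter.next() {
//             let parent_iter =
//                 std::mem::replace(&mut iter, child.children.iter());
//             stack.push((child, parent_iter));
//         }
//         // The current level is exhausted: pop back to the parent level.
//         stack.pop().map(|(node, parent_iter)| {
//             iter = parent_iter;
//             node
//         })
//     })
// }
// ```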
653
653
654 fn clear_known_ambiguous_mtimes(
654 fn clear_known_ambiguous_mtimes(
655 &mut self,
655 &mut self,
656 paths: &[impl AsRef<HgPath>],
656 paths: &[impl AsRef<HgPath>],
657 ) -> Result<(), DirstateV2ParseError> {
657 ) -> Result<(), DirstateV2ParseError> {
658 for path in paths {
658 for path in paths {
659 if let Some(node) = Self::get_node_mut(
659 if let Some(node) = Self::get_node_mut(
660 self.on_disk,
660 self.on_disk,
661 &mut self.root,
661 &mut self.root,
662 path.as_ref(),
662 path.as_ref(),
663 )? {
663 )? {
664 if let NodeData::Entry(entry) = &mut node.data {
664 if let NodeData::Entry(entry) = &mut node.data {
665 entry.clear_mtime();
665 entry.clear_mtime();
666 }
666 }
667 }
667 }
668 }
668 }
669 Ok(())
669 Ok(())
670 }
670 }
671
671
672 /// Return a fallible iterator of full paths of nodes that have an
672 /// Return a fallible iterator of full paths of nodes that have an
673 /// `entry` for which the given `predicate` returns true.
673 /// `entry` for which the given `predicate` returns true.
674 ///
674 ///
675 /// Fallibility means that each iterator item is a `Result`, which may
675 /// Fallibility means that each iterator item is a `Result`, which may
676 /// indicate a parse error of the on-disk dirstate-v2 format. Such errors
676 /// indicate a parse error of the on-disk dirstate-v2 format. Such errors
677 /// should only happen if Mercurial is buggy or a repository is corrupted.
677 /// should only happen if Mercurial is buggy or a repository is corrupted.
678 fn filter_full_paths<'tree>(
678 fn filter_full_paths<'tree>(
679 &'tree self,
679 &'tree self,
680 predicate: impl Fn(&DirstateEntry) -> bool + 'tree,
680 predicate: impl Fn(&DirstateEntry) -> bool + 'tree,
681 ) -> impl Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + 'tree
681 ) -> impl Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + 'tree
682 {
682 {
683 filter_map_results(self.iter_nodes(), move |node| {
683 filter_map_results(self.iter_nodes(), move |node| {
684 if let Some(entry) = node.entry()? {
684 if let Some(entry) = node.entry()? {
685 if predicate(&entry) {
685 if predicate(&entry) {
686 return Ok(Some(node.full_path(self.on_disk)?));
686 return Ok(Some(node.full_path(self.on_disk)?));
687 }
687 }
688 }
688 }
689 Ok(None)
689 Ok(None)
690 })
690 })
691 }
691 }
692 }
692 }
693
693
694 /// Like `Iterator::filter_map`, but over a fallible iterator of `Result`s.
694 /// Like `Iterator::filter_map`, but over a fallible iterator of `Result`s.
695 ///
695 ///
696 /// The callback is only called for incoming `Ok` values. Errors are passed
696 /// The callback is only called for incoming `Ok` values. Errors are passed
697 /// through as-is. In order to let it use the `?` operator the callback is
697 /// through as-is. In order to let it use the `?` operator the callback is
698 /// expected to return a `Result` of `Option`, instead of an `Option` of
698 /// expected to return a `Result` of `Option`, instead of an `Option` of
699 /// `Result`.
699 /// `Result`.
700 fn filter_map_results<'a, I, F, A, B, E>(
700 fn filter_map_results<'a, I, F, A, B, E>(
701 iter: I,
701 iter: I,
702 f: F,
702 f: F,
703 ) -> impl Iterator<Item = Result<B, E>> + 'a
703 ) -> impl Iterator<Item = Result<B, E>> + 'a
704 where
704 where
705 I: Iterator<Item = Result<A, E>> + 'a,
705 I: Iterator<Item = Result<A, E>> + 'a,
706 F: Fn(A) -> Result<Option<B>, E> + 'a,
706 F: Fn(A) -> Result<Option<B>, E> + 'a,
707 {
707 {
708 iter.filter_map(move |result| match result {
708 iter.filter_map(move |result| match result {
709 Ok(node) => f(node).transpose(),
709 Ok(node) => f(node).transpose(),
710 Err(e) => Some(Err(e)),
710 Err(e) => Some(Err(e)),
711 })
711 })
712 }
712 }
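// Editor's note: a hedged usage sketch, not part of this changeset, showing
// why the callback returns `Result<Option<_>, _>`: it lets `?` be used on the
// fallible accessors while errors from the underlying iterator pass through.
//
// ```
// let tracked_paths = filter_map_results(self.iter_nodes(), move |node| {
//     Ok(match node.entry()? {
//         Some(entry) if entry.state.is_tracked() => {
//             Some(node.full_path(self.on_disk)?)
//         }
//         _ => None,
//     })
// });
// ```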
713
713
714 impl<'on_disk> super::dispatch::DirstateMapMethods for DirstateMap<'on_disk> {
714 impl<'on_disk> super::dispatch::DirstateMapMethods for DirstateMap<'on_disk> {
715 fn clear(&mut self) {
715 fn clear(&mut self) {
716 self.root = Default::default();
716 self.root = Default::default();
717 self.nodes_with_entry_count = 0;
717 self.nodes_with_entry_count = 0;
718 self.nodes_with_copy_source_count = 0;
718 self.nodes_with_copy_source_count = 0;
719 }
719 }
720
720
721 fn add_file(
721 fn add_file(
722 &mut self,
722 &mut self,
723 filename: &HgPath,
723 filename: &HgPath,
724 entry: DirstateEntry,
724 entry: DirstateEntry,
725 added: bool,
725 from_p2: bool,
726 from_p2: bool,
726 possibly_dirty: bool,
727 possibly_dirty: bool,
727 ) -> Result<(), DirstateError> {
728 ) -> Result<(), DirstateError> {
728 let mut entry = entry;
729 let mut entry = entry;
729 if entry.state == EntryState::Added {
730 if added {
730 assert!(!possibly_dirty);
731 assert!(!possibly_dirty);
731 assert!(!from_p2);
732 assert!(!from_p2);
733 entry.state = EntryState::Added;
732 entry.size = SIZE_NON_NORMAL;
734 entry.size = SIZE_NON_NORMAL;
733 entry.mtime = MTIME_UNSET;
735 entry.mtime = MTIME_UNSET;
734 } else if from_p2 {
736 } else if from_p2 {
735 assert!(!possibly_dirty);
737 assert!(!possibly_dirty);
736 entry.size = SIZE_FROM_OTHER_PARENT;
738 entry.size = SIZE_FROM_OTHER_PARENT;
737 entry.mtime = MTIME_UNSET;
739 entry.mtime = MTIME_UNSET;
738 } else if possibly_dirty {
740 } else if possibly_dirty {
739 entry.size = SIZE_NON_NORMAL;
741 entry.size = SIZE_NON_NORMAL;
740 entry.mtime = MTIME_UNSET;
742 entry.mtime = MTIME_UNSET;
741 } else {
743 } else {
742 entry.size = entry.size & V1_RANGEMASK;
744 entry.size = entry.size & V1_RANGEMASK;
743 entry.mtime = entry.mtime & V1_RANGEMASK;
745 entry.mtime = entry.mtime & V1_RANGEMASK;
744 }
746 }
745
747
746 let old_state = match self.get(filename)? {
748 let old_state = match self.get(filename)? {
747 Some(e) => e.state,
749 Some(e) => e.state,
748 None => EntryState::Unknown,
750 None => EntryState::Unknown,
749 };
751 };
750
752
751 Ok(self.add_or_remove_file(filename, old_state, entry)?)
753 Ok(self.add_or_remove_file(filename, old_state, entry)?)
752 }
754 }
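// Editor's note: a hedged sketch, not part of this changeset, of how a caller
// is expected to use the new `added` flag: instead of pre-setting
// `entry.state` to `EntryState::Added`, it passes `added = true` and lets
// `add_file` force the state, size and mtime. `map`, `path` and `entry` are
// illustrative names.
//
// ```
// // Record a freshly added file:
// map.add_file(
//     path,
//     entry,
//     /* added */ true,
//     /* from_p2 */ false,
//     /* possibly_dirty */ false,
// )?;
// ```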
753
755
754 fn remove_file(
756 fn remove_file(
755 &mut self,
757 &mut self,
756 filename: &HgPath,
758 filename: &HgPath,
757 in_merge: bool,
759 in_merge: bool,
758 ) -> Result<(), DirstateError> {
760 ) -> Result<(), DirstateError> {
759 let old_entry_opt = self.get(filename)?;
761 let old_entry_opt = self.get(filename)?;
760 let old_state = match old_entry_opt {
762 let old_state = match old_entry_opt {
761 Some(e) => e.state,
763 Some(e) => e.state,
762 None => EntryState::Unknown,
764 None => EntryState::Unknown,
763 };
765 };
764 let mut size = 0;
766 let mut size = 0;
765 if in_merge {
767 if in_merge {
766 // XXX we should not be able to have 'm' state and 'FROM_P2' if not
768 // XXX we should not be able to have 'm' state and 'FROM_P2' if not
767 // during a merge. So I (marmoute) am not sure we need the
769 // during a merge. So I (marmoute) am not sure we need the
768 // conditional at all. Double-checking this with an assert
770 // conditional at all. Double-checking this with an assert
769 // would be nice.
771 // would be nice.
770 if let Some(old_entry) = old_entry_opt {
772 if let Some(old_entry) = old_entry_opt {
771 // backup the previous state
773 // backup the previous state
772 if old_entry.state == EntryState::Merged {
774 if old_entry.state == EntryState::Merged {
773 size = SIZE_NON_NORMAL;
775 size = SIZE_NON_NORMAL;
774 } else if old_entry.state == EntryState::Normal
776 } else if old_entry.state == EntryState::Normal
775 && old_entry.size == SIZE_FROM_OTHER_PARENT
777 && old_entry.size == SIZE_FROM_OTHER_PARENT
776 {
778 {
777 // other parent
779 // other parent
778 size = SIZE_FROM_OTHER_PARENT;
780 size = SIZE_FROM_OTHER_PARENT;
779 }
781 }
780 }
782 }
781 }
783 }
782 if size == 0 {
784 if size == 0 {
783 self.copy_map_remove(filename)?;
785 self.copy_map_remove(filename)?;
784 }
786 }
785 let entry = DirstateEntry {
787 let entry = DirstateEntry {
786 state: EntryState::Removed,
788 state: EntryState::Removed,
787 mode: 0,
789 mode: 0,
788 size,
790 size,
789 mtime: 0,
791 mtime: 0,
790 };
792 };
791 Ok(self.add_or_remove_file(filename, old_state, entry)?)
793 Ok(self.add_or_remove_file(filename, old_state, entry)?)
792 }
794 }
793
795
794 fn drop_file(
796 fn drop_file(
795 &mut self,
797 &mut self,
796 filename: &HgPath,
798 filename: &HgPath,
797 old_state: EntryState,
799 old_state: EntryState,
798 ) -> Result<bool, DirstateError> {
800 ) -> Result<bool, DirstateError> {
799 struct Dropped {
801 struct Dropped {
800 was_tracked: bool,
802 was_tracked: bool,
801 had_entry: bool,
803 had_entry: bool,
802 had_copy_source: bool,
804 had_copy_source: bool,
803 }
805 }
804
806
805 /// If this returns `Ok(Some((dropped, removed)))`, then
807 /// If this returns `Ok(Some((dropped, removed)))`, then
806 ///
808 ///
807 /// * `dropped` is about the leaf node that was at `filename`
809 /// * `dropped` is about the leaf node that was at `filename`
808 /// * `removed` is whether this particular level of recursion just
810 /// * `removed` is whether this particular level of recursion just
809 /// removed a node in `nodes`.
811 /// removed a node in `nodes`.
810 fn recur<'on_disk>(
812 fn recur<'on_disk>(
811 on_disk: &'on_disk [u8],
813 on_disk: &'on_disk [u8],
812 nodes: &mut ChildNodes<'on_disk>,
814 nodes: &mut ChildNodes<'on_disk>,
813 path: &HgPath,
815 path: &HgPath,
814 ) -> Result<Option<(Dropped, bool)>, DirstateV2ParseError> {
816 ) -> Result<Option<(Dropped, bool)>, DirstateV2ParseError> {
815 let (first_path_component, rest_of_path) =
817 let (first_path_component, rest_of_path) =
816 path.split_first_component();
818 path.split_first_component();
817 let node = if let Some(node) =
819 let node = if let Some(node) =
818 nodes.make_mut(on_disk)?.get_mut(first_path_component)
820 nodes.make_mut(on_disk)?.get_mut(first_path_component)
819 {
821 {
820 node
822 node
821 } else {
823 } else {
822 return Ok(None);
824 return Ok(None);
823 };
825 };
824 let dropped;
826 let dropped;
825 if let Some(rest) = rest_of_path {
827 if let Some(rest) = rest_of_path {
826 if let Some((d, removed)) =
828 if let Some((d, removed)) =
827 recur(on_disk, &mut node.children, rest)?
829 recur(on_disk, &mut node.children, rest)?
828 {
830 {
829 dropped = d;
831 dropped = d;
830 if dropped.had_entry {
832 if dropped.had_entry {
831 node.descendants_with_entry_count -= 1;
833 node.descendants_with_entry_count -= 1;
832 }
834 }
833 if dropped.was_tracked {
835 if dropped.was_tracked {
834 node.tracked_descendants_count -= 1;
836 node.tracked_descendants_count -= 1;
835 }
837 }
836
838
837 // Directory caches must be invalidated when removing a
839 // Directory caches must be invalidated when removing a
838 // child node
840 // child node
839 if removed {
841 if removed {
840 if let NodeData::CachedDirectory { .. } = &node.data {
842 if let NodeData::CachedDirectory { .. } = &node.data {
841 node.data = NodeData::None
843 node.data = NodeData::None
842 }
844 }
843 }
845 }
844 } else {
846 } else {
845 return Ok(None);
847 return Ok(None);
846 }
848 }
847 } else {
849 } else {
848 let had_entry = node.data.has_entry();
850 let had_entry = node.data.has_entry();
849 if had_entry {
851 if had_entry {
850 node.data = NodeData::None
852 node.data = NodeData::None
851 }
853 }
852 dropped = Dropped {
854 dropped = Dropped {
853 was_tracked: node
855 was_tracked: node
854 .data
856 .data
855 .as_entry()
857 .as_entry()
856 .map_or(false, |entry| entry.state.is_tracked()),
858 .map_or(false, |entry| entry.state.is_tracked()),
857 had_entry,
859 had_entry,
858 had_copy_source: node.copy_source.take().is_some(),
860 had_copy_source: node.copy_source.take().is_some(),
859 };
861 };
860 }
862 }
861 // After recursion, for both leaf (rest_of_path is None) nodes and
863 // After recursion, for both leaf (rest_of_path is None) nodes and
862 // parent nodes, remove a node if it just became empty.
864 // parent nodes, remove a node if it just became empty.
863 let remove = !node.data.has_entry()
865 let remove = !node.data.has_entry()
864 && node.copy_source.is_none()
866 && node.copy_source.is_none()
865 && node.children.is_empty();
867 && node.children.is_empty();
866 if remove {
868 if remove {
867 nodes.make_mut(on_disk)?.remove(first_path_component);
869 nodes.make_mut(on_disk)?.remove(first_path_component);
868 }
870 }
869 Ok(Some((dropped, remove)))
871 Ok(Some((dropped, remove)))
870 }
872 }
871
873
872 if let Some((dropped, _removed)) =
874 if let Some((dropped, _removed)) =
873 recur(self.on_disk, &mut self.root, filename)?
875 recur(self.on_disk, &mut self.root, filename)?
874 {
876 {
875 if dropped.had_entry {
877 if dropped.had_entry {
876 self.nodes_with_entry_count -= 1
878 self.nodes_with_entry_count -= 1
877 }
879 }
878 if dropped.had_copy_source {
880 if dropped.had_copy_source {
879 self.nodes_with_copy_source_count -= 1
881 self.nodes_with_copy_source_count -= 1
880 }
882 }
881 Ok(dropped.had_entry)
883 Ok(dropped.had_entry)
882 } else {
884 } else {
883 debug_assert!(!old_state.is_tracked());
885 debug_assert!(!old_state.is_tracked());
884 Ok(false)
886 Ok(false)
885 }
887 }
886 }
888 }
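// Editor's note: a short illustration, not part of this changeset, of the
// pruning rule applied on the way back up the recursion above; `should_prune`
// is an illustrative helper, not a function in this file.
//
// ```
// fn should_prune(node: &Node<'_>) -> bool {
//     !node.data.has_entry()
//         && node.copy_source.is_none()
//         && node.children.is_empty()
// }
// ```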
887
889
888 fn clear_ambiguous_times(
890 fn clear_ambiguous_times(
889 &mut self,
891 &mut self,
890 filenames: Vec<HgPathBuf>,
892 filenames: Vec<HgPathBuf>,
891 now: i32,
893 now: i32,
892 ) -> Result<(), DirstateV2ParseError> {
894 ) -> Result<(), DirstateV2ParseError> {
893 for filename in filenames {
895 for filename in filenames {
894 if let Some(node) =
896 if let Some(node) =
895 Self::get_node_mut(self.on_disk, &mut self.root, &filename)?
897 Self::get_node_mut(self.on_disk, &mut self.root, &filename)?
896 {
898 {
897 if let NodeData::Entry(entry) = &mut node.data {
899 if let NodeData::Entry(entry) = &mut node.data {
898 entry.clear_ambiguous_mtime(now);
900 entry.clear_ambiguous_mtime(now);
899 }
901 }
900 }
902 }
901 }
903 }
902 Ok(())
904 Ok(())
903 }
905 }
904
906
905 fn non_normal_entries_contains(
907 fn non_normal_entries_contains(
906 &mut self,
908 &mut self,
907 key: &HgPath,
909 key: &HgPath,
908 ) -> Result<bool, DirstateV2ParseError> {
910 ) -> Result<bool, DirstateV2ParseError> {
909 Ok(if let Some(node) = self.get_node(key)? {
911 Ok(if let Some(node) = self.get_node(key)? {
910 node.entry()?.map_or(false, |entry| entry.is_non_normal())
912 node.entry()?.map_or(false, |entry| entry.is_non_normal())
911 } else {
913 } else {
912 false
914 false
913 })
915 })
914 }
916 }
915
917
916 fn non_normal_entries_remove(&mut self, _key: &HgPath) {
918 fn non_normal_entries_remove(&mut self, _key: &HgPath) {
917 // Do nothing, this `DirstateMap` does not have a separate "non normal
919 // Do nothing, this `DirstateMap` does not have a separate "non normal
918 // entries" set that need to be kept up to date
920 // entries" set that need to be kept up to date
919 }
921 }
920
922
921 fn non_normal_or_other_parent_paths(
923 fn non_normal_or_other_parent_paths(
922 &mut self,
924 &mut self,
923 ) -> Box<dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + '_>
925 ) -> Box<dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + '_>
924 {
926 {
925 Box::new(self.filter_full_paths(|entry| {
927 Box::new(self.filter_full_paths(|entry| {
926 entry.is_non_normal() || entry.is_from_other_parent()
928 entry.is_non_normal() || entry.is_from_other_parent()
927 }))
929 }))
928 }
930 }
929
931
930 fn set_non_normal_other_parent_entries(&mut self, _force: bool) {
932 fn set_non_normal_other_parent_entries(&mut self, _force: bool) {
931 // Do nothing, this `DirstateMap` does not have a separate "non normal
933 // Do nothing, this `DirstateMap` does not have a separate "non normal
932 // entries" and "from other parent" sets that need to be recomputed
934 // entries" and "from other parent" sets that need to be recomputed
933 }
935 }
934
936
935 fn iter_non_normal_paths(
937 fn iter_non_normal_paths(
936 &mut self,
938 &mut self,
937 ) -> Box<
939 ) -> Box<
938 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
940 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
939 > {
941 > {
940 self.iter_non_normal_paths_panic()
942 self.iter_non_normal_paths_panic()
941 }
943 }
942
944
943 fn iter_non_normal_paths_panic(
945 fn iter_non_normal_paths_panic(
944 &self,
946 &self,
945 ) -> Box<
947 ) -> Box<
946 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
948 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
947 > {
949 > {
948 Box::new(self.filter_full_paths(|entry| entry.is_non_normal()))
950 Box::new(self.filter_full_paths(|entry| entry.is_non_normal()))
949 }
951 }
950
952
951 fn iter_other_parent_paths(
953 fn iter_other_parent_paths(
952 &mut self,
954 &mut self,
953 ) -> Box<
955 ) -> Box<
954 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
956 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
955 > {
957 > {
956 Box::new(self.filter_full_paths(|entry| entry.is_from_other_parent()))
958 Box::new(self.filter_full_paths(|entry| entry.is_from_other_parent()))
957 }
959 }
958
960
959 fn has_tracked_dir(
961 fn has_tracked_dir(
960 &mut self,
962 &mut self,
961 directory: &HgPath,
963 directory: &HgPath,
962 ) -> Result<bool, DirstateError> {
964 ) -> Result<bool, DirstateError> {
963 if let Some(node) = self.get_node(directory)? {
965 if let Some(node) = self.get_node(directory)? {
964 // A node without a `DirstateEntry` was created to hold child
966 // A node without a `DirstateEntry` was created to hold child
965 // nodes, and is therefore a directory.
967 // nodes, and is therefore a directory.
966 let state = node.state()?;
968 let state = node.state()?;
967 Ok(state.is_none() && node.tracked_descendants_count() > 0)
969 Ok(state.is_none() && node.tracked_descendants_count() > 0)
968 } else {
970 } else {
969 Ok(false)
971 Ok(false)
970 }
972 }
971 }
973 }
972
974
973 fn has_dir(&mut self, directory: &HgPath) -> Result<bool, DirstateError> {
975 fn has_dir(&mut self, directory: &HgPath) -> Result<bool, DirstateError> {
974 if let Some(node) = self.get_node(directory)? {
976 if let Some(node) = self.get_node(directory)? {
975 // A node without a `DirstateEntry` was created to hold child
977 // A node without a `DirstateEntry` was created to hold child
976 // nodes, and is therefore a directory.
978 // nodes, and is therefore a directory.
977 let state = node.state()?;
979 let state = node.state()?;
978 Ok(state.is_none() && node.descendants_with_entry_count() > 0)
980 Ok(state.is_none() && node.descendants_with_entry_count() > 0)
979 } else {
981 } else {
980 Ok(false)
982 Ok(false)
981 }
983 }
982 }
984 }
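// Editor's note: a hedged example, not part of this changeset, of the
// difference between the two predicates above, for a map whose only entry is
// a removed file `dir/old.txt` (assuming the usual `HgPath::new`
// constructor).
//
// ```
// // `dir` still has a dirstate entry somewhere below it...
// assert!(map.has_dir(HgPath::new(b"dir"))?);
// // ...but nothing tracked: `Removed` entries do not count as tracked.
// assert!(!map.has_tracked_dir(HgPath::new(b"dir"))?);
// ```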
983
985
984 #[timed]
986 #[timed]
985 fn pack_v1(
987 fn pack_v1(
986 &mut self,
988 &mut self,
987 parents: DirstateParents,
989 parents: DirstateParents,
988 now: Timestamp,
990 now: Timestamp,
989 ) -> Result<Vec<u8>, DirstateError> {
991 ) -> Result<Vec<u8>, DirstateError> {
990 let now: i32 = now.0.try_into().expect("time overflow");
992 let now: i32 = now.0.try_into().expect("time overflow");
991 let mut ambiguous_mtimes = Vec::new();
993 let mut ambiguous_mtimes = Vec::new();
992 // Optimization (to be measured?): pre-compute size to avoid `Vec`
994 // Optimization (to be measured?): pre-compute size to avoid `Vec`
993 // reallocations
995 // reallocations
994 let mut size = parents.as_bytes().len();
996 let mut size = parents.as_bytes().len();
995 for node in self.iter_nodes() {
997 for node in self.iter_nodes() {
996 let node = node?;
998 let node = node?;
997 if let Some(entry) = node.entry()? {
999 if let Some(entry) = node.entry()? {
998 size += packed_entry_size(
1000 size += packed_entry_size(
999 node.full_path(self.on_disk)?,
1001 node.full_path(self.on_disk)?,
1000 node.copy_source(self.on_disk)?,
1002 node.copy_source(self.on_disk)?,
1001 );
1003 );
1002 if entry.mtime_is_ambiguous(now) {
1004 if entry.mtime_is_ambiguous(now) {
1003 ambiguous_mtimes.push(
1005 ambiguous_mtimes.push(
1004 node.full_path_borrowed(self.on_disk)?
1006 node.full_path_borrowed(self.on_disk)?
1005 .detach_from_tree(),
1007 .detach_from_tree(),
1006 )
1008 )
1007 }
1009 }
1008 }
1010 }
1009 }
1011 }
1010 self.clear_known_ambiguous_mtimes(&ambiguous_mtimes)?;
1012 self.clear_known_ambiguous_mtimes(&ambiguous_mtimes)?;
1011
1013
1012 let mut packed = Vec::with_capacity(size);
1014 let mut packed = Vec::with_capacity(size);
1013 packed.extend(parents.as_bytes());
1015 packed.extend(parents.as_bytes());
1014
1016
1015 for node in self.iter_nodes() {
1017 for node in self.iter_nodes() {
1016 let node = node?;
1018 let node = node?;
1017 if let Some(entry) = node.entry()? {
1019 if let Some(entry) = node.entry()? {
1018 pack_entry(
1020 pack_entry(
1019 node.full_path(self.on_disk)?,
1021 node.full_path(self.on_disk)?,
1020 &entry,
1022 &entry,
1021 node.copy_source(self.on_disk)?,
1023 node.copy_source(self.on_disk)?,
1022 &mut packed,
1024 &mut packed,
1023 );
1025 );
1024 }
1026 }
1025 }
1027 }
1026 Ok(packed)
1028 Ok(packed)
1027 }
1029 }
1028
1030
1029 #[timed]
1031 #[timed]
1030 fn pack_v2(
1032 fn pack_v2(
1031 &mut self,
1033 &mut self,
1032 parents: DirstateParents,
1034 parents: DirstateParents,
1033 now: Timestamp,
1035 now: Timestamp,
1034 ) -> Result<Vec<u8>, DirstateError> {
1036 ) -> Result<Vec<u8>, DirstateError> {
1035 // TODO: how do we want to handle this in 2038?
1037 // TODO: how do we want to handle this in 2038?
1036 let now: i32 = now.0.try_into().expect("time overflow");
1038 let now: i32 = now.0.try_into().expect("time overflow");
1037 let mut paths = Vec::new();
1039 let mut paths = Vec::new();
1038 for node in self.iter_nodes() {
1040 for node in self.iter_nodes() {
1039 let node = node?;
1041 let node = node?;
1040 if let Some(entry) = node.entry()? {
1042 if let Some(entry) = node.entry()? {
1041 if entry.mtime_is_ambiguous(now) {
1043 if entry.mtime_is_ambiguous(now) {
1042 paths.push(
1044 paths.push(
1043 node.full_path_borrowed(self.on_disk)?
1045 node.full_path_borrowed(self.on_disk)?
1044 .detach_from_tree(),
1046 .detach_from_tree(),
1045 )
1047 )
1046 }
1048 }
1047 }
1049 }
1048 }
1050 }
1049 // Borrow of `self` ends here since we collect cloned paths
1051 // Borrow of `self` ends here since we collect cloned paths
1050
1052
1051 self.clear_known_ambiguous_mtimes(&paths)?;
1053 self.clear_known_ambiguous_mtimes(&paths)?;
1052
1054
1053 on_disk::write(self, parents)
1055 on_disk::write(self, parents)
1054 }
1056 }
1055
1057
1056 fn status<'a>(
1058 fn status<'a>(
1057 &'a mut self,
1059 &'a mut self,
1058 matcher: &'a (dyn Matcher + Sync),
1060 matcher: &'a (dyn Matcher + Sync),
1059 root_dir: PathBuf,
1061 root_dir: PathBuf,
1060 ignore_files: Vec<PathBuf>,
1062 ignore_files: Vec<PathBuf>,
1061 options: StatusOptions,
1063 options: StatusOptions,
1062 ) -> Result<(DirstateStatus<'a>, Vec<PatternFileWarning>), StatusError>
1064 ) -> Result<(DirstateStatus<'a>, Vec<PatternFileWarning>), StatusError>
1063 {
1065 {
1064 super::status::status(self, matcher, root_dir, ignore_files, options)
1066 super::status::status(self, matcher, root_dir, ignore_files, options)
1065 }
1067 }
1066
1068
1067 fn copy_map_len(&self) -> usize {
1069 fn copy_map_len(&self) -> usize {
1068 self.nodes_with_copy_source_count as usize
1070 self.nodes_with_copy_source_count as usize
1069 }
1071 }
1070
1072
1071 fn copy_map_iter(&self) -> CopyMapIter<'_> {
1073 fn copy_map_iter(&self) -> CopyMapIter<'_> {
1072 Box::new(filter_map_results(self.iter_nodes(), move |node| {
1074 Box::new(filter_map_results(self.iter_nodes(), move |node| {
1073 Ok(if let Some(source) = node.copy_source(self.on_disk)? {
1075 Ok(if let Some(source) = node.copy_source(self.on_disk)? {
1074 Some((node.full_path(self.on_disk)?, source))
1076 Some((node.full_path(self.on_disk)?, source))
1075 } else {
1077 } else {
1076 None
1078 None
1077 })
1079 })
1078 }))
1080 }))
1079 }
1081 }
1080
1082
1081 fn copy_map_contains_key(
1083 fn copy_map_contains_key(
1082 &self,
1084 &self,
1083 key: &HgPath,
1085 key: &HgPath,
1084 ) -> Result<bool, DirstateV2ParseError> {
1086 ) -> Result<bool, DirstateV2ParseError> {
1085 Ok(if let Some(node) = self.get_node(key)? {
1087 Ok(if let Some(node) = self.get_node(key)? {
1086 node.has_copy_source()
1088 node.has_copy_source()
1087 } else {
1089 } else {
1088 false
1090 false
1089 })
1091 })
1090 }
1092 }
1091
1093
1092 fn copy_map_get(
1094 fn copy_map_get(
1093 &self,
1095 &self,
1094 key: &HgPath,
1096 key: &HgPath,
1095 ) -> Result<Option<&HgPath>, DirstateV2ParseError> {
1097 ) -> Result<Option<&HgPath>, DirstateV2ParseError> {
1096 if let Some(node) = self.get_node(key)? {
1098 if let Some(node) = self.get_node(key)? {
1097 if let Some(source) = node.copy_source(self.on_disk)? {
1099 if let Some(source) = node.copy_source(self.on_disk)? {
1098 return Ok(Some(source));
1100 return Ok(Some(source));
1099 }
1101 }
1100 }
1102 }
1101 Ok(None)
1103 Ok(None)
1102 }
1104 }
1103
1105
1104 fn copy_map_remove(
1106 fn copy_map_remove(
1105 &mut self,
1107 &mut self,
1106 key: &HgPath,
1108 key: &HgPath,
1107 ) -> Result<Option<HgPathBuf>, DirstateV2ParseError> {
1109 ) -> Result<Option<HgPathBuf>, DirstateV2ParseError> {
1108 let count = &mut self.nodes_with_copy_source_count;
1110 let count = &mut self.nodes_with_copy_source_count;
1109 Ok(
1111 Ok(
1110 Self::get_node_mut(self.on_disk, &mut self.root, key)?.and_then(
1112 Self::get_node_mut(self.on_disk, &mut self.root, key)?.and_then(
1111 |node| {
1113 |node| {
1112 if node.copy_source.is_some() {
1114 if node.copy_source.is_some() {
1113 *count -= 1
1115 *count -= 1
1114 }
1116 }
1115 node.copy_source.take().map(Cow::into_owned)
1117 node.copy_source.take().map(Cow::into_owned)
1116 },
1118 },
1117 ),
1119 ),
1118 )
1120 )
1119 }
1121 }
1120
1122
1121 fn copy_map_insert(
1123 fn copy_map_insert(
1122 &mut self,
1124 &mut self,
1123 key: HgPathBuf,
1125 key: HgPathBuf,
1124 value: HgPathBuf,
1126 value: HgPathBuf,
1125 ) -> Result<Option<HgPathBuf>, DirstateV2ParseError> {
1127 ) -> Result<Option<HgPathBuf>, DirstateV2ParseError> {
1126 let node = Self::get_or_insert_node(
1128 let node = Self::get_or_insert_node(
1127 self.on_disk,
1129 self.on_disk,
1128 &mut self.root,
1130 &mut self.root,
1129 &key,
1131 &key,
1130 WithBasename::to_cow_owned,
1132 WithBasename::to_cow_owned,
1131 |_ancestor| {},
1133 |_ancestor| {},
1132 )?;
1134 )?;
1133 if node.copy_source.is_none() {
1135 if node.copy_source.is_none() {
1134 self.nodes_with_copy_source_count += 1
1136 self.nodes_with_copy_source_count += 1
1135 }
1137 }
1136 Ok(node.copy_source.replace(value.into()).map(Cow::into_owned))
1138 Ok(node.copy_source.replace(value.into()).map(Cow::into_owned))
1137 }
1139 }
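// Editor's note: a hedged round-trip sketch, not part of this changeset, for
// the copy-map accessors above, starting from an empty map and assuming the
// usual `HgPathBuf::from_bytes` constructor.
//
// ```
// let key = HgPathBuf::from_bytes(b"copied.txt");
// let value = HgPathBuf::from_bytes(b"original.txt");
// map.copy_map_insert(key.clone(), value)?;
// assert_eq!(map.copy_map_len(), 1);
// assert!(map.copy_map_contains_key(&key)?);
// let removed = map.copy_map_remove(&key)?;
// assert!(removed.is_some());
// assert_eq!(map.copy_map_len(), 0);
// ```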
1138
1140
1139 fn len(&self) -> usize {
1141 fn len(&self) -> usize {
1140 self.nodes_with_entry_count as usize
1142 self.nodes_with_entry_count as usize
1141 }
1143 }
1142
1144
1143 fn contains_key(
1145 fn contains_key(
1144 &self,
1146 &self,
1145 key: &HgPath,
1147 key: &HgPath,
1146 ) -> Result<bool, DirstateV2ParseError> {
1148 ) -> Result<bool, DirstateV2ParseError> {
1147 Ok(self.get(key)?.is_some())
1149 Ok(self.get(key)?.is_some())
1148 }
1150 }
1149
1151
1150 fn get(
1152 fn get(
1151 &self,
1153 &self,
1152 key: &HgPath,
1154 key: &HgPath,
1153 ) -> Result<Option<DirstateEntry>, DirstateV2ParseError> {
1155 ) -> Result<Option<DirstateEntry>, DirstateV2ParseError> {
1154 Ok(if let Some(node) = self.get_node(key)? {
1156 Ok(if let Some(node) = self.get_node(key)? {
1155 node.entry()?
1157 node.entry()?
1156 } else {
1158 } else {
1157 None
1159 None
1158 })
1160 })
1159 }
1161 }
1160
1162
1161 fn iter(&self) -> StateMapIter<'_> {
1163 fn iter(&self) -> StateMapIter<'_> {
1162 Box::new(filter_map_results(self.iter_nodes(), move |node| {
1164 Box::new(filter_map_results(self.iter_nodes(), move |node| {
1163 Ok(if let Some(entry) = node.entry()? {
1165 Ok(if let Some(entry) = node.entry()? {
1164 Some((node.full_path(self.on_disk)?, entry))
1166 Some((node.full_path(self.on_disk)?, entry))
1165 } else {
1167 } else {
1166 None
1168 None
1167 })
1169 })
1168 }))
1170 }))
1169 }
1171 }
1170
1172
1171 fn iter_directories(
1173 fn iter_directories(
1172 &self,
1174 &self,
1173 ) -> Box<
1175 ) -> Box<
1174 dyn Iterator<
1176 dyn Iterator<
1175 Item = Result<
1177 Item = Result<
1176 (&HgPath, Option<Timestamp>),
1178 (&HgPath, Option<Timestamp>),
1177 DirstateV2ParseError,
1179 DirstateV2ParseError,
1178 >,
1180 >,
1179 > + Send
1181 > + Send
1180 + '_,
1182 + '_,
1181 > {
1183 > {
1182 Box::new(filter_map_results(self.iter_nodes(), move |node| {
1184 Box::new(filter_map_results(self.iter_nodes(), move |node| {
1183 Ok(if node.state()?.is_none() {
1185 Ok(if node.state()?.is_none() {
1184 Some((
1186 Some((
1185 node.full_path(self.on_disk)?,
1187 node.full_path(self.on_disk)?,
1186 node.cached_directory_mtime()
1188 node.cached_directory_mtime()
1187 .map(|mtime| Timestamp(mtime.seconds())),
1189 .map(|mtime| Timestamp(mtime.seconds())),
1188 ))
1190 ))
1189 } else {
1191 } else {
1190 None
1192 None
1191 })
1193 })
1192 }))
1194 }))
1193 }
1195 }
1194 }
1196 }
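// Editor's note: a hedged sketch, not part of this changeset, of consuming
// the fallible iterators defined above; each item must be unwrapped because
// dirstate-v2 parse errors can surface during iteration.
//
// ```
// for item in map.iter() {
//     let (path, entry) = item?;
//     // `path` is an `&HgPath`, `entry` the `DirstateEntry` recorded for it.
//     let _ = (path, entry);
// }
// ```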
@@ -1,492 +1,494 b''
1 use std::path::PathBuf;
1 use std::path::PathBuf;
2
2
3 use crate::dirstate::parsers::Timestamp;
3 use crate::dirstate::parsers::Timestamp;
4 use crate::dirstate_tree::on_disk::DirstateV2ParseError;
4 use crate::dirstate_tree::on_disk::DirstateV2ParseError;
5 use crate::matchers::Matcher;
5 use crate::matchers::Matcher;
6 use crate::utils::hg_path::{HgPath, HgPathBuf};
6 use crate::utils::hg_path::{HgPath, HgPathBuf};
7 use crate::CopyMapIter;
7 use crate::CopyMapIter;
8 use crate::DirstateEntry;
8 use crate::DirstateEntry;
9 use crate::DirstateError;
9 use crate::DirstateError;
10 use crate::DirstateMap;
10 use crate::DirstateMap;
11 use crate::DirstateParents;
11 use crate::DirstateParents;
12 use crate::DirstateStatus;
12 use crate::DirstateStatus;
13 use crate::EntryState;
13 use crate::EntryState;
14 use crate::PatternFileWarning;
14 use crate::PatternFileWarning;
15 use crate::StateMapIter;
15 use crate::StateMapIter;
16 use crate::StatusError;
16 use crate::StatusError;
17 use crate::StatusOptions;
17 use crate::StatusOptions;
18
18
19 /// `rust/hg-cpython/src/dirstate/dirstate_map.rs` implements in Rust a
19 /// `rust/hg-cpython/src/dirstate/dirstate_map.rs` implements in Rust a
20 /// `DirstateMap` Python class that wraps `Box<dyn DirstateMapMethods + Send>`,
20 /// `DirstateMap` Python class that wraps `Box<dyn DirstateMapMethods + Send>`,
21 /// a trait object of this trait. Except for constructors, this trait defines
21 /// a trait object of this trait. Except for constructors, this trait defines
22 /// all APIs that the class needs to interact with its inner dirstate map.
22 /// all APIs that the class needs to interact with its inner dirstate map.
23 ///
23 ///
24 /// A trait object is used to support two different concrete types:
24 /// A trait object is used to support two different concrete types:
25 ///
25 ///
26 /// * `rust/hg-core/src/dirstate/dirstate_map.rs` defines the "flat dirstate
26 /// * `rust/hg-core/src/dirstate/dirstate_map.rs` defines the "flat dirstate
27 /// map" which is based on a few large `HgPath`-keyed `HashMap` and `HashSet`
27 /// map" which is based on a few large `HgPath`-keyed `HashMap` and `HashSet`
28 /// fields.
28 /// fields.
29 /// * `rust/hg-core/src/dirstate_tree/dirstate_map.rs` defines the "tree
29 /// * `rust/hg-core/src/dirstate_tree/dirstate_map.rs` defines the "tree
30 /// dirstate map" based on a tree data struture with nodes for directories
30 /// dirstate map" based on a tree data struture with nodes for directories
31 /// containing child nodes for their files and sub-directories. This tree
31 /// containing child nodes for their files and sub-directories. This tree
32 /// enables a more efficient algorithm for `hg status`, but its details are
32 /// enables a more efficient algorithm for `hg status`, but its details are
33 /// abstracted in this trait.
33 /// abstracted in this trait.
34 ///
34 ///
35 /// The dirstate map associates paths of files in the working directory to
35 /// The dirstate map associates paths of files in the working directory to
36 /// various information about the state of those files.
36 /// various information about the state of those files.
37 pub trait DirstateMapMethods {
37 pub trait DirstateMapMethods {
38 /// Remove information about all files in this map
38 /// Remove information about all files in this map
39 fn clear(&mut self);
39 fn clear(&mut self);
40
40
41 /// Add or change the information associated with a given file.
41 /// Add or change the information associated with a given file.
42 ///
42 ///
43 /// `old_state` is the state in the entry that `get` would have returned
43 /// `old_state` is the state in the entry that `get` would have returned
44 /// before this call, or `EntryState::Unknown` if there was no such entry.
44 /// before this call, or `EntryState::Unknown` if there was no such entry.
45 ///
45 ///
46 /// `entry.state` should never be `EntryState::Unknown`.
46 /// `entry.state` should never be `EntryState::Unknown`.
47 fn add_file(
47 fn add_file(
48 &mut self,
48 &mut self,
49 filename: &HgPath,
49 filename: &HgPath,
50 entry: DirstateEntry,
50 entry: DirstateEntry,
51 added: bool,
51 from_p2: bool,
52 from_p2: bool,
52 possibly_dirty: bool,
53 possibly_dirty: bool,
53 ) -> Result<(), DirstateError>;
54 ) -> Result<(), DirstateError>;
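// Editor's note: a hedged sketch, not part of this changeset, of the dynamic
// dispatch this trait exists for: the Python-facing layer holds only a boxed
// trait object and never knows which concrete map it drives.
// `record_addition` is an illustrative helper.
//
// ```
// fn record_addition(
//     map: &mut (dyn DirstateMapMethods + Send),
//     filename: &HgPath,
//     entry: DirstateEntry,
// ) -> Result<(), DirstateError> {
//     // Works for both the flat and the tree dirstate map.
//     map.add_file(filename, entry, /* added */ true, false, false)
// }
// ```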
54
55
55 /// Mark a file as "removed" (as in `hg rm`).
56 /// Mark a file as "removed" (as in `hg rm`).
56 ///
57 ///
57 /// `old_state` is the state in the entry that `get` would have returned
58 /// `old_state` is the state in the entry that `get` would have returned
58 /// before this call, or `EntryState::Unknown` if there was no such entry.
59 /// before this call, or `EntryState::Unknown` if there was no such entry.
59 ///
60 ///
60 /// `size` is not actually a size but the 0 or -1 or -2 value that would be
61 /// `size` is not actually a size but the 0 or -1 or -2 value that would be
61 /// put in the size field in the dirstate-v1 format.
62 /// put in the size field in the dirstate-v1 format.
62 fn remove_file(
63 fn remove_file(
63 &mut self,
64 &mut self,
64 filename: &HgPath,
65 filename: &HgPath,
65 in_merge: bool,
66 in_merge: bool,
66 ) -> Result<(), DirstateError>;
67 ) -> Result<(), DirstateError>;
67
68
68 /// Drop information about this file from the map if any, and return
69 /// Drop information about this file from the map if any, and return
69 /// whether there was any.
70 /// whether there was any.
70 ///
71 ///
71 /// `get` will now return `None` for this filename.
72 /// `get` will now return `None` for this filename.
72 ///
73 ///
73 /// `old_state` is the state in the entry that `get` would have returned
74 /// `old_state` is the state in the entry that `get` would have returned
74 /// before this call, or `EntryState::Unknown` if there was no such entry.
75 /// before this call, or `EntryState::Unknown` if there was no such entry.
75 fn drop_file(
76 fn drop_file(
76 &mut self,
77 &mut self,
77 filename: &HgPath,
78 filename: &HgPath,
78 old_state: EntryState,
79 old_state: EntryState,
79 ) -> Result<bool, DirstateError>;
80 ) -> Result<bool, DirstateError>;
80
81
81 /// Among given files, mark the stored `mtime` as ambiguous if there is one
82 /// Among given files, mark the stored `mtime` as ambiguous if there is one
82 /// (if `state == EntryState::Normal`) equal to the given current Unix
83 /// (if `state == EntryState::Normal`) equal to the given current Unix
83 /// timestamp.
84 /// timestamp.
84 fn clear_ambiguous_times(
85 fn clear_ambiguous_times(
85 &mut self,
86 &mut self,
86 filenames: Vec<HgPathBuf>,
87 filenames: Vec<HgPathBuf>,
87 now: i32,
88 now: i32,
88 ) -> Result<(), DirstateV2ParseError>;
89 ) -> Result<(), DirstateV2ParseError>;
89
90
90 /// Return whether the map has a "non-normal" entry for the given
91 /// Return whether the map has a "non-normal" entry for the given
91 /// filename. That is, any entry with a `state` other than
92 /// filename. That is, any entry with a `state` other than
92 /// `EntryState::Normal` or with an ambiguous `mtime`.
93 /// `EntryState::Normal` or with an ambiguous `mtime`.
93 fn non_normal_entries_contains(
94 fn non_normal_entries_contains(
94 &mut self,
95 &mut self,
95 key: &HgPath,
96 key: &HgPath,
96 ) -> Result<bool, DirstateV2ParseError>;
97 ) -> Result<bool, DirstateV2ParseError>;
97
98
98 /// Mark the given path as a "normal" file. This is only relevant in the flat
99 /// Mark the given path as a "normal" file. This is only relevant in the flat
99 /// dirstate map where there is a separate `HashSet` that needs to be kept
100 /// dirstate map where there is a separate `HashSet` that needs to be kept
100 /// up to date.
101 /// up to date.
101 fn non_normal_entries_remove(&mut self, key: &HgPath);
102 fn non_normal_entries_remove(&mut self, key: &HgPath);
102
103
103 /// Return an iterator of paths whose respective entry is either
104 /// Return an iterator of paths whose respective entry is either
104 /// "non-normal" (see `non_normal_entries_contains`) or "from other
105 /// "non-normal" (see `non_normal_entries_contains`) or "from other
105 /// parent".
106 /// parent".
106 ///
107 ///
107 /// If that information is cached, create the cache as needed.
108 /// If that information is cached, create the cache as needed.
108 ///
109 ///
109 /// "From other parent" is defined as `state == Normal && size == -2`.
110 /// "From other parent" is defined as `state == Normal && size == -2`.
110 ///
111 ///
111 /// Because parse errors can happen during iteration, the iterated items
112 /// Because parse errors can happen during iteration, the iterated items
112 /// are `Result`s.
113 /// are `Result`s.
113 fn non_normal_or_other_parent_paths(
114 fn non_normal_or_other_parent_paths(
114 &mut self,
115 &mut self,
115 ) -> Box<dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + '_>;
116 ) -> Box<dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + '_>;
116
117
117 /// Create the cache for `non_normal_or_other_parent_paths` if needed.
118 /// Create the cache for `non_normal_or_other_parent_paths` if needed.
118 ///
119 ///
119 /// If `force` is true, the cache is re-created even if it already exists.
120 /// If `force` is true, the cache is re-created even if it already exists.
120 fn set_non_normal_other_parent_entries(&mut self, force: bool);
121 fn set_non_normal_other_parent_entries(&mut self, force: bool);
121
122
122 /// Return an iterator of paths whose respective entry is "non-normal"
123 /// Return an iterator of paths whose respective entry is "non-normal"
123 /// (see `non_normal_entries_contains`).
124 /// (see `non_normal_entries_contains`).
124 ///
125 ///
125 /// If that information is cached, create the cache as needed.
126 /// If that information is cached, create the cache as needed.
126 ///
127 ///
127 /// Because parse errors can happen during iteration, the iterated items
128 /// Because parse errors can happen during iteration, the iterated items
128 /// are `Result`s.
129 /// are `Result`s.
129 fn iter_non_normal_paths(
130 fn iter_non_normal_paths(
130 &mut self,
131 &mut self,
131 ) -> Box<
132 ) -> Box<
132 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
133 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
133 >;
134 >;
134
135
135 /// Same as `iter_non_normal_paths`, but takes `&self` instead of `&mut
136 /// Same as `iter_non_normal_paths`, but takes `&self` instead of `&mut
136 /// self`.
137 /// self`.
137 ///
138 ///
138 /// Panics if a cache is necessary but does not exist yet.
139 /// Panics if a cache is necessary but does not exist yet.
139 fn iter_non_normal_paths_panic(
140 fn iter_non_normal_paths_panic(
140 &self,
141 &self,
141 ) -> Box<
142 ) -> Box<
142 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
143 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
143 >;
144 >;
144
145
145 /// Return an iterator of paths whose respective entry is "from other
146 /// Return an iterator of paths whose respective entry is "from other
146 /// parent".
147 /// parent".
147 ///
148 ///
148 /// If that information is cached, create the cache as needed.
149 /// If that information is cached, create the cache as needed.
149 ///
150 ///
150 /// "From other parent" is defined as `state == Normal && size == -2`.
151 /// "From other parent" is defined as `state == Normal && size == -2`.
151 ///
152 ///
152 /// Because parse errors can happen during iteration, the iterated items
153 /// Because parse errors can happen during iteration, the iterated items
153 /// are `Result`s.
154 /// are `Result`s.
154 fn iter_other_parent_paths(
155 fn iter_other_parent_paths(
155 &mut self,
156 &mut self,
156 ) -> Box<
157 ) -> Box<
157 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
158 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
158 >;
159 >;
159
160
160 /// Returns whether the sub-tree rooted at the given directory contains any
161 /// Returns whether the sub-tree rooted at the given directory contains any
161 /// tracked file.
162 /// tracked file.
162 ///
163 ///
163 /// A file is tracked if it has a `state` other than `EntryState::Removed`.
164 /// A file is tracked if it has a `state` other than `EntryState::Removed`.
164 fn has_tracked_dir(
165 fn has_tracked_dir(
165 &mut self,
166 &mut self,
166 directory: &HgPath,
167 directory: &HgPath,
167 ) -> Result<bool, DirstateError>;
168 ) -> Result<bool, DirstateError>;
168
169
169 /// Returns whether the sub-tree rooted at the given directory contains any
170 /// Returns whether the sub-tree rooted at the given directory contains any
170 /// file with a dirstate entry.
171 /// file with a dirstate entry.
171 fn has_dir(&mut self, directory: &HgPath) -> Result<bool, DirstateError>;
172 fn has_dir(&mut self, directory: &HgPath) -> Result<bool, DirstateError>;
172
173
173 /// Clear mtimes that are ambiguous with `now` (similar to
174 /// Clear mtimes that are ambiguous with `now` (similar to
174 /// `clear_ambiguous_times` but for all files in the dirstate map), and
175 /// `clear_ambiguous_times` but for all files in the dirstate map), and
175 /// serialize bytes to write the `.hg/dirstate` file to disk in dirstate-v1
176 /// serialize bytes to write the `.hg/dirstate` file to disk in dirstate-v1
176 /// format.
177 /// format.
177 fn pack_v1(
178 fn pack_v1(
178 &mut self,
179 &mut self,
179 parents: DirstateParents,
180 parents: DirstateParents,
180 now: Timestamp,
181 now: Timestamp,
181 ) -> Result<Vec<u8>, DirstateError>;
182 ) -> Result<Vec<u8>, DirstateError>;
182
183
183 /// Clear mtimes that are ambiguous with `now` (similar to
184 /// Clear mtimes that are ambiguous with `now` (similar to
184 /// `clear_ambiguous_times` but for all files in the dirstate map), and
185 /// `clear_ambiguous_times` but for all files in the dirstate map), and
185 /// serialize bytes to write the `.hg/dirstate` file to disk in dirstate-v2
186 /// serialize bytes to write the `.hg/dirstate` file to disk in dirstate-v2
186 /// format.
187 /// format.
187 ///
188 ///
188 /// Note: this is only supported by the tree dirstate map.
189 /// Note: this is only supported by the tree dirstate map.
189 fn pack_v2(
190 fn pack_v2(
190 &mut self,
191 &mut self,
191 parents: DirstateParents,
192 parents: DirstateParents,
192 now: Timestamp,
193 now: Timestamp,
193 ) -> Result<Vec<u8>, DirstateError>;
194 ) -> Result<Vec<u8>, DirstateError>;
194
195
195 /// Run the status algorithm.
196 /// Run the status algorithm.
196 ///
197 ///
197 /// This is not semantically a method of the dirstate map, but a different
198 /// This is not semantically a method of the dirstate map, but a different
198 /// algorithm is used for the flat vs. tree dirstate map, so having it in
199 /// algorithm is used for the flat vs. tree dirstate map, so having it in
199 /// this trait enables the same dynamic dispatch as with other methods.
200 /// this trait enables the same dynamic dispatch as with other methods.
200 fn status<'a>(
201 fn status<'a>(
201 &'a mut self,
202 &'a mut self,
202 matcher: &'a (dyn Matcher + Sync),
203 matcher: &'a (dyn Matcher + Sync),
203 root_dir: PathBuf,
204 root_dir: PathBuf,
204 ignore_files: Vec<PathBuf>,
205 ignore_files: Vec<PathBuf>,
205 options: StatusOptions,
206 options: StatusOptions,
206 ) -> Result<(DirstateStatus<'a>, Vec<PatternFileWarning>), StatusError>;
207 ) -> Result<(DirstateStatus<'a>, Vec<PatternFileWarning>), StatusError>;
207
208
208 /// Returns how many files in the dirstate map have a recorded copy source.
209 /// Returns how many files in the dirstate map have a recorded copy source.
209 fn copy_map_len(&self) -> usize;
210 fn copy_map_len(&self) -> usize;
210
211
211 /// Returns an iterator of `(path, copy_source)` for all files that have a
212 /// Returns an iterator of `(path, copy_source)` for all files that have a
212 /// copy source.
213 /// copy source.
213 fn copy_map_iter(&self) -> CopyMapIter<'_>;
214 fn copy_map_iter(&self) -> CopyMapIter<'_>;
214
215
215 /// Returns whether the given file has a copy source.
216 /// Returns whether the given file has a copy source.
216 fn copy_map_contains_key(
217 fn copy_map_contains_key(
217 &self,
218 &self,
218 key: &HgPath,
219 key: &HgPath,
219 ) -> Result<bool, DirstateV2ParseError>;
220 ) -> Result<bool, DirstateV2ParseError>;
220
221
221 /// Returns the copy source for the given file.
222 /// Returns the copy source for the given file.
222 fn copy_map_get(
223 fn copy_map_get(
223 &self,
224 &self,
224 key: &HgPath,
225 key: &HgPath,
225 ) -> Result<Option<&HgPath>, DirstateV2ParseError>;
226 ) -> Result<Option<&HgPath>, DirstateV2ParseError>;
226
227
227 /// Removes the recorded copy source if any for the given file, and returns
228 /// Removes the recorded copy source if any for the given file, and returns
228 /// it.
229 /// it.
229 fn copy_map_remove(
230 fn copy_map_remove(
230 &mut self,
231 &mut self,
231 key: &HgPath,
232 key: &HgPath,
232 ) -> Result<Option<HgPathBuf>, DirstateV2ParseError>;
233 ) -> Result<Option<HgPathBuf>, DirstateV2ParseError>;
233
234
234 /// Set the given `value` copy source for the given `key` file.
235 /// Set the given `value` copy source for the given `key` file.
235 fn copy_map_insert(
236 fn copy_map_insert(
236 &mut self,
237 &mut self,
237 key: HgPathBuf,
238 key: HgPathBuf,
238 value: HgPathBuf,
239 value: HgPathBuf,
239 ) -> Result<Option<HgPathBuf>, DirstateV2ParseError>;
240 ) -> Result<Option<HgPathBuf>, DirstateV2ParseError>;
240
241
241 /// Returns the number of files that have an entry.
242 /// Returns the number of files that have an entry.
242 fn len(&self) -> usize;
243 fn len(&self) -> usize;
243
244
244 /// Returns whether the given file has an entry.
245 /// Returns whether the given file has an entry.
245 fn contains_key(&self, key: &HgPath)
246 fn contains_key(&self, key: &HgPath)
246 -> Result<bool, DirstateV2ParseError>;
247 -> Result<bool, DirstateV2ParseError>;
247
248
248 /// Returns the entry, if any, for the given file.
249 /// Returns the entry, if any, for the given file.
249 fn get(
250 fn get(
250 &self,
251 &self,
251 key: &HgPath,
252 key: &HgPath,
252 ) -> Result<Option<DirstateEntry>, DirstateV2ParseError>;
253 ) -> Result<Option<DirstateEntry>, DirstateV2ParseError>;
253
254
254 /// Returns a `(path, entry)` iterator of files that have an entry.
255 /// Returns a `(path, entry)` iterator of files that have an entry.
255 ///
256 ///
256 /// Because parse errors can happen during iteration, the iterated items
257 /// Because parse errors can happen during iteration, the iterated items
257 /// are `Result`s.
258 /// are `Result`s.
258 fn iter(&self) -> StateMapIter<'_>;
259 fn iter(&self) -> StateMapIter<'_>;
259
260
260 /// In the tree dirstate, return an iterator of "directory" (entry-less)
261 /// In the tree dirstate, return an iterator of "directory" (entry-less)
261 /// nodes with the data stored for them. This is for `hg debugdirstate
262 /// nodes with the data stored for them. This is for `hg debugdirstate
262 /// --dirs`.
263 /// --dirs`.
263 ///
264 ///
264 /// In the flat dirstate, returns an empty iterator.
265 /// In the flat dirstate, returns an empty iterator.
265 ///
266 ///
266 /// Because parse errors can happen during iteration, the iterated items
267 /// Because parse errors can happen during iteration, the iterated items
267 /// are `Result`s.
268 /// are `Result`s.
268 fn iter_directories(
269 fn iter_directories(
269 &self,
270 &self,
270 ) -> Box<
271 ) -> Box<
271 dyn Iterator<
272 dyn Iterator<
272 Item = Result<
273 Item = Result<
273 (&HgPath, Option<Timestamp>),
274 (&HgPath, Option<Timestamp>),
274 DirstateV2ParseError,
275 DirstateV2ParseError,
275 >,
276 >,
276 > + Send
277 > + Send
277 + '_,
278 + '_,
278 >;
279 >;
279 }
280 }
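
For illustration only (not part of this changeset): a minimal sketch of calling through the trait above via dynamic dispatch, so the same helper works with either the flat or the tree dirstate map. The `directory_status` helper name and the imports are assumptions; only trait methods shown above, with the signatures shown above, are used.

use hg::dirstate_tree::dispatch::DirstateMapMethods;
use hg::utils::hg_path::HgPath;
use hg::DirstateError;

// Works with any implementation behind the trait object (flat or tree map).
fn directory_status(
    map: &mut (dyn DirstateMapMethods + Send),
    directory: &HgPath,
) -> Result<(bool, bool), DirstateError> {
    let has_tracked = map.has_tracked_dir(directory)?; // contains tracked files
    let has_any = map.has_dir(directory)?; // contains any file with an entry
    Ok((has_tracked, has_any))
}
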
280
281
281 impl DirstateMapMethods for DirstateMap {
282 impl DirstateMapMethods for DirstateMap {
282 fn clear(&mut self) {
283 fn clear(&mut self) {
283 self.clear()
284 self.clear()
284 }
285 }
285
286
286 fn add_file(
287 fn add_file(
287 &mut self,
288 &mut self,
288 filename: &HgPath,
289 filename: &HgPath,
289 entry: DirstateEntry,
290 entry: DirstateEntry,
291 added: bool,
290 from_p2: bool,
292 from_p2: bool,
291 possibly_dirty: bool,
293 possibly_dirty: bool,
292 ) -> Result<(), DirstateError> {
294 ) -> Result<(), DirstateError> {
293 self.add_file(filename, entry, from_p2, possibly_dirty)
295 self.add_file(filename, entry, added, from_p2, possibly_dirty)
294 }
296 }
295
297
296 fn remove_file(
298 fn remove_file(
297 &mut self,
299 &mut self,
298 filename: &HgPath,
300 filename: &HgPath,
299 in_merge: bool,
301 in_merge: bool,
300 ) -> Result<(), DirstateError> {
302 ) -> Result<(), DirstateError> {
301 self.remove_file(filename, in_merge)
303 self.remove_file(filename, in_merge)
302 }
304 }
303
305
304 fn drop_file(
306 fn drop_file(
305 &mut self,
307 &mut self,
306 filename: &HgPath,
308 filename: &HgPath,
307 old_state: EntryState,
309 old_state: EntryState,
308 ) -> Result<bool, DirstateError> {
310 ) -> Result<bool, DirstateError> {
309 self.drop_file(filename, old_state)
311 self.drop_file(filename, old_state)
310 }
312 }
311
313
312 fn clear_ambiguous_times(
314 fn clear_ambiguous_times(
313 &mut self,
315 &mut self,
314 filenames: Vec<HgPathBuf>,
316 filenames: Vec<HgPathBuf>,
315 now: i32,
317 now: i32,
316 ) -> Result<(), DirstateV2ParseError> {
318 ) -> Result<(), DirstateV2ParseError> {
317 Ok(self.clear_ambiguous_times(filenames, now))
319 Ok(self.clear_ambiguous_times(filenames, now))
318 }
320 }
319
321
320 fn non_normal_entries_contains(
322 fn non_normal_entries_contains(
321 &mut self,
323 &mut self,
322 key: &HgPath,
324 key: &HgPath,
323 ) -> Result<bool, DirstateV2ParseError> {
325 ) -> Result<bool, DirstateV2ParseError> {
324 let (non_normal, _other_parent) =
326 let (non_normal, _other_parent) =
325 self.get_non_normal_other_parent_entries();
327 self.get_non_normal_other_parent_entries();
326 Ok(non_normal.contains(key))
328 Ok(non_normal.contains(key))
327 }
329 }
328
330
329 fn non_normal_entries_remove(&mut self, key: &HgPath) {
331 fn non_normal_entries_remove(&mut self, key: &HgPath) {
330 self.non_normal_entries_remove(key)
332 self.non_normal_entries_remove(key)
331 }
333 }
332
334
333 fn non_normal_or_other_parent_paths(
335 fn non_normal_or_other_parent_paths(
334 &mut self,
336 &mut self,
335 ) -> Box<dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + '_>
337 ) -> Box<dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + '_>
336 {
338 {
337 let (non_normal, other_parent) =
339 let (non_normal, other_parent) =
338 self.get_non_normal_other_parent_entries();
340 self.get_non_normal_other_parent_entries();
339 Box::new(non_normal.union(other_parent).map(|p| Ok(&**p)))
341 Box::new(non_normal.union(other_parent).map(|p| Ok(&**p)))
340 }
342 }
341
343
342 fn set_non_normal_other_parent_entries(&mut self, force: bool) {
344 fn set_non_normal_other_parent_entries(&mut self, force: bool) {
343 self.set_non_normal_other_parent_entries(force)
345 self.set_non_normal_other_parent_entries(force)
344 }
346 }
345
347
346 fn iter_non_normal_paths(
348 fn iter_non_normal_paths(
347 &mut self,
349 &mut self,
348 ) -> Box<
350 ) -> Box<
349 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
351 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
350 > {
352 > {
351 let (non_normal, _other_parent) =
353 let (non_normal, _other_parent) =
352 self.get_non_normal_other_parent_entries();
354 self.get_non_normal_other_parent_entries();
353 Box::new(non_normal.iter().map(|p| Ok(&**p)))
355 Box::new(non_normal.iter().map(|p| Ok(&**p)))
354 }
356 }
355
357
356 fn iter_non_normal_paths_panic(
358 fn iter_non_normal_paths_panic(
357 &self,
359 &self,
358 ) -> Box<
360 ) -> Box<
359 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
361 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
360 > {
362 > {
361 let (non_normal, _other_parent) =
363 let (non_normal, _other_parent) =
362 self.get_non_normal_other_parent_entries_panic();
364 self.get_non_normal_other_parent_entries_panic();
363 Box::new(non_normal.iter().map(|p| Ok(&**p)))
365 Box::new(non_normal.iter().map(|p| Ok(&**p)))
364 }
366 }
365
367
366 fn iter_other_parent_paths(
368 fn iter_other_parent_paths(
367 &mut self,
369 &mut self,
368 ) -> Box<
370 ) -> Box<
369 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
371 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
370 > {
372 > {
371 let (_non_normal, other_parent) =
373 let (_non_normal, other_parent) =
372 self.get_non_normal_other_parent_entries();
374 self.get_non_normal_other_parent_entries();
373 Box::new(other_parent.iter().map(|p| Ok(&**p)))
375 Box::new(other_parent.iter().map(|p| Ok(&**p)))
374 }
376 }
375
377
376 fn has_tracked_dir(
378 fn has_tracked_dir(
377 &mut self,
379 &mut self,
378 directory: &HgPath,
380 directory: &HgPath,
379 ) -> Result<bool, DirstateError> {
381 ) -> Result<bool, DirstateError> {
380 self.has_tracked_dir(directory)
382 self.has_tracked_dir(directory)
381 }
383 }
382
384
383 fn has_dir(&mut self, directory: &HgPath) -> Result<bool, DirstateError> {
385 fn has_dir(&mut self, directory: &HgPath) -> Result<bool, DirstateError> {
384 self.has_dir(directory)
386 self.has_dir(directory)
385 }
387 }
386
388
387 fn pack_v1(
389 fn pack_v1(
388 &mut self,
390 &mut self,
389 parents: DirstateParents,
391 parents: DirstateParents,
390 now: Timestamp,
392 now: Timestamp,
391 ) -> Result<Vec<u8>, DirstateError> {
393 ) -> Result<Vec<u8>, DirstateError> {
392 self.pack(parents, now)
394 self.pack(parents, now)
393 }
395 }
394
396
395 fn pack_v2(
397 fn pack_v2(
396 &mut self,
398 &mut self,
397 _parents: DirstateParents,
399 _parents: DirstateParents,
398 _now: Timestamp,
400 _now: Timestamp,
399 ) -> Result<Vec<u8>, DirstateError> {
401 ) -> Result<Vec<u8>, DirstateError> {
400 panic!(
402 panic!(
401 "should have used dirstate_tree::DirstateMap to use the v2 format"
403 "should have used dirstate_tree::DirstateMap to use the v2 format"
402 )
404 )
403 }
405 }
404
406
405 fn status<'a>(
407 fn status<'a>(
406 &'a mut self,
408 &'a mut self,
407 matcher: &'a (dyn Matcher + Sync),
409 matcher: &'a (dyn Matcher + Sync),
408 root_dir: PathBuf,
410 root_dir: PathBuf,
409 ignore_files: Vec<PathBuf>,
411 ignore_files: Vec<PathBuf>,
410 options: StatusOptions,
412 options: StatusOptions,
411 ) -> Result<(DirstateStatus<'a>, Vec<PatternFileWarning>), StatusError>
413 ) -> Result<(DirstateStatus<'a>, Vec<PatternFileWarning>), StatusError>
412 {
414 {
413 crate::status(self, matcher, root_dir, ignore_files, options)
415 crate::status(self, matcher, root_dir, ignore_files, options)
414 }
416 }
415
417
416 fn copy_map_len(&self) -> usize {
418 fn copy_map_len(&self) -> usize {
417 self.copy_map.len()
419 self.copy_map.len()
418 }
420 }
419
421
420 fn copy_map_iter(&self) -> CopyMapIter<'_> {
422 fn copy_map_iter(&self) -> CopyMapIter<'_> {
421 Box::new(
423 Box::new(
422 self.copy_map
424 self.copy_map
423 .iter()
425 .iter()
424 .map(|(key, value)| Ok((&**key, &**value))),
426 .map(|(key, value)| Ok((&**key, &**value))),
425 )
427 )
426 }
428 }
427
429
428 fn copy_map_contains_key(
430 fn copy_map_contains_key(
429 &self,
431 &self,
430 key: &HgPath,
432 key: &HgPath,
431 ) -> Result<bool, DirstateV2ParseError> {
433 ) -> Result<bool, DirstateV2ParseError> {
432 Ok(self.copy_map.contains_key(key))
434 Ok(self.copy_map.contains_key(key))
433 }
435 }
434
436
435 fn copy_map_get(
437 fn copy_map_get(
436 &self,
438 &self,
437 key: &HgPath,
439 key: &HgPath,
438 ) -> Result<Option<&HgPath>, DirstateV2ParseError> {
440 ) -> Result<Option<&HgPath>, DirstateV2ParseError> {
439 Ok(self.copy_map.get(key).map(|p| &**p))
441 Ok(self.copy_map.get(key).map(|p| &**p))
440 }
442 }
441
443
442 fn copy_map_remove(
444 fn copy_map_remove(
443 &mut self,
445 &mut self,
444 key: &HgPath,
446 key: &HgPath,
445 ) -> Result<Option<HgPathBuf>, DirstateV2ParseError> {
447 ) -> Result<Option<HgPathBuf>, DirstateV2ParseError> {
446 Ok(self.copy_map.remove(key))
448 Ok(self.copy_map.remove(key))
447 }
449 }
448
450
449 fn copy_map_insert(
451 fn copy_map_insert(
450 &mut self,
452 &mut self,
451 key: HgPathBuf,
453 key: HgPathBuf,
452 value: HgPathBuf,
454 value: HgPathBuf,
453 ) -> Result<Option<HgPathBuf>, DirstateV2ParseError> {
455 ) -> Result<Option<HgPathBuf>, DirstateV2ParseError> {
454 Ok(self.copy_map.insert(key, value))
456 Ok(self.copy_map.insert(key, value))
455 }
457 }
456
458
457 fn len(&self) -> usize {
459 fn len(&self) -> usize {
458 (&**self).len()
460 (&**self).len()
459 }
461 }
460
462
461 fn contains_key(
463 fn contains_key(
462 &self,
464 &self,
463 key: &HgPath,
465 key: &HgPath,
464 ) -> Result<bool, DirstateV2ParseError> {
466 ) -> Result<bool, DirstateV2ParseError> {
465 Ok((&**self).contains_key(key))
467 Ok((&**self).contains_key(key))
466 }
468 }
467
469
468 fn get(
470 fn get(
469 &self,
471 &self,
470 key: &HgPath,
472 key: &HgPath,
471 ) -> Result<Option<DirstateEntry>, DirstateV2ParseError> {
473 ) -> Result<Option<DirstateEntry>, DirstateV2ParseError> {
472 Ok((&**self).get(key).cloned())
474 Ok((&**self).get(key).cloned())
473 }
475 }
474
476
475 fn iter(&self) -> StateMapIter<'_> {
477 fn iter(&self) -> StateMapIter<'_> {
476 Box::new((&**self).iter().map(|(key, value)| Ok((&**key, *value))))
478 Box::new((&**self).iter().map(|(key, value)| Ok((&**key, *value))))
477 }
479 }
478
480
479 fn iter_directories(
481 fn iter_directories(
480 &self,
482 &self,
481 ) -> Box<
483 ) -> Box<
482 dyn Iterator<
484 dyn Iterator<
483 Item = Result<
485 Item = Result<
484 (&HgPath, Option<Timestamp>),
486 (&HgPath, Option<Timestamp>),
485 DirstateV2ParseError,
487 DirstateV2ParseError,
486 >,
488 >,
487 > + Send
489 > + Send
488 + '_,
490 + '_,
489 > {
491 > {
490 Box::new(std::iter::empty())
492 Box::new(std::iter::empty())
491 }
493 }
492 }
494 }
@@ -1,581 +1,594 b''
1 // dirstate_map.rs
1 // dirstate_map.rs
2 //
2 //
3 // Copyright 2019 Raphaël Gomès <rgomes@octobus.net>
3 // Copyright 2019 Raphaël Gomès <rgomes@octobus.net>
4 //
4 //
5 // This software may be used and distributed according to the terms of the
5 // This software may be used and distributed according to the terms of the
6 // GNU General Public License version 2 or any later version.
6 // GNU General Public License version 2 or any later version.
7
7
8 //! Bindings for the `hg::dirstate::dirstate_map` file provided by the
8 //! Bindings for the `hg::dirstate::dirstate_map` file provided by the
9 //! `hg-core` package.
9 //! `hg-core` package.
10
10
11 use std::cell::{RefCell, RefMut};
11 use std::cell::{RefCell, RefMut};
12 use std::convert::TryInto;
12 use std::convert::TryInto;
13
13
14 use cpython::{
14 use cpython::{
15 exc, ObjectProtocol, PyBool, PyBytes, PyClone, PyDict, PyErr, PyList,
15 exc, ObjectProtocol, PyBool, PyBytes, PyClone, PyDict, PyErr, PyList,
16 PyObject, PyResult, PySet, PyString, Python, PythonObject, ToPyObject,
16 PyObject, PyResult, PySet, PyString, Python, PythonObject, ToPyObject,
17 UnsafePyLeaked,
17 UnsafePyLeaked,
18 };
18 };
19
19
20 use crate::{
20 use crate::{
21 dirstate::copymap::{CopyMap, CopyMapItemsIterator, CopyMapKeysIterator},
21 dirstate::copymap::{CopyMap, CopyMapItemsIterator, CopyMapKeysIterator},
22 dirstate::make_dirstate_tuple,
22 dirstate::make_dirstate_tuple,
23 dirstate::non_normal_entries::{
23 dirstate::non_normal_entries::{
24 NonNormalEntries, NonNormalEntriesIterator,
24 NonNormalEntries, NonNormalEntriesIterator,
25 },
25 },
26 dirstate::owning::OwningDirstateMap,
26 dirstate::owning::OwningDirstateMap,
27 parsers::dirstate_parents_to_pytuple,
27 parsers::dirstate_parents_to_pytuple,
28 };
28 };
29 use hg::{
29 use hg::{
30 dirstate::parsers::Timestamp,
30 dirstate::parsers::Timestamp,
31 dirstate::MTIME_UNSET,
31 dirstate::MTIME_UNSET,
32 dirstate::SIZE_NON_NORMAL,
32 dirstate::SIZE_NON_NORMAL,
33 dirstate_tree::dispatch::DirstateMapMethods,
33 dirstate_tree::dispatch::DirstateMapMethods,
34 dirstate_tree::on_disk::DirstateV2ParseError,
34 dirstate_tree::on_disk::DirstateV2ParseError,
35 errors::HgError,
35 errors::HgError,
36 revlog::Node,
36 revlog::Node,
37 utils::files::normalize_case,
37 utils::files::normalize_case,
38 utils::hg_path::{HgPath, HgPathBuf},
38 utils::hg_path::{HgPath, HgPathBuf},
39 DirstateEntry, DirstateError, DirstateMap as RustDirstateMap,
39 DirstateEntry, DirstateError, DirstateMap as RustDirstateMap,
40 DirstateParents, EntryState, StateMapIter,
40 DirstateParents, EntryState, StateMapIter,
41 };
41 };
42
42
43 // TODO
43 // TODO
44 // This object needs to share references to multiple members of its Rust
44 // This object needs to share references to multiple members of its Rust
45 // inner struct, namely `copy_map`, `dirs` and `all_dirs`.
45 // inner struct, namely `copy_map`, `dirs` and `all_dirs`.
46 // Right now `CopyMap` is done, but it needs to have an explicit reference
46 // Right now `CopyMap` is done, but it needs to have an explicit reference
47 // to `RustDirstateMap` which itself needs to have an encapsulation for
47 // to `RustDirstateMap` which itself needs to have an encapsulation for
48 // every method in `CopyMap` (copymapcopy, etc.).
48 // every method in `CopyMap` (copymapcopy, etc.).
49 // This is ugly and hard to maintain.
49 // This is ugly and hard to maintain.
50 // The same logic applies to `dirs` and `all_dirs`, however the `Dirs`
50 // The same logic applies to `dirs` and `all_dirs`, however the `Dirs`
51 // `py_class!` is already implemented and does not mention
51 // `py_class!` is already implemented and does not mention
52 // `RustDirstateMap`, rightfully so.
52 // `RustDirstateMap`, rightfully so.
53 // All attributes also have to have a separate refcount data attribute for
53 // All attributes also have to have a separate refcount data attribute for
54 // leaks, with all methods that go along for reference sharing.
54 // leaks, with all methods that go along for reference sharing.
55 py_class!(pub class DirstateMap |py| {
55 py_class!(pub class DirstateMap |py| {
56 @shared data inner: Box<dyn DirstateMapMethods + Send>;
56 @shared data inner: Box<dyn DirstateMapMethods + Send>;
57
57
58 /// Returns a `(dirstate_map, parents)` tuple
58 /// Returns a `(dirstate_map, parents)` tuple
59 @staticmethod
59 @staticmethod
60 def new(
60 def new(
61 use_dirstate_tree: bool,
61 use_dirstate_tree: bool,
62 use_dirstate_v2: bool,
62 use_dirstate_v2: bool,
63 on_disk: PyBytes,
63 on_disk: PyBytes,
64 ) -> PyResult<PyObject> {
64 ) -> PyResult<PyObject> {
65 let dirstate_error = |e: DirstateError| {
65 let dirstate_error = |e: DirstateError| {
66 PyErr::new::<exc::OSError, _>(py, format!("Dirstate error: {:?}", e))
66 PyErr::new::<exc::OSError, _>(py, format!("Dirstate error: {:?}", e))
67 };
67 };
68 let (inner, parents) = if use_dirstate_tree || use_dirstate_v2 {
68 let (inner, parents) = if use_dirstate_tree || use_dirstate_v2 {
69 let (map, parents) =
69 let (map, parents) =
70 OwningDirstateMap::new(py, on_disk, use_dirstate_v2)
70 OwningDirstateMap::new(py, on_disk, use_dirstate_v2)
71 .map_err(dirstate_error)?;
71 .map_err(dirstate_error)?;
72 (Box::new(map) as _, parents)
72 (Box::new(map) as _, parents)
73 } else {
73 } else {
74 let bytes = on_disk.data(py);
74 let bytes = on_disk.data(py);
75 let mut map = RustDirstateMap::default();
75 let mut map = RustDirstateMap::default();
76 let parents = map.read(bytes).map_err(dirstate_error)?;
76 let parents = map.read(bytes).map_err(dirstate_error)?;
77 (Box::new(map) as _, parents)
77 (Box::new(map) as _, parents)
78 };
78 };
79 let map = Self::create_instance(py, inner)?;
79 let map = Self::create_instance(py, inner)?;
80 let parents = parents.map(|p| dirstate_parents_to_pytuple(py, &p));
80 let parents = parents.map(|p| dirstate_parents_to_pytuple(py, &p));
81 Ok((map, parents).to_py_object(py).into_object())
81 Ok((map, parents).to_py_object(py).into_object())
82 }
82 }
83
83
84 def clear(&self) -> PyResult<PyObject> {
84 def clear(&self) -> PyResult<PyObject> {
85 self.inner(py).borrow_mut().clear();
85 self.inner(py).borrow_mut().clear();
86 Ok(py.None())
86 Ok(py.None())
87 }
87 }
88
88
89 def get(
89 def get(
90 &self,
90 &self,
91 key: PyObject,
91 key: PyObject,
92 default: Option<PyObject> = None
92 default: Option<PyObject> = None
93 ) -> PyResult<Option<PyObject>> {
93 ) -> PyResult<Option<PyObject>> {
94 let key = key.extract::<PyBytes>(py)?;
94 let key = key.extract::<PyBytes>(py)?;
95 match self
95 match self
96 .inner(py)
96 .inner(py)
97 .borrow()
97 .borrow()
98 .get(HgPath::new(key.data(py)))
98 .get(HgPath::new(key.data(py)))
99 .map_err(|e| v2_error(py, e))?
99 .map_err(|e| v2_error(py, e))?
100 {
100 {
101 Some(entry) => {
101 Some(entry) => {
102 Ok(Some(make_dirstate_tuple(py, &entry)?))
102 Ok(Some(make_dirstate_tuple(py, &entry)?))
103 },
103 },
104 None => Ok(default)
104 None => Ok(default)
105 }
105 }
106 }
106 }
107
107
108 def addfile(
108 def addfile(
109 &self,
109 &self,
110 f: PyObject,
110 f: PyObject,
111 state: PyObject,
111 state: PyObject,
112 mode: PyObject,
112 mode: PyObject,
113 size: PyObject,
113 size: PyObject,
114 mtime: PyObject,
114 mtime: PyObject,
115 added: PyObject,
115 from_p2: PyObject,
116 from_p2: PyObject,
116 possibly_dirty: PyObject,
117 possibly_dirty: PyObject,
117 ) -> PyResult<PyObject> {
118 ) -> PyResult<PyObject> {
118 let f = f.extract::<PyBytes>(py)?;
119 let f = f.extract::<PyBytes>(py)?;
119 let filename = HgPath::new(f.data(py));
120 let filename = HgPath::new(f.data(py));
120 let state = state.extract::<PyBytes>(py)?.data(py)[0]
121 let state = if state.is_none(py) {
122 // Arbitrary default value
123 EntryState::Normal
124 } else {
125 state.extract::<PyBytes>(py)?.data(py)[0]
121 .try_into()
126 .try_into()
122 .map_err(|e: HgError| {
127 .map_err(|e: HgError| {
123 PyErr::new::<exc::ValueError, _>(py, e.to_string())
128 PyErr::new::<exc::ValueError, _>(py, e.to_string())
124 })?;
129 })?
125 let mode = mode.extract(py)?;
130 };
131 let mode = if mode.is_none(py) {
132 // fallback default value
133 0
134 } else {
135 mode.extract(py)?
136 };
126 let size = if size.is_none(py) {
137 let size = if size.is_none(py) {
127 // fallback default value
138 // fallback default value
128 SIZE_NON_NORMAL
139 SIZE_NON_NORMAL
129 } else {
140 } else {
130 size.extract(py)?
141 size.extract(py)?
131 };
142 };
132 let mtime = if mtime.is_none(py) {
143 let mtime = if mtime.is_none(py) {
133 // fallback default value
144 // fallback default value
134 MTIME_UNSET
145 MTIME_UNSET
135 } else {
146 } else {
136 mtime.extract(py)?
147 mtime.extract(py)?
137 };
148 };
138 let entry = DirstateEntry {
149 let entry = DirstateEntry {
139 state: state,
150 state: state,
140 mode: mode,
151 mode: mode,
141 size: size,
152 size: size,
142 mtime: mtime,
153 mtime: mtime,
143 };
154 };
155 let added = added.extract::<PyBool>(py)?.is_true();
144 let from_p2 = from_p2.extract::<PyBool>(py)?.is_true();
156 let from_p2 = from_p2.extract::<PyBool>(py)?.is_true();
145 let possibly_dirty = possibly_dirty.extract::<PyBool>(py)?.is_true();
157 let possibly_dirty = possibly_dirty.extract::<PyBool>(py)?.is_true();
146 self.inner(py).borrow_mut().add_file(
158 self.inner(py).borrow_mut().add_file(
147 filename,
159 filename,
148 entry,
160 entry,
161 added,
149 from_p2,
162 from_p2,
150 possibly_dirty
163 possibly_dirty
151 ).and(Ok(py.None())).or_else(|e: DirstateError| {
164 ).and(Ok(py.None())).or_else(|e: DirstateError| {
152 Err(PyErr::new::<exc::ValueError, _>(py, e.to_string()))
165 Err(PyErr::new::<exc::ValueError, _>(py, e.to_string()))
153 })
166 })
154 }
167 }
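
The defaulting rules used by `addfile` above, restated as a plain Rust helper for clarity. This is a sketch, not code from the changeset: the helper name is invented and the integer fields are assumed to be `i32`, matching how the binding extracts them at this revision.

use hg::dirstate::{MTIME_UNSET, SIZE_NON_NORMAL};
use hg::{DirstateEntry, EntryState};

fn entry_with_defaults(
    state: Option<EntryState>,
    mode: Option<i32>,
    size: Option<i32>,
    mtime: Option<i32>,
) -> DirstateEntry {
    DirstateEntry {
        state: state.unwrap_or(EntryState::Normal), // arbitrary default, as in `addfile`
        mode: mode.unwrap_or(0),                    // fallback default value
        size: size.unwrap_or(SIZE_NON_NORMAL),      // fallback default value
        mtime: mtime.unwrap_or(MTIME_UNSET),        // fallback default value
    }
}
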
155
168
156 def removefile(
169 def removefile(
157 &self,
170 &self,
158 f: PyObject,
171 f: PyObject,
159 in_merge: PyObject
172 in_merge: PyObject
160 ) -> PyResult<PyObject> {
173 ) -> PyResult<PyObject> {
161 self.inner(py).borrow_mut()
174 self.inner(py).borrow_mut()
162 .remove_file(
175 .remove_file(
163 HgPath::new(f.extract::<PyBytes>(py)?.data(py)),
176 HgPath::new(f.extract::<PyBytes>(py)?.data(py)),
164 in_merge.extract::<PyBool>(py)?.is_true(),
177 in_merge.extract::<PyBool>(py)?.is_true(),
165 )
178 )
166 .or_else(|_| {
179 .or_else(|_| {
167 Err(PyErr::new::<exc::OSError, _>(
180 Err(PyErr::new::<exc::OSError, _>(
168 py,
181 py,
169 "Dirstate error".to_string(),
182 "Dirstate error".to_string(),
170 ))
183 ))
171 })?;
184 })?;
172 Ok(py.None())
185 Ok(py.None())
173 }
186 }
174
187
175 def dropfile(
188 def dropfile(
176 &self,
189 &self,
177 f: PyObject,
190 f: PyObject,
178 oldstate: PyObject
191 oldstate: PyObject
179 ) -> PyResult<PyBool> {
192 ) -> PyResult<PyBool> {
180 self.inner(py).borrow_mut()
193 self.inner(py).borrow_mut()
181 .drop_file(
194 .drop_file(
182 HgPath::new(f.extract::<PyBytes>(py)?.data(py)),
195 HgPath::new(f.extract::<PyBytes>(py)?.data(py)),
183 oldstate.extract::<PyBytes>(py)?.data(py)[0]
196 oldstate.extract::<PyBytes>(py)?.data(py)[0]
184 .try_into()
197 .try_into()
185 .map_err(|e: HgError| {
198 .map_err(|e: HgError| {
186 PyErr::new::<exc::ValueError, _>(py, e.to_string())
199 PyErr::new::<exc::ValueError, _>(py, e.to_string())
187 })?,
200 })?,
188 )
201 )
189 .and_then(|b| Ok(b.to_py_object(py)))
202 .and_then(|b| Ok(b.to_py_object(py)))
190 .or_else(|e| {
203 .or_else(|e| {
191 Err(PyErr::new::<exc::OSError, _>(
204 Err(PyErr::new::<exc::OSError, _>(
192 py,
205 py,
193 format!("Dirstate error: {}", e.to_string()),
206 format!("Dirstate error: {}", e.to_string()),
194 ))
207 ))
195 })
208 })
196 }
209 }
197
210
198 def clearambiguoustimes(
211 def clearambiguoustimes(
199 &self,
212 &self,
200 files: PyObject,
213 files: PyObject,
201 now: PyObject
214 now: PyObject
202 ) -> PyResult<PyObject> {
215 ) -> PyResult<PyObject> {
203 let files: PyResult<Vec<HgPathBuf>> = files
216 let files: PyResult<Vec<HgPathBuf>> = files
204 .iter(py)?
217 .iter(py)?
205 .map(|filename| {
218 .map(|filename| {
206 Ok(HgPathBuf::from_bytes(
219 Ok(HgPathBuf::from_bytes(
207 filename?.extract::<PyBytes>(py)?.data(py),
220 filename?.extract::<PyBytes>(py)?.data(py),
208 ))
221 ))
209 })
222 })
210 .collect();
223 .collect();
211 self.inner(py)
224 self.inner(py)
212 .borrow_mut()
225 .borrow_mut()
213 .clear_ambiguous_times(files?, now.extract(py)?)
226 .clear_ambiguous_times(files?, now.extract(py)?)
214 .map_err(|e| v2_error(py, e))?;
227 .map_err(|e| v2_error(py, e))?;
215 Ok(py.None())
228 Ok(py.None())
216 }
229 }
217
230
218 def other_parent_entries(&self) -> PyResult<PyObject> {
231 def other_parent_entries(&self) -> PyResult<PyObject> {
219 let mut inner_shared = self.inner(py).borrow_mut();
232 let mut inner_shared = self.inner(py).borrow_mut();
220 let set = PySet::empty(py)?;
233 let set = PySet::empty(py)?;
221 for path in inner_shared.iter_other_parent_paths() {
234 for path in inner_shared.iter_other_parent_paths() {
222 let path = path.map_err(|e| v2_error(py, e))?;
235 let path = path.map_err(|e| v2_error(py, e))?;
223 set.add(py, PyBytes::new(py, path.as_bytes()))?;
236 set.add(py, PyBytes::new(py, path.as_bytes()))?;
224 }
237 }
225 Ok(set.into_object())
238 Ok(set.into_object())
226 }
239 }
227
240
228 def non_normal_entries(&self) -> PyResult<NonNormalEntries> {
241 def non_normal_entries(&self) -> PyResult<NonNormalEntries> {
229 NonNormalEntries::from_inner(py, self.clone_ref(py))
242 NonNormalEntries::from_inner(py, self.clone_ref(py))
230 }
243 }
231
244
232 def non_normal_entries_contains(&self, key: PyObject) -> PyResult<bool> {
245 def non_normal_entries_contains(&self, key: PyObject) -> PyResult<bool> {
233 let key = key.extract::<PyBytes>(py)?;
246 let key = key.extract::<PyBytes>(py)?;
234 self.inner(py)
247 self.inner(py)
235 .borrow_mut()
248 .borrow_mut()
236 .non_normal_entries_contains(HgPath::new(key.data(py)))
249 .non_normal_entries_contains(HgPath::new(key.data(py)))
237 .map_err(|e| v2_error(py, e))
250 .map_err(|e| v2_error(py, e))
238 }
251 }
239
252
240 def non_normal_entries_display(&self) -> PyResult<PyString> {
253 def non_normal_entries_display(&self) -> PyResult<PyString> {
241 let mut inner = self.inner(py).borrow_mut();
254 let mut inner = self.inner(py).borrow_mut();
242 let paths = inner
255 let paths = inner
243 .iter_non_normal_paths()
256 .iter_non_normal_paths()
244 .collect::<Result<Vec<_>, _>>()
257 .collect::<Result<Vec<_>, _>>()
245 .map_err(|e| v2_error(py, e))?;
258 .map_err(|e| v2_error(py, e))?;
246 let formatted = format!("NonNormalEntries: {}", hg::utils::join_display(paths, ", "));
259 let formatted = format!("NonNormalEntries: {}", hg::utils::join_display(paths, ", "));
247 Ok(PyString::new(py, &formatted))
260 Ok(PyString::new(py, &formatted))
248 }
261 }
249
262
250 def non_normal_entries_remove(&self, key: PyObject) -> PyResult<PyObject> {
263 def non_normal_entries_remove(&self, key: PyObject) -> PyResult<PyObject> {
251 let key = key.extract::<PyBytes>(py)?;
264 let key = key.extract::<PyBytes>(py)?;
252 self
265 self
253 .inner(py)
266 .inner(py)
254 .borrow_mut()
267 .borrow_mut()
255 .non_normal_entries_remove(HgPath::new(key.data(py)));
268 .non_normal_entries_remove(HgPath::new(key.data(py)));
256 Ok(py.None())
269 Ok(py.None())
257 }
270 }
258
271
259 def non_normal_or_other_parent_paths(&self) -> PyResult<PyList> {
272 def non_normal_or_other_parent_paths(&self) -> PyResult<PyList> {
260 let mut inner = self.inner(py).borrow_mut();
273 let mut inner = self.inner(py).borrow_mut();
261
274
262 let ret = PyList::new(py, &[]);
275 let ret = PyList::new(py, &[]);
263 for filename in inner.non_normal_or_other_parent_paths() {
276 for filename in inner.non_normal_or_other_parent_paths() {
264 let filename = filename.map_err(|e| v2_error(py, e))?;
277 let filename = filename.map_err(|e| v2_error(py, e))?;
265 let as_pystring = PyBytes::new(py, filename.as_bytes());
278 let as_pystring = PyBytes::new(py, filename.as_bytes());
266 ret.append(py, as_pystring.into_object());
279 ret.append(py, as_pystring.into_object());
267 }
280 }
268 Ok(ret)
281 Ok(ret)
269 }
282 }
270
283
271 def non_normal_entries_iter(&self) -> PyResult<NonNormalEntriesIterator> {
284 def non_normal_entries_iter(&self) -> PyResult<NonNormalEntriesIterator> {
272 // Make sure the sets are defined before we no longer have a mutable
285 // Make sure the sets are defined before we no longer have a mutable
273 // reference to the dmap.
286 // reference to the dmap.
274 self.inner(py)
287 self.inner(py)
275 .borrow_mut()
288 .borrow_mut()
276 .set_non_normal_other_parent_entries(false);
289 .set_non_normal_other_parent_entries(false);
277
290
278 let leaked_ref = self.inner(py).leak_immutable();
291 let leaked_ref = self.inner(py).leak_immutable();
279
292
280 NonNormalEntriesIterator::from_inner(py, unsafe {
293 NonNormalEntriesIterator::from_inner(py, unsafe {
281 leaked_ref.map(py, |o| {
294 leaked_ref.map(py, |o| {
282 o.iter_non_normal_paths_panic()
295 o.iter_non_normal_paths_panic()
283 })
296 })
284 })
297 })
285 }
298 }
286
299
287 def hastrackeddir(&self, d: PyObject) -> PyResult<PyBool> {
300 def hastrackeddir(&self, d: PyObject) -> PyResult<PyBool> {
288 let d = d.extract::<PyBytes>(py)?;
301 let d = d.extract::<PyBytes>(py)?;
289 Ok(self.inner(py).borrow_mut()
302 Ok(self.inner(py).borrow_mut()
290 .has_tracked_dir(HgPath::new(d.data(py)))
303 .has_tracked_dir(HgPath::new(d.data(py)))
291 .map_err(|e| {
304 .map_err(|e| {
292 PyErr::new::<exc::ValueError, _>(py, e.to_string())
305 PyErr::new::<exc::ValueError, _>(py, e.to_string())
293 })?
306 })?
294 .to_py_object(py))
307 .to_py_object(py))
295 }
308 }
296
309
297 def hasdir(&self, d: PyObject) -> PyResult<PyBool> {
310 def hasdir(&self, d: PyObject) -> PyResult<PyBool> {
298 let d = d.extract::<PyBytes>(py)?;
311 let d = d.extract::<PyBytes>(py)?;
299 Ok(self.inner(py).borrow_mut()
312 Ok(self.inner(py).borrow_mut()
300 .has_dir(HgPath::new(d.data(py)))
313 .has_dir(HgPath::new(d.data(py)))
301 .map_err(|e| {
314 .map_err(|e| {
302 PyErr::new::<exc::ValueError, _>(py, e.to_string())
315 PyErr::new::<exc::ValueError, _>(py, e.to_string())
303 })?
316 })?
304 .to_py_object(py))
317 .to_py_object(py))
305 }
318 }
306
319
307 def write(
320 def write(
308 &self,
321 &self,
309 use_dirstate_v2: bool,
322 use_dirstate_v2: bool,
310 p1: PyObject,
323 p1: PyObject,
311 p2: PyObject,
324 p2: PyObject,
312 now: PyObject
325 now: PyObject
313 ) -> PyResult<PyBytes> {
326 ) -> PyResult<PyBytes> {
314 let now = Timestamp(now.extract(py)?);
327 let now = Timestamp(now.extract(py)?);
315 let parents = DirstateParents {
328 let parents = DirstateParents {
316 p1: extract_node_id(py, &p1)?,
329 p1: extract_node_id(py, &p1)?,
317 p2: extract_node_id(py, &p2)?,
330 p2: extract_node_id(py, &p2)?,
318 };
331 };
319
332
320 let mut inner = self.inner(py).borrow_mut();
333 let mut inner = self.inner(py).borrow_mut();
321 let result = if use_dirstate_v2 {
334 let result = if use_dirstate_v2 {
322 inner.pack_v2(parents, now)
335 inner.pack_v2(parents, now)
323 } else {
336 } else {
324 inner.pack_v1(parents, now)
337 inner.pack_v1(parents, now)
325 };
338 };
326 match result {
339 match result {
327 Ok(packed) => Ok(PyBytes::new(py, &packed)),
340 Ok(packed) => Ok(PyBytes::new(py, &packed)),
328 Err(_) => Err(PyErr::new::<exc::OSError, _>(
341 Err(_) => Err(PyErr::new::<exc::OSError, _>(
329 py,
342 py,
330 "Dirstate error".to_string(),
343 "Dirstate error".to_string(),
331 )),
344 )),
332 }
345 }
333 }
346 }
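
The `write` binding above picks between the two on-disk formats. A minimal sketch of that selection in plain Rust (the `serialize` helper name is an assumption), relying only on the `pack_v1`/`pack_v2` trait methods shown earlier in this diff:

use hg::dirstate::parsers::Timestamp;
use hg::dirstate_tree::dispatch::DirstateMapMethods;
use hg::{DirstateError, DirstateParents};

fn serialize(
    map: &mut (dyn DirstateMapMethods + Send),
    parents: DirstateParents,
    now: Timestamp,
    use_dirstate_v2: bool,
) -> Result<Vec<u8>, DirstateError> {
    if use_dirstate_v2 {
        // Only supported by the tree dirstate map; the flat map panics here.
        map.pack_v2(parents, now)
    } else {
        map.pack_v1(parents, now)
    }
}
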
334
347
335 def filefoldmapasdict(&self) -> PyResult<PyDict> {
348 def filefoldmapasdict(&self) -> PyResult<PyDict> {
336 let dict = PyDict::new(py);
349 let dict = PyDict::new(py);
337 for item in self.inner(py).borrow_mut().iter() {
350 for item in self.inner(py).borrow_mut().iter() {
338 let (path, entry) = item.map_err(|e| v2_error(py, e))?;
351 let (path, entry) = item.map_err(|e| v2_error(py, e))?;
339 if entry.state != EntryState::Removed {
352 if entry.state != EntryState::Removed {
340 let key = normalize_case(path);
353 let key = normalize_case(path);
341 let value = path;
354 let value = path;
342 dict.set_item(
355 dict.set_item(
343 py,
356 py,
344 PyBytes::new(py, key.as_bytes()).into_object(),
357 PyBytes::new(py, key.as_bytes()).into_object(),
345 PyBytes::new(py, value.as_bytes()).into_object(),
358 PyBytes::new(py, value.as_bytes()).into_object(),
346 )?;
359 )?;
347 }
360 }
348 }
361 }
349 Ok(dict)
362 Ok(dict)
350 }
363 }
351
364
352 def __len__(&self) -> PyResult<usize> {
365 def __len__(&self) -> PyResult<usize> {
353 Ok(self.inner(py).borrow().len())
366 Ok(self.inner(py).borrow().len())
354 }
367 }
355
368
356 def __contains__(&self, key: PyObject) -> PyResult<bool> {
369 def __contains__(&self, key: PyObject) -> PyResult<bool> {
357 let key = key.extract::<PyBytes>(py)?;
370 let key = key.extract::<PyBytes>(py)?;
358 self.inner(py)
371 self.inner(py)
359 .borrow()
372 .borrow()
360 .contains_key(HgPath::new(key.data(py)))
373 .contains_key(HgPath::new(key.data(py)))
361 .map_err(|e| v2_error(py, e))
374 .map_err(|e| v2_error(py, e))
362 }
375 }
363
376
364 def __getitem__(&self, key: PyObject) -> PyResult<PyObject> {
377 def __getitem__(&self, key: PyObject) -> PyResult<PyObject> {
365 let key = key.extract::<PyBytes>(py)?;
378 let key = key.extract::<PyBytes>(py)?;
366 let key = HgPath::new(key.data(py));
379 let key = HgPath::new(key.data(py));
367 match self
380 match self
368 .inner(py)
381 .inner(py)
369 .borrow()
382 .borrow()
370 .get(key)
383 .get(key)
371 .map_err(|e| v2_error(py, e))?
384 .map_err(|e| v2_error(py, e))?
372 {
385 {
373 Some(entry) => {
386 Some(entry) => {
374 Ok(make_dirstate_tuple(py, &entry)?)
387 Ok(make_dirstate_tuple(py, &entry)?)
375 },
388 },
376 None => Err(PyErr::new::<exc::KeyError, _>(
389 None => Err(PyErr::new::<exc::KeyError, _>(
377 py,
390 py,
378 String::from_utf8_lossy(key.as_bytes()),
391 String::from_utf8_lossy(key.as_bytes()),
379 )),
392 )),
380 }
393 }
381 }
394 }
382
395
383 def keys(&self) -> PyResult<DirstateMapKeysIterator> {
396 def keys(&self) -> PyResult<DirstateMapKeysIterator> {
384 let leaked_ref = self.inner(py).leak_immutable();
397 let leaked_ref = self.inner(py).leak_immutable();
385 DirstateMapKeysIterator::from_inner(
398 DirstateMapKeysIterator::from_inner(
386 py,
399 py,
387 unsafe { leaked_ref.map(py, |o| o.iter()) },
400 unsafe { leaked_ref.map(py, |o| o.iter()) },
388 )
401 )
389 }
402 }
390
403
391 def items(&self) -> PyResult<DirstateMapItemsIterator> {
404 def items(&self) -> PyResult<DirstateMapItemsIterator> {
392 let leaked_ref = self.inner(py).leak_immutable();
405 let leaked_ref = self.inner(py).leak_immutable();
393 DirstateMapItemsIterator::from_inner(
406 DirstateMapItemsIterator::from_inner(
394 py,
407 py,
395 unsafe { leaked_ref.map(py, |o| o.iter()) },
408 unsafe { leaked_ref.map(py, |o| o.iter()) },
396 )
409 )
397 }
410 }
398
411
399 def __iter__(&self) -> PyResult<DirstateMapKeysIterator> {
412 def __iter__(&self) -> PyResult<DirstateMapKeysIterator> {
400 let leaked_ref = self.inner(py).leak_immutable();
413 let leaked_ref = self.inner(py).leak_immutable();
401 DirstateMapKeysIterator::from_inner(
414 DirstateMapKeysIterator::from_inner(
402 py,
415 py,
403 unsafe { leaked_ref.map(py, |o| o.iter()) },
416 unsafe { leaked_ref.map(py, |o| o.iter()) },
404 )
417 )
405 }
418 }
406
419
407 // TODO all copymap* methods, see docstring above
420 // TODO all copymap* methods, see docstring above
408 def copymapcopy(&self) -> PyResult<PyDict> {
421 def copymapcopy(&self) -> PyResult<PyDict> {
409 let dict = PyDict::new(py);
422 let dict = PyDict::new(py);
410 for item in self.inner(py).borrow().copy_map_iter() {
423 for item in self.inner(py).borrow().copy_map_iter() {
411 let (key, value) = item.map_err(|e| v2_error(py, e))?;
424 let (key, value) = item.map_err(|e| v2_error(py, e))?;
412 dict.set_item(
425 dict.set_item(
413 py,
426 py,
414 PyBytes::new(py, key.as_bytes()),
427 PyBytes::new(py, key.as_bytes()),
415 PyBytes::new(py, value.as_bytes()),
428 PyBytes::new(py, value.as_bytes()),
416 )?;
429 )?;
417 }
430 }
418 Ok(dict)
431 Ok(dict)
419 }
432 }
420
433
421 def copymapgetitem(&self, key: PyObject) -> PyResult<PyBytes> {
434 def copymapgetitem(&self, key: PyObject) -> PyResult<PyBytes> {
422 let key = key.extract::<PyBytes>(py)?;
435 let key = key.extract::<PyBytes>(py)?;
423 match self
436 match self
424 .inner(py)
437 .inner(py)
425 .borrow()
438 .borrow()
426 .copy_map_get(HgPath::new(key.data(py)))
439 .copy_map_get(HgPath::new(key.data(py)))
427 .map_err(|e| v2_error(py, e))?
440 .map_err(|e| v2_error(py, e))?
428 {
441 {
429 Some(copy) => Ok(PyBytes::new(py, copy.as_bytes())),
442 Some(copy) => Ok(PyBytes::new(py, copy.as_bytes())),
430 None => Err(PyErr::new::<exc::KeyError, _>(
443 None => Err(PyErr::new::<exc::KeyError, _>(
431 py,
444 py,
432 String::from_utf8_lossy(key.data(py)),
445 String::from_utf8_lossy(key.data(py)),
433 )),
446 )),
434 }
447 }
435 }
448 }
436 def copymap(&self) -> PyResult<CopyMap> {
449 def copymap(&self) -> PyResult<CopyMap> {
437 CopyMap::from_inner(py, self.clone_ref(py))
450 CopyMap::from_inner(py, self.clone_ref(py))
438 }
451 }
439
452
440 def copymaplen(&self) -> PyResult<usize> {
453 def copymaplen(&self) -> PyResult<usize> {
441 Ok(self.inner(py).borrow().copy_map_len())
454 Ok(self.inner(py).borrow().copy_map_len())
442 }
455 }
443 def copymapcontains(&self, key: PyObject) -> PyResult<bool> {
456 def copymapcontains(&self, key: PyObject) -> PyResult<bool> {
444 let key = key.extract::<PyBytes>(py)?;
457 let key = key.extract::<PyBytes>(py)?;
445 self.inner(py)
458 self.inner(py)
446 .borrow()
459 .borrow()
447 .copy_map_contains_key(HgPath::new(key.data(py)))
460 .copy_map_contains_key(HgPath::new(key.data(py)))
448 .map_err(|e| v2_error(py, e))
461 .map_err(|e| v2_error(py, e))
449 }
462 }
450 def copymapget(
463 def copymapget(
451 &self,
464 &self,
452 key: PyObject,
465 key: PyObject,
453 default: Option<PyObject>
466 default: Option<PyObject>
454 ) -> PyResult<Option<PyObject>> {
467 ) -> PyResult<Option<PyObject>> {
455 let key = key.extract::<PyBytes>(py)?;
468 let key = key.extract::<PyBytes>(py)?;
456 match self
469 match self
457 .inner(py)
470 .inner(py)
458 .borrow()
471 .borrow()
459 .copy_map_get(HgPath::new(key.data(py)))
472 .copy_map_get(HgPath::new(key.data(py)))
460 .map_err(|e| v2_error(py, e))?
473 .map_err(|e| v2_error(py, e))?
461 {
474 {
462 Some(copy) => Ok(Some(
475 Some(copy) => Ok(Some(
463 PyBytes::new(py, copy.as_bytes()).into_object(),
476 PyBytes::new(py, copy.as_bytes()).into_object(),
464 )),
477 )),
465 None => Ok(default),
478 None => Ok(default),
466 }
479 }
467 }
480 }
468 def copymapsetitem(
481 def copymapsetitem(
469 &self,
482 &self,
470 key: PyObject,
483 key: PyObject,
471 value: PyObject
484 value: PyObject
472 ) -> PyResult<PyObject> {
485 ) -> PyResult<PyObject> {
473 let key = key.extract::<PyBytes>(py)?;
486 let key = key.extract::<PyBytes>(py)?;
474 let value = value.extract::<PyBytes>(py)?;
487 let value = value.extract::<PyBytes>(py)?;
475 self.inner(py)
488 self.inner(py)
476 .borrow_mut()
489 .borrow_mut()
477 .copy_map_insert(
490 .copy_map_insert(
478 HgPathBuf::from_bytes(key.data(py)),
491 HgPathBuf::from_bytes(key.data(py)),
479 HgPathBuf::from_bytes(value.data(py)),
492 HgPathBuf::from_bytes(value.data(py)),
480 )
493 )
481 .map_err(|e| v2_error(py, e))?;
494 .map_err(|e| v2_error(py, e))?;
482 Ok(py.None())
495 Ok(py.None())
483 }
496 }
484 def copymappop(
497 def copymappop(
485 &self,
498 &self,
486 key: PyObject,
499 key: PyObject,
487 default: Option<PyObject>
500 default: Option<PyObject>
488 ) -> PyResult<Option<PyObject>> {
501 ) -> PyResult<Option<PyObject>> {
489 let key = key.extract::<PyBytes>(py)?;
502 let key = key.extract::<PyBytes>(py)?;
490 match self
503 match self
491 .inner(py)
504 .inner(py)
492 .borrow_mut()
505 .borrow_mut()
493 .copy_map_remove(HgPath::new(key.data(py)))
506 .copy_map_remove(HgPath::new(key.data(py)))
494 .map_err(|e| v2_error(py, e))?
507 .map_err(|e| v2_error(py, e))?
495 {
508 {
496 Some(_) => Ok(None),
509 Some(_) => Ok(None),
497 None => Ok(default),
510 None => Ok(default),
498 }
511 }
499 }
512 }
500
513
501 def copymapiter(&self) -> PyResult<CopyMapKeysIterator> {
514 def copymapiter(&self) -> PyResult<CopyMapKeysIterator> {
502 let leaked_ref = self.inner(py).leak_immutable();
515 let leaked_ref = self.inner(py).leak_immutable();
503 CopyMapKeysIterator::from_inner(
516 CopyMapKeysIterator::from_inner(
504 py,
517 py,
505 unsafe { leaked_ref.map(py, |o| o.copy_map_iter()) },
518 unsafe { leaked_ref.map(py, |o| o.copy_map_iter()) },
506 )
519 )
507 }
520 }
508
521
509 def copymapitemsiter(&self) -> PyResult<CopyMapItemsIterator> {
522 def copymapitemsiter(&self) -> PyResult<CopyMapItemsIterator> {
510 let leaked_ref = self.inner(py).leak_immutable();
523 let leaked_ref = self.inner(py).leak_immutable();
511 CopyMapItemsIterator::from_inner(
524 CopyMapItemsIterator::from_inner(
512 py,
525 py,
513 unsafe { leaked_ref.map(py, |o| o.copy_map_iter()) },
526 unsafe { leaked_ref.map(py, |o| o.copy_map_iter()) },
514 )
527 )
515 }
528 }
516
529
517 def directories(&self) -> PyResult<PyList> {
530 def directories(&self) -> PyResult<PyList> {
518 let dirs = PyList::new(py, &[]);
531 let dirs = PyList::new(py, &[]);
519 for item in self.inner(py).borrow().iter_directories() {
532 for item in self.inner(py).borrow().iter_directories() {
520 let (path, mtime) = item.map_err(|e| v2_error(py, e))?;
533 let (path, mtime) = item.map_err(|e| v2_error(py, e))?;
521 let path = PyBytes::new(py, path.as_bytes());
534 let path = PyBytes::new(py, path.as_bytes());
522 let mtime = mtime.map(|t| t.0).unwrap_or(-1);
535 let mtime = mtime.map(|t| t.0).unwrap_or(-1);
523 let tuple = (path, (b'd', 0, 0, mtime));
536 let tuple = (path, (b'd', 0, 0, mtime));
524 dirs.append(py, tuple.to_py_object(py).into_object())
537 dirs.append(py, tuple.to_py_object(py).into_object())
525 }
538 }
526 Ok(dirs)
539 Ok(dirs)
527 }
540 }
528
541
529 });
542 });
530
543
531 impl DirstateMap {
544 impl DirstateMap {
532 pub fn get_inner_mut<'a>(
545 pub fn get_inner_mut<'a>(
533 &'a self,
546 &'a self,
534 py: Python<'a>,
547 py: Python<'a>,
535 ) -> RefMut<'a, Box<dyn DirstateMapMethods + Send>> {
548 ) -> RefMut<'a, Box<dyn DirstateMapMethods + Send>> {
536 self.inner(py).borrow_mut()
549 self.inner(py).borrow_mut()
537 }
550 }
538 fn translate_key(
551 fn translate_key(
539 py: Python,
552 py: Python,
540 res: Result<(&HgPath, DirstateEntry), DirstateV2ParseError>,
553 res: Result<(&HgPath, DirstateEntry), DirstateV2ParseError>,
541 ) -> PyResult<Option<PyBytes>> {
554 ) -> PyResult<Option<PyBytes>> {
542 let (f, _entry) = res.map_err(|e| v2_error(py, e))?;
555 let (f, _entry) = res.map_err(|e| v2_error(py, e))?;
543 Ok(Some(PyBytes::new(py, f.as_bytes())))
556 Ok(Some(PyBytes::new(py, f.as_bytes())))
544 }
557 }
545 fn translate_key_value(
558 fn translate_key_value(
546 py: Python,
559 py: Python,
547 res: Result<(&HgPath, DirstateEntry), DirstateV2ParseError>,
560 res: Result<(&HgPath, DirstateEntry), DirstateV2ParseError>,
548 ) -> PyResult<Option<(PyBytes, PyObject)>> {
561 ) -> PyResult<Option<(PyBytes, PyObject)>> {
549 let (f, entry) = res.map_err(|e| v2_error(py, e))?;
562 let (f, entry) = res.map_err(|e| v2_error(py, e))?;
550 Ok(Some((
563 Ok(Some((
551 PyBytes::new(py, f.as_bytes()),
564 PyBytes::new(py, f.as_bytes()),
552 make_dirstate_tuple(py, &entry)?,
565 make_dirstate_tuple(py, &entry)?,
553 )))
566 )))
554 }
567 }
555 }
568 }
556
569
557 py_shared_iterator!(
570 py_shared_iterator!(
558 DirstateMapKeysIterator,
571 DirstateMapKeysIterator,
559 UnsafePyLeaked<StateMapIter<'static>>,
572 UnsafePyLeaked<StateMapIter<'static>>,
560 DirstateMap::translate_key,
573 DirstateMap::translate_key,
561 Option<PyBytes>
574 Option<PyBytes>
562 );
575 );
563
576
564 py_shared_iterator!(
577 py_shared_iterator!(
565 DirstateMapItemsIterator,
578 DirstateMapItemsIterator,
566 UnsafePyLeaked<StateMapIter<'static>>,
579 UnsafePyLeaked<StateMapIter<'static>>,
567 DirstateMap::translate_key_value,
580 DirstateMap::translate_key_value,
568 Option<(PyBytes, PyObject)>
581 Option<(PyBytes, PyObject)>
569 );
582 );
570
583
571 fn extract_node_id(py: Python, obj: &PyObject) -> PyResult<Node> {
584 fn extract_node_id(py: Python, obj: &PyObject) -> PyResult<Node> {
572 let bytes = obj.extract::<PyBytes>(py)?;
585 let bytes = obj.extract::<PyBytes>(py)?;
573 match bytes.data(py).try_into() {
586 match bytes.data(py).try_into() {
574 Ok(s) => Ok(s),
587 Ok(s) => Ok(s),
575 Err(e) => Err(PyErr::new::<exc::ValueError, _>(py, e.to_string())),
588 Err(e) => Err(PyErr::new::<exc::ValueError, _>(py, e.to_string())),
576 }
589 }
577 }
590 }
578
591
579 pub(super) fn v2_error(py: Python<'_>, _: DirstateV2ParseError) -> PyErr {
592 pub(super) fn v2_error(py: Python<'_>, _: DirstateV2ParseError) -> PyErr {
580 PyErr::new::<exc::ValueError, _>(py, "corrupted dirstate-v2")
593 PyErr::new::<exc::ValueError, _>(py, "corrupted dirstate-v2")
581 }
594 }
@@ -1,216 +1,222 b''
1 use crate::dirstate::owning::OwningDirstateMap;
1 use crate::dirstate::owning::OwningDirstateMap;
2 use hg::dirstate::parsers::Timestamp;
2 use hg::dirstate::parsers::Timestamp;
3 use hg::dirstate_tree::dispatch::DirstateMapMethods;
3 use hg::dirstate_tree::dispatch::DirstateMapMethods;
4 use hg::dirstate_tree::on_disk::DirstateV2ParseError;
4 use hg::dirstate_tree::on_disk::DirstateV2ParseError;
5 use hg::matchers::Matcher;
5 use hg::matchers::Matcher;
6 use hg::utils::hg_path::{HgPath, HgPathBuf};
6 use hg::utils::hg_path::{HgPath, HgPathBuf};
7 use hg::CopyMapIter;
7 use hg::CopyMapIter;
8 use hg::DirstateEntry;
8 use hg::DirstateEntry;
9 use hg::DirstateError;
9 use hg::DirstateError;
10 use hg::DirstateParents;
10 use hg::DirstateParents;
11 use hg::DirstateStatus;
11 use hg::DirstateStatus;
12 use hg::EntryState;
12 use hg::EntryState;
13 use hg::PatternFileWarning;
13 use hg::PatternFileWarning;
14 use hg::StateMapIter;
14 use hg::StateMapIter;
15 use hg::StatusError;
15 use hg::StatusError;
16 use hg::StatusOptions;
16 use hg::StatusOptions;
17 use std::path::PathBuf;
17 use std::path::PathBuf;
18
18
19 impl DirstateMapMethods for OwningDirstateMap {
19 impl DirstateMapMethods for OwningDirstateMap {
20 fn clear(&mut self) {
20 fn clear(&mut self) {
21 self.get_mut().clear()
21 self.get_mut().clear()
22 }
22 }
23
23
24 fn add_file(
24 fn add_file(
25 &mut self,
25 &mut self,
26 filename: &HgPath,
26 filename: &HgPath,
27 entry: DirstateEntry,
27 entry: DirstateEntry,
28 added: bool,
28 from_p2: bool,
29 from_p2: bool,
29 possibly_dirty: bool,
30 possibly_dirty: bool,
30 ) -> Result<(), DirstateError> {
31 ) -> Result<(), DirstateError> {
31 self.get_mut()
32 self.get_mut().add_file(
32 .add_file(filename, entry, from_p2, possibly_dirty)
33 filename,
34 entry,
35 added,
36 from_p2,
37 possibly_dirty,
38 )
33 }
39 }
34
40
35 fn remove_file(
41 fn remove_file(
36 &mut self,
42 &mut self,
37 filename: &HgPath,
43 filename: &HgPath,
38 in_merge: bool,
44 in_merge: bool,
39 ) -> Result<(), DirstateError> {
45 ) -> Result<(), DirstateError> {
40 self.get_mut().remove_file(filename, in_merge)
46 self.get_mut().remove_file(filename, in_merge)
41 }
47 }
42
48
43 fn drop_file(
49 fn drop_file(
44 &mut self,
50 &mut self,
45 filename: &HgPath,
51 filename: &HgPath,
46 old_state: EntryState,
52 old_state: EntryState,
47 ) -> Result<bool, DirstateError> {
53 ) -> Result<bool, DirstateError> {
48 self.get_mut().drop_file(filename, old_state)
54 self.get_mut().drop_file(filename, old_state)
49 }
55 }
50
56
51 fn clear_ambiguous_times(
57 fn clear_ambiguous_times(
52 &mut self,
58 &mut self,
53 filenames: Vec<HgPathBuf>,
59 filenames: Vec<HgPathBuf>,
54 now: i32,
60 now: i32,
55 ) -> Result<(), DirstateV2ParseError> {
61 ) -> Result<(), DirstateV2ParseError> {
56 self.get_mut().clear_ambiguous_times(filenames, now)
62 self.get_mut().clear_ambiguous_times(filenames, now)
57 }
63 }
58
64
59 fn non_normal_entries_contains(
65 fn non_normal_entries_contains(
60 &mut self,
66 &mut self,
61 key: &HgPath,
67 key: &HgPath,
62 ) -> Result<bool, DirstateV2ParseError> {
68 ) -> Result<bool, DirstateV2ParseError> {
63 self.get_mut().non_normal_entries_contains(key)
69 self.get_mut().non_normal_entries_contains(key)
64 }
70 }
65
71
66 fn non_normal_entries_remove(&mut self, key: &HgPath) {
72 fn non_normal_entries_remove(&mut self, key: &HgPath) {
67 self.get_mut().non_normal_entries_remove(key)
73 self.get_mut().non_normal_entries_remove(key)
68 }
74 }
69
75
70 fn non_normal_or_other_parent_paths(
76 fn non_normal_or_other_parent_paths(
71 &mut self,
77 &mut self,
72 ) -> Box<dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + '_>
78 ) -> Box<dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + '_>
73 {
79 {
74 self.get_mut().non_normal_or_other_parent_paths()
80 self.get_mut().non_normal_or_other_parent_paths()
75 }
81 }
76
82
77 fn set_non_normal_other_parent_entries(&mut self, force: bool) {
83 fn set_non_normal_other_parent_entries(&mut self, force: bool) {
78 self.get_mut().set_non_normal_other_parent_entries(force)
84 self.get_mut().set_non_normal_other_parent_entries(force)
79 }
85 }
80
86
81 fn iter_non_normal_paths(
87 fn iter_non_normal_paths(
82 &mut self,
88 &mut self,
83 ) -> Box<
89 ) -> Box<
84 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
90 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
85 > {
91 > {
86 self.get_mut().iter_non_normal_paths()
92 self.get_mut().iter_non_normal_paths()
87 }
93 }
88
94
89 fn iter_non_normal_paths_panic(
95 fn iter_non_normal_paths_panic(
90 &self,
96 &self,
91 ) -> Box<
97 ) -> Box<
92 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
98 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
93 > {
99 > {
94 self.get().iter_non_normal_paths_panic()
100 self.get().iter_non_normal_paths_panic()
95 }
101 }
96
102
97 fn iter_other_parent_paths(
103 fn iter_other_parent_paths(
98 &mut self,
104 &mut self,
99 ) -> Box<
105 ) -> Box<
100 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
106 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
101 > {
107 > {
102 self.get_mut().iter_other_parent_paths()
108 self.get_mut().iter_other_parent_paths()
103 }
109 }
104
110
105 fn has_tracked_dir(
111 fn has_tracked_dir(
106 &mut self,
112 &mut self,
107 directory: &HgPath,
113 directory: &HgPath,
108 ) -> Result<bool, DirstateError> {
114 ) -> Result<bool, DirstateError> {
109 self.get_mut().has_tracked_dir(directory)
115 self.get_mut().has_tracked_dir(directory)
110 }
116 }
111
117
112 fn has_dir(&mut self, directory: &HgPath) -> Result<bool, DirstateError> {
118 fn has_dir(&mut self, directory: &HgPath) -> Result<bool, DirstateError> {
113 self.get_mut().has_dir(directory)
119 self.get_mut().has_dir(directory)
114 }
120 }
115
121
116 fn pack_v1(
122 fn pack_v1(
117 &mut self,
123 &mut self,
118 parents: DirstateParents,
124 parents: DirstateParents,
119 now: Timestamp,
125 now: Timestamp,
120 ) -> Result<Vec<u8>, DirstateError> {
126 ) -> Result<Vec<u8>, DirstateError> {
121 self.get_mut().pack_v1(parents, now)
127 self.get_mut().pack_v1(parents, now)
122 }
128 }
123
129
124 fn pack_v2(
130 fn pack_v2(
125 &mut self,
131 &mut self,
126 parents: DirstateParents,
132 parents: DirstateParents,
127 now: Timestamp,
133 now: Timestamp,
128 ) -> Result<Vec<u8>, DirstateError> {
134 ) -> Result<Vec<u8>, DirstateError> {
129 self.get_mut().pack_v2(parents, now)
135 self.get_mut().pack_v2(parents, now)
130 }
136 }
131
137
132 fn status<'a>(
138 fn status<'a>(
133 &'a mut self,
139 &'a mut self,
134 matcher: &'a (dyn Matcher + Sync),
140 matcher: &'a (dyn Matcher + Sync),
135 root_dir: PathBuf,
141 root_dir: PathBuf,
136 ignore_files: Vec<PathBuf>,
142 ignore_files: Vec<PathBuf>,
137 options: StatusOptions,
143 options: StatusOptions,
138 ) -> Result<(DirstateStatus<'a>, Vec<PatternFileWarning>), StatusError>
144 ) -> Result<(DirstateStatus<'a>, Vec<PatternFileWarning>), StatusError>
139 {
145 {
140 self.get_mut()
146 self.get_mut()
141 .status(matcher, root_dir, ignore_files, options)
147 .status(matcher, root_dir, ignore_files, options)
142 }
148 }
143
149
144 fn copy_map_len(&self) -> usize {
150 fn copy_map_len(&self) -> usize {
145 self.get().copy_map_len()
151 self.get().copy_map_len()
146 }
152 }
147
153
148 fn copy_map_iter(&self) -> CopyMapIter<'_> {
154 fn copy_map_iter(&self) -> CopyMapIter<'_> {
149 self.get().copy_map_iter()
155 self.get().copy_map_iter()
150 }
156 }
151
157
152 fn copy_map_contains_key(
158 fn copy_map_contains_key(
153 &self,
159 &self,
154 key: &HgPath,
160 key: &HgPath,
155 ) -> Result<bool, DirstateV2ParseError> {
161 ) -> Result<bool, DirstateV2ParseError> {
156 self.get().copy_map_contains_key(key)
162 self.get().copy_map_contains_key(key)
157 }
163 }
158
164
159 fn copy_map_get(
165 fn copy_map_get(
160 &self,
166 &self,
161 key: &HgPath,
167 key: &HgPath,
162 ) -> Result<Option<&HgPath>, DirstateV2ParseError> {
168 ) -> Result<Option<&HgPath>, DirstateV2ParseError> {
163 self.get().copy_map_get(key)
169 self.get().copy_map_get(key)
164 }
170 }
165
171
166 fn copy_map_remove(
172 fn copy_map_remove(
167 &mut self,
173 &mut self,
168 key: &HgPath,
174 key: &HgPath,
169 ) -> Result<Option<HgPathBuf>, DirstateV2ParseError> {
175 ) -> Result<Option<HgPathBuf>, DirstateV2ParseError> {
170 self.get_mut().copy_map_remove(key)
176 self.get_mut().copy_map_remove(key)
171 }
177 }
172
178
173 fn copy_map_insert(
179 fn copy_map_insert(
174 &mut self,
180 &mut self,
175 key: HgPathBuf,
181 key: HgPathBuf,
176 value: HgPathBuf,
182 value: HgPathBuf,
177 ) -> Result<Option<HgPathBuf>, DirstateV2ParseError> {
183 ) -> Result<Option<HgPathBuf>, DirstateV2ParseError> {
178 self.get_mut().copy_map_insert(key, value)
184 self.get_mut().copy_map_insert(key, value)
179 }
185 }
180
186
181 fn len(&self) -> usize {
187 fn len(&self) -> usize {
182 self.get().len()
188 self.get().len()
183 }
189 }
184
190
185 fn contains_key(
191 fn contains_key(
186 &self,
192 &self,
187 key: &HgPath,
193 key: &HgPath,
188 ) -> Result<bool, DirstateV2ParseError> {
194 ) -> Result<bool, DirstateV2ParseError> {
189 self.get().contains_key(key)
195 self.get().contains_key(key)
190 }
196 }
191
197
192 fn get(
198 fn get(
193 &self,
199 &self,
194 key: &HgPath,
200 key: &HgPath,
195 ) -> Result<Option<DirstateEntry>, DirstateV2ParseError> {
201 ) -> Result<Option<DirstateEntry>, DirstateV2ParseError> {
196 self.get().get(key)
202 self.get().get(key)
197 }
203 }
198
204
199 fn iter(&self) -> StateMapIter<'_> {
205 fn iter(&self) -> StateMapIter<'_> {
200 self.get().iter()
206 self.get().iter()
201 }
207 }
202
208
203 fn iter_directories(
209 fn iter_directories(
204 &self,
210 &self,
205 ) -> Box<
211 ) -> Box<
206 dyn Iterator<
212 dyn Iterator<
207 Item = Result<
213 Item = Result<
208 (&HgPath, Option<Timestamp>),
214 (&HgPath, Option<Timestamp>),
209 DirstateV2ParseError,
215 DirstateV2ParseError,
210 >,
216 >,
211 > + Send
217 > + Send
212 + '_,
218 + '_,
213 > {
219 > {
214 self.get().iter_directories()
220 self.get().iter_directories()
215 }
221 }
216 }
222 }