rust: Read dirstate from disk in DirstateMap constructor...
Simon Sapin - r47892:9aba0cde default
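Editor's note: this page shows the Python side of mercurial/dirstate.py at this revision. The dirstate map is built lazily through util.propertycache (see the _map property below), so with this change (as the commit title describes) the on-disk dirstate is read when the Rust DirstateMap is constructed on first access of self._map, rather than in a separate read step. A minimal, illustrative sketch of that lazy-attribute pattern, using functools.cached_property as a stand-in for util.propertycache and a plain dict instead of the real dirstatemap:

    import functools


    class lazydirstate(object):
        """Simplified stand-in for the dirstate class below (illustrative only)."""

        @functools.cached_property  # plays the role of util.propertycache
        def _map(self):
            # This is where the real class instantiates self._mapcls(...);
            # with this change, constructing the (Rust) map also reads the
            # dirstate file from disk.
            print('reading dirstate from disk (happens once)')
            return {b'some/file': (b'n', 0o644, 12, 0)}


    ds = lazydirstate()
    ds._map  # first access triggers the read
    ds._map  # cached attribute; no second read
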
@@ -1,1956 +1,1952
# dirstate.py - working directory tracking for mercurial
#
# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import collections
import contextlib
import errno
import os
import stat

from .i18n import _
from .pycompat import delattr

from hgdemandimport import tracing

from . import (
    encoding,
    error,
    match as matchmod,
    pathutil,
    policy,
    pycompat,
    scmutil,
    sparse,
    txnutil,
    util,
)

from .interfaces import (
    dirstate as intdirstate,
    util as interfaceutil,
)

parsers = policy.importmod('parsers')
rustmod = policy.importrust('dirstate')

propertycache = util.propertycache
filecache = scmutil.filecache
_rangemask = 0x7FFFFFFF

dirstatetuple = parsers.dirstatetuple


class repocache(filecache):
    """filecache for files in .hg/"""

    def join(self, obj, fname):
        return obj._opener.join(fname)


class rootcache(filecache):
    """filecache for files in the repository root"""

    def join(self, obj, fname):
        return obj._join(fname)


def _getfsnow(vfs):
    '''Get "now" timestamp on filesystem'''
    tmpfd, tmpname = vfs.mkstemp()
    try:
        return os.fstat(tmpfd)[stat.ST_MTIME]
    finally:
        os.close(tmpfd)
        vfs.unlink(tmpname)


@interfaceutil.implementer(intdirstate.idirstate)
class dirstate(object):
    def __init__(
        self, opener, ui, root, validate, sparsematchfn, nodeconstants
    ):
        """Create a new dirstate object.

        opener is an open()-like callable that can be used to open the
        dirstate file; root is the root of the directory tracked by
        the dirstate.
        """
        self._nodeconstants = nodeconstants
        self._opener = opener
        self._validate = validate
        self._root = root
        self._sparsematchfn = sparsematchfn
        # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
        # UNC path pointing to root share (issue4557)
        self._rootdir = pathutil.normasprefix(root)
        self._dirty = False
        self._lastnormaltime = 0
        self._ui = ui
        self._filecache = {}
        self._parentwriters = 0
        self._filename = b'dirstate'
        self._pendingfilename = b'%s.pending' % self._filename
        self._plchangecallbacks = {}
        self._origpl = None
        self._updatedfiles = set()
        self._mapcls = dirstatemap
        # Access and cache cwd early, so we don't access it for the first time
        # after a working-copy update caused it to not exist (accessing it then
        # raises an exception).
        self._cwd

    def prefetch_parents(self):
        """make sure the parents are loaded

        Used to avoid a race condition.
        """
        self._pl

    @contextlib.contextmanager
    def parentchange(self):
        """Context manager for handling dirstate parents.

        If an exception occurs in the scope of the context manager,
        the incoherent dirstate won't be written when wlock is
        released.
        """
        self._parentwriters += 1
        yield
        # Typically we want the "undo" step of a context manager in a
        # finally block so it happens even when an exception
        # occurs. In this case, however, we only want to decrement
        # parentwriters if the code in the with statement exits
        # normally, so we don't have a try/finally here on purpose.
        self._parentwriters -= 1

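    # Editor's note (illustrative comment, not part of the original file):
    # setparents() further below refuses to run unless a parentchange()
    # context is open (it raises ValueError when _parentwriters == 0), so
    # callers are expected to pair the two. A minimal sketch, assuming `repo`
    # is a localrepo object and `node` a binary changeset id:
    #
    #     with repo.dirstate.parentchange():
    #         repo.dirstate.setparents(node)
    #
    # Note that _parentwriters is deliberately not decremented when the body
    # raises, so the incoherent dirstate is not written back on wlock release.
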
    def pendingparentchange(self):
        """Returns true if the dirstate is in the middle of a set of changes
        that modify the dirstate parent.
        """
        return self._parentwriters > 0

    @propertycache
    def _map(self):
        """Return the dirstate contents (see documentation for dirstatemap)."""
        self._map = self._mapcls(
            self._ui, self._opener, self._root, self._nodeconstants
        )
        return self._map

    @property
    def _sparsematcher(self):
        """The matcher for the sparse checkout.

        The working directory may not include every file from a manifest. The
        matcher obtained by this property will match a path if it is to be
        included in the working directory.
        """
        # TODO there is potential to cache this property. For now, the matcher
        # is resolved on every access. (But the called function does use a
        # cache to keep the lookup fast.)
        return self._sparsematchfn()

    @repocache(b'branch')
    def _branch(self):
        try:
            return self._opener.read(b"branch").strip() or b"default"
        except IOError as inst:
            if inst.errno != errno.ENOENT:
                raise
            return b"default"

    @property
    def _pl(self):
        return self._map.parents()

    def hasdir(self, d):
        return self._map.hastrackeddir(d)

    @rootcache(b'.hgignore')
    def _ignore(self):
        files = self._ignorefiles()
        if not files:
            return matchmod.never()

        pats = [b'include:%s' % f for f in files]
        return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)

    @propertycache
    def _slash(self):
        return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'

    @propertycache
    def _checklink(self):
        return util.checklink(self._root)

    @propertycache
    def _checkexec(self):
        return bool(util.checkexec(self._root))

    @propertycache
    def _checkcase(self):
        return not util.fscasesensitive(self._join(b'.hg'))

    def _join(self, f):
        # much faster than os.path.join()
        # it's safe because f is always a relative path
        return self._rootdir + f

    def flagfunc(self, buildfallback):
        if self._checklink and self._checkexec:

            def f(x):
                try:
                    st = os.lstat(self._join(x))
                    if util.statislink(st):
                        return b'l'
                    if util.statisexec(st):
                        return b'x'
                except OSError:
                    pass
                return b''

            return f

        fallback = buildfallback()
        if self._checklink:

            def f(x):
                if os.path.islink(self._join(x)):
                    return b'l'
                if b'x' in fallback(x):
                    return b'x'
                return b''

            return f
        if self._checkexec:

            def f(x):
                if b'l' in fallback(x):
                    return b'l'
                if util.isexec(self._join(x)):
                    return b'x'
                return b''

            return f
        else:
            return fallback

    @propertycache
    def _cwd(self):
        # internal config: ui.forcecwd
        forcecwd = self._ui.config(b'ui', b'forcecwd')
        if forcecwd:
            return forcecwd
        return encoding.getcwd()

    def getcwd(self):
        """Return the path from which a canonical path is calculated.

        This path should be used to resolve file patterns or to convert
        canonical paths back to file paths for display. It shouldn't be
        used to get real file paths. Use vfs functions instead.
        """
        cwd = self._cwd
        if cwd == self._root:
            return b''
        # self._root ends with a path separator if self._root is '/' or 'C:\'
        rootsep = self._root
        if not util.endswithsep(rootsep):
            rootsep += pycompat.ossep
        if cwd.startswith(rootsep):
            return cwd[len(rootsep) :]
        else:
            # we're outside the repo. return an absolute path.
            return cwd

    def pathto(self, f, cwd=None):
        if cwd is None:
            cwd = self.getcwd()
        path = util.pathto(self._root, cwd, f)
        if self._slash:
            return util.pconvert(path)
        return path

    def __getitem__(self, key):
        """Return the current state of key (a filename) in the dirstate.

        States are:
          n  normal
          m  needs merging
          r  marked for removal
          a  marked for addition
          ?  not tracked
        """
        return self._map.get(key, (b"?",))[0]

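    # Editor's note (illustrative comment, not part of the original file):
    # given the state table above, indexing a dirstate instance `ds` yields a
    # one-byte code, for example:
    #
    #     ds[b'tracked-clean-file']   -> b'n'
    #     ds[b'just-added-file']      -> b'a'   (after ds.add(...))
    #     ds[b'file-not-in-dirstate'] -> b'?'   (the default above)
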
    def __contains__(self, key):
        return key in self._map

    def __iter__(self):
        return iter(sorted(self._map))

    def items(self):
        return pycompat.iteritems(self._map)

    iteritems = items

    def parents(self):
        return [self._validate(p) for p in self._pl]

    def p1(self):
        return self._validate(self._pl[0])

    def p2(self):
        return self._validate(self._pl[1])

    def branch(self):
        return encoding.tolocal(self._branch)

    def setparents(self, p1, p2=None):
        """Set dirstate parents to p1 and p2.

        When moving from two parents to one, 'm' merged entries are
        adjusted to normal and previous copy records discarded and
        returned by the call.

        See localrepo.setparents()
        """
        if p2 is None:
            p2 = self._nodeconstants.nullid
        if self._parentwriters == 0:
            raise ValueError(
                b"cannot set dirstate parent outside of "
                b"dirstate.parentchange context manager"
            )

        self._dirty = True
        oldp2 = self._pl[1]
        if self._origpl is None:
            self._origpl = self._pl
        self._map.setparents(p1, p2)
        copies = {}
        if (
            oldp2 != self._nodeconstants.nullid
            and p2 == self._nodeconstants.nullid
        ):
            candidatefiles = self._map.non_normal_or_other_parent_paths()

            for f in candidatefiles:
                s = self._map.get(f)
                if s is None:
                    continue

                # Discard 'm' markers when moving away from a merge state
                if s[0] == b'm':
                    source = self._map.copymap.get(f)
                    if source:
                        copies[f] = source
                    self.normallookup(f)
                # Also fix up otherparent markers
                elif s[0] == b'n' and s[2] == -2:
                    source = self._map.copymap.get(f)
                    if source:
                        copies[f] = source
                    self.add(f)
        return copies

    def setbranch(self, branch):
        self.__class__._branch.set(self, encoding.fromlocal(branch))
        f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
        try:
            f.write(self._branch + b'\n')
            f.close()

            # make sure filecache has the correct stat info for _branch after
            # replacing the underlying file
            ce = self._filecache[b'_branch']
            if ce:
                ce.refresh()
        except: # re-raises
            f.discard()
            raise

    def invalidate(self):
        """Causes the next access to reread the dirstate.

        This is different from localrepo.invalidatedirstate() because it always
        rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
        check whether the dirstate has changed before rereading it."""

        for a in ("_map", "_branch", "_ignore"):
            if a in self.__dict__:
                delattr(self, a)
        self._lastnormaltime = 0
        self._dirty = False
        self._updatedfiles.clear()
        self._parentwriters = 0
        self._origpl = None

    def copy(self, source, dest):
        """Mark dest as a copy of source. Unmark dest if source is None."""
        if source == dest:
            return
        self._dirty = True
        if source is not None:
            self._map.copymap[dest] = source
            self._updatedfiles.add(source)
            self._updatedfiles.add(dest)
        elif self._map.copymap.pop(dest, None):
            self._updatedfiles.add(dest)

    def copied(self, file):
        return self._map.copymap.get(file, None)

    def copies(self):
        return self._map.copymap

    def _addpath(self, f, state, mode, size, mtime):
        oldstate = self[f]
        if state == b'a' or oldstate == b'r':
            scmutil.checkfilename(f)
            if self._map.hastrackeddir(f):
                raise error.Abort(
                    _(b'directory %r already in dirstate') % pycompat.bytestr(f)
                )
            # shadows
            for d in pathutil.finddirs(f):
                if self._map.hastrackeddir(d):
                    break
                entry = self._map.get(d)
                if entry is not None and entry[0] != b'r':
                    raise error.Abort(
                        _(b'file %r in dirstate clashes with %r')
                        % (pycompat.bytestr(d), pycompat.bytestr(f))
                    )
        self._dirty = True
        self._updatedfiles.add(f)
        self._map.addfile(f, oldstate, state, mode, size, mtime)

    def normal(self, f, parentfiledata=None):
        """Mark a file normal and clean.

        parentfiledata: (mode, size, mtime) of the clean file

        parentfiledata should be computed from memory (for mode,
        size), as close as possible to the point where we
        determined the file was clean, to limit the risk of the
        file having been changed by an external process between the
        moment where the file was determined to be clean and now."""
        if parentfiledata:
            (mode, size, mtime) = parentfiledata
        else:
            s = os.lstat(self._join(f))
            mode = s.st_mode
            size = s.st_size
            mtime = s[stat.ST_MTIME]
        self._addpath(f, b'n', mode, size & _rangemask, mtime & _rangemask)
        self._map.copymap.pop(f, None)
        if f in self._map.nonnormalset:
            self._map.nonnormalset.remove(f)
        if mtime > self._lastnormaltime:
            # Remember the most recent modification timeslot for status(),
            # to make sure we won't miss future size-preserving file content
            # modifications that happen within the same timeslot.
            self._lastnormaltime = mtime

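    # Editor's note (illustrative comment, not part of the original file):
    # normal() masks size and mtime with _rangemask (0x7FFFFFFF) so the
    # recorded values stay within 31 bits, e.g.:
    #
    #     0x123456789 & _rangemask == 0x23456789
    #
    # which keeps them non-negative when packed as 32-bit signed fields and
    # leaves the negative sentinel sizes (-1, -2) used elsewhere in this file
    # unambiguous.
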
    def normallookup(self, f):
        '''Mark a file normal, but possibly dirty.'''
        if self._pl[1] != self._nodeconstants.nullid:
            # if there is a merge going on and the file was either
            # in state 'm' (-1) or coming from other parent (-2) before
            # being removed, restore that state.
            entry = self._map.get(f)
            if entry is not None:
                if entry[0] == b'r' and entry[2] in (-1, -2):
                    source = self._map.copymap.get(f)
                    if entry[2] == -1:
                        self.merge(f)
                    elif entry[2] == -2:
                        self.otherparent(f)
                    if source:
                        self.copy(source, f)
                    return
                if entry[0] == b'm' or entry[0] == b'n' and entry[2] == -2:
                    return
        self._addpath(f, b'n', 0, -1, -1)
        self._map.copymap.pop(f, None)

    def otherparent(self, f):
        '''Mark as coming from the other parent, always dirty.'''
        if self._pl[1] == self._nodeconstants.nullid:
            raise error.Abort(
                _(b"setting %r to other parent only allowed in merges") % f
            )
        if f in self and self[f] == b'n':
            # merge-like
            self._addpath(f, b'm', 0, -2, -1)
        else:
            # add-like
            self._addpath(f, b'n', 0, -2, -1)
        self._map.copymap.pop(f, None)

    def add(self, f):
        '''Mark a file added.'''
        self._addpath(f, b'a', 0, -1, -1)
        self._map.copymap.pop(f, None)

    def remove(self, f):
        '''Mark a file removed.'''
        self._dirty = True
        oldstate = self[f]
        size = 0
        if self._pl[1] != self._nodeconstants.nullid:
            entry = self._map.get(f)
            if entry is not None:
                # backup the previous state
                if entry[0] == b'm': # merge
                    size = -1
                elif entry[0] == b'n' and entry[2] == -2: # other parent
                    size = -2
                    self._map.otherparentset.add(f)
        self._updatedfiles.add(f)
        self._map.removefile(f, oldstate, size)
        if size == 0:
            self._map.copymap.pop(f, None)

    def merge(self, f):
        '''Mark a file merged.'''
        if self._pl[1] == self._nodeconstants.nullid:
            return self.normallookup(f)
        return self.otherparent(f)

    def drop(self, f):
        '''Drop a file from the dirstate'''
        oldstate = self[f]
        if self._map.dropfile(f, oldstate):
            self._dirty = True
            self._updatedfiles.add(f)
            self._map.copymap.pop(f, None)

    def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
        if exists is None:
            exists = os.path.lexists(os.path.join(self._root, path))
        if not exists:
            # Maybe a path component exists
            if not ignoremissing and b'/' in path:
                d, f = path.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, None)
                folded = d + b"/" + f
            else:
                # No path components, preserve original case
                folded = path
        else:
            # recursively normalize leading directory components
            # against dirstate
            if b'/' in normed:
                d, f = normed.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, True)
                r = self._root + b"/" + d
                folded = d + b"/" + util.fspath(f, r)
            else:
                folded = util.fspath(normed, self._root)
            storemap[normed] = folded

        return folded

    def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
        normed = util.normcase(path)
        folded = self._map.filefoldmap.get(normed, None)
        if folded is None:
            if isknown:
                folded = path
            else:
                folded = self._discoverpath(
                    path, normed, ignoremissing, exists, self._map.filefoldmap
                )
        return folded

    def _normalize(self, path, isknown, ignoremissing=False, exists=None):
        normed = util.normcase(path)
        folded = self._map.filefoldmap.get(normed, None)
        if folded is None:
            folded = self._map.dirfoldmap.get(normed, None)
        if folded is None:
            if isknown:
                folded = path
            else:
                # store discovered result in dirfoldmap so that future
                # normalizefile calls don't start matching directories
                folded = self._discoverpath(
                    path, normed, ignoremissing, exists, self._map.dirfoldmap
                )
        return folded

    def normalize(self, path, isknown=False, ignoremissing=False):
        """
        normalize the case of a pathname when on a casefolding filesystem

        isknown specifies whether the filename came from walking the
        disk, to avoid extra filesystem access.

        If ignoremissing is True, missing paths are returned
        unchanged. Otherwise, we try harder to normalize possibly
        existing path components.

        The normalized case is determined based on the following precedence:

        - version of name already stored in the dirstate
        - version of name stored on disk
        - version provided via command arguments
        """

        if self._checkcase:
            return self._normalize(path, isknown, ignoremissing)
        return path

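    # Editor's note (illustrative comment, not part of the original file):
    # on a case-folding filesystem this maps a user-supplied spelling back to
    # the tracked/on-disk one, e.g. (hypothetical repo tracking b'Makefile'):
    #
    #     ds.normalize(b'MAKEFILE')  -> b'Makefile'
    #
    # On a case-sensitive filesystem (_checkcase is False) the path is
    # returned unchanged.
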
    def clear(self):
        self._map.clear()
        self._lastnormaltime = 0
        self._updatedfiles.clear()
        self._dirty = True

    def rebuild(self, parent, allfiles, changedfiles=None):
        if changedfiles is None:
            # Rebuild entire dirstate
            to_lookup = allfiles
            to_drop = []
            lastnormaltime = self._lastnormaltime
            self.clear()
            self._lastnormaltime = lastnormaltime
        elif len(changedfiles) < 10:
            # Avoid turning allfiles into a set, which can be expensive if it's
            # large.
            to_lookup = []
            to_drop = []
            for f in changedfiles:
                if f in allfiles:
                    to_lookup.append(f)
                else:
                    to_drop.append(f)
        else:
            changedfilesset = set(changedfiles)
            to_lookup = changedfilesset & set(allfiles)
            to_drop = changedfilesset - to_lookup

        if self._origpl is None:
            self._origpl = self._pl
        self._map.setparents(parent, self._nodeconstants.nullid)

        for f in to_lookup:
            self.normallookup(f)
        for f in to_drop:
            self.drop(f)

        self._dirty = True

    def identity(self):
        """Return identity of dirstate itself to detect changes in storage

        If identity of previous dirstate is equal to this, writing
        changes based on the former dirstate out can keep consistency.
        """
        return self._map.identity

    def write(self, tr):
        if not self._dirty:
            return

        filename = self._filename
        if tr:
            # 'dirstate.write()' is not only for writing in-memory
            # changes out, but also for dropping ambiguous timestamps.
            # delayed writing can re-raise the "ambiguous timestamp issue".
            # See also the wiki page below for detail:
            # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan

            # emulate dropping timestamp in 'parsers.pack_dirstate'
            now = _getfsnow(self._opener)
            self._map.clearambiguoustimes(self._updatedfiles, now)

            # emulate that all 'dirstate.normal' results are written out
            self._lastnormaltime = 0
            self._updatedfiles.clear()

            # delay writing in-memory changes out
            tr.addfilegenerator(
                b'dirstate',
                (self._filename,),
                self._writedirstate,
                location=b'plain',
            )
            return

        st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
        self._writedirstate(st)

    def addparentchangecallback(self, category, callback):
        """add a callback to be called when the wd parents are changed

        Callback will be called with the following arguments:
            dirstate, (oldp1, oldp2), (newp1, newp2)

        Category is a unique identifier to allow overwriting an old callback
        with a newer callback.
        """
        self._plchangecallbacks[category] = callback

    def _writedirstate(self, st):
        # notify callbacks about parents change
        if self._origpl is not None and self._origpl != self._pl:
            for c, callback in sorted(
                pycompat.iteritems(self._plchangecallbacks)
            ):
                callback(self, self._origpl, self._pl)
            self._origpl = None
        # use the modification time of the newly created temporary file as the
        # filesystem's notion of 'now'
        now = util.fstat(st)[stat.ST_MTIME] & _rangemask

        # a large enough 'delaywrite' prevents 'pack_dirstate' from dropping
        # the timestamp of each entry in the dirstate, because of 'now > mtime'
        delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
        if delaywrite > 0:
            # do we have any files to delay for?
            for f, e in pycompat.iteritems(self._map):
                if e[0] == b'n' and e[3] == now:
                    import time # to avoid useless import

                    # rather than sleep n seconds, sleep until the next
                    # multiple of n seconds
                    clock = time.time()
                    start = int(clock) - (int(clock) % delaywrite)
                    end = start + delaywrite
                    time.sleep(end - clock)
                    now = end # trust our estimate that the end is near now
                    break

        self._map.write(st, now)
        self._lastnormaltime = 0
        self._dirty = False

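    # Editor's note (illustrative comment, not part of the original file):
    # the delaywrite loop above rounds up to the next multiple of `delaywrite`
    # seconds rather than sleeping a fixed amount. For example, with
    # delaywrite = 2 and clock = 105.3:
    #
    #     start = 105 - (105 % 2) = 104
    #     end   = 104 + 2         = 106
    #     sleep(106 - 105.3)      = sleep(0.7)
    #
    # so 'now' can safely be treated as 106 afterwards.
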
    def _dirignore(self, f):
        if self._ignore(f):
            return True
        for p in pathutil.finddirs(f):
            if self._ignore(p):
                return True
        return False

    def _ignorefiles(self):
        files = []
        if os.path.exists(self._join(b'.hgignore')):
            files.append(self._join(b'.hgignore'))
        for name, path in self._ui.configitems(b"ui"):
            if name == b'ignore' or name.startswith(b'ignore.'):
                # we need to use os.path.join here rather than self._join
                # because path is arbitrary and user-specified
                files.append(os.path.join(self._rootdir, util.expandpath(path)))
        return files

    def _ignorefileandline(self, f):
        files = collections.deque(self._ignorefiles())
        visited = set()
        while files:
            i = files.popleft()
            patterns = matchmod.readpatternfile(
                i, self._ui.warn, sourceinfo=True
            )
            for pattern, lineno, line in patterns:
                kind, p = matchmod._patsplit(pattern, b'glob')
                if kind == b"subinclude":
                    if p not in visited:
                        files.append(p)
                    continue
                m = matchmod.match(
                    self._root, b'', [], [pattern], warn=self._ui.warn
                )
                if m(f):
                    return (i, lineno, line)
            visited.add(i)
        return (None, -1, b"")

    def _walkexplicit(self, match, subrepos):
        """Get stat data about the files explicitly specified by match.

        Return a triple (results, dirsfound, dirsnotfound).
        - results is a mapping from filename to stat result. It also contains
          listings mapping subrepos and .hg to None.
        - dirsfound is a list of files found to be directories.
        - dirsnotfound is a list of files that the dirstate thinks are
          directories and that were not found."""

        def badtype(mode):
            kind = _(b'unknown')
            if stat.S_ISCHR(mode):
                kind = _(b'character device')
            elif stat.S_ISBLK(mode):
                kind = _(b'block device')
            elif stat.S_ISFIFO(mode):
                kind = _(b'fifo')
            elif stat.S_ISSOCK(mode):
                kind = _(b'socket')
            elif stat.S_ISDIR(mode):
                kind = _(b'directory')
            return _(b'unsupported file type (type is %s)') % kind

        badfn = match.bad
        dmap = self._map
        lstat = os.lstat
        getkind = stat.S_IFMT
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join
        dirsfound = []
        foundadd = dirsfound.append
        dirsnotfound = []
        notfoundadd = dirsnotfound.append

        if not match.isexact() and self._checkcase:
            normalize = self._normalize
        else:
            normalize = None

        files = sorted(match.files())
        subrepos.sort()
        i, j = 0, 0
        while i < len(files) and j < len(subrepos):
            subpath = subrepos[j] + b"/"
            if files[i] < subpath:
                i += 1
                continue
            while i < len(files) and files[i].startswith(subpath):
                del files[i]
            j += 1

        if not files or b'' in files:
            files = [b'']
            # constructing the foldmap is expensive, so don't do it for the
            # common case where files is ['']
            normalize = None
        results = dict.fromkeys(subrepos)
        results[b'.hg'] = None

        for ff in files:
            if normalize:
                nf = normalize(ff, False, True)
            else:
                nf = ff
            if nf in results:
                continue

            try:
                st = lstat(join(nf))
                kind = getkind(st.st_mode)
                if kind == dirkind:
                    if nf in dmap:
                        # file replaced by dir on disk but still in dirstate
                        results[nf] = None
                    foundadd((nf, ff))
                elif kind == regkind or kind == lnkkind:
                    results[nf] = st
                else:
                    badfn(ff, badtype(kind))
                    if nf in dmap:
                        results[nf] = None
            except OSError as inst: # nf not found on disk - it is dirstate only
                if nf in dmap: # does it exactly match a missing file?
                    results[nf] = None
                else: # does it match a missing directory?
                    if self._map.hasdir(nf):
                        notfoundadd(nf)
                    else:
                        badfn(ff, encoding.strtolocal(inst.strerror))

        # match.files() may contain explicitly-specified paths that shouldn't
        # be taken; drop them from the list of files found. dirsfound/notfound
        # aren't filtered here because they will be tested later.
        if match.anypats():
            for f in list(results):
                if f == b'.hg' or f in subrepos:
                    # keep sentinel to disable further out-of-repo walks
                    continue
                if not match(f):
                    del results[f]

        # Case insensitive filesystems cannot rely on lstat() failing to detect
        # a case-only rename. Prune the stat object for any file that does not
        # match the case in the filesystem, if there are multiple files that
        # normalize to the same path.
        if match.isexact() and self._checkcase:
            normed = {}

            for f, st in pycompat.iteritems(results):
                if st is None:
                    continue

                nc = util.normcase(f)
                paths = normed.get(nc)

                if paths is None:
                    paths = set()
                    normed[nc] = paths

                paths.add(f)

            for norm, paths in pycompat.iteritems(normed):
                if len(paths) > 1:
                    for path in paths:
                        folded = self._discoverpath(
                            path, norm, True, None, self._map.dirfoldmap
                        )
                        if path != folded:
                            results[path] = None

        return results, dirsfound, dirsnotfound

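    # Editor's note (illustrative comment, not part of the original file):
    # a sketch of the triple returned by _walkexplicit() above, based on how
    # results, foundadd and notfoundadd are filled in:
    #
    #     results      -> {b'.hg': None, b'somesubrepo': None,
    #                      b'some/file': <stat result or None>, ...}
    #     dirsfound    -> [(normalized, original), ...]   # via foundadd()
    #     dirsnotfound -> [normalized, ...]               # via notfoundadd()
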
    def walk(self, match, subrepos, unknown, ignored, full=True):
        """
        Walk recursively through the directory tree, finding all files
        matched by match.

        If full is False, maybe skip some known-clean files.

        Return a dict mapping filename to stat-like object (either
        mercurial.osutil.stat instance or return value of os.stat()).

        """
        # full is a flag that extensions that hook into walk can use -- this
        # implementation doesn't use it at all. This satisfies the contract
        # because we only guarantee a "maybe".

        if ignored:
            ignore = util.never
            dirignore = util.never
        elif unknown:
            ignore = self._ignore
            dirignore = self._dirignore
        else:
            # if not unknown and not ignored, drop dir recursion and step 2
            ignore = util.always
            dirignore = util.always

        matchfn = match.matchfn
        matchalways = match.always()
        matchtdir = match.traversedir
        dmap = self._map
        listdir = util.listdir
        lstat = os.lstat
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join

        exact = skipstep3 = False
        if match.isexact(): # match.exact
            exact = True
            dirignore = util.always # skip step 2
        elif match.prefix(): # match.match, no patterns
            skipstep3 = True

        if not exact and self._checkcase:
            normalize = self._normalize
            normalizefile = self._normalizefile
            skipstep3 = False
        else:
            normalize = self._normalize
            normalizefile = None

        # step 1: find all explicit files
        results, work, dirsnotfound = self._walkexplicit(match, subrepos)
        if matchtdir:
            for d in work:
                matchtdir(d[0])
            for d in dirsnotfound:
                matchtdir(d)

        skipstep3 = skipstep3 and not (work or dirsnotfound)
        work = [d for d in work if not dirignore(d[0])]

        # step 2: visit subdirectories
        def traverse(work, alreadynormed):
            wadd = work.append
            while work:
                tracing.counter('dirstate.walk work', len(work))
                nd = work.pop()
                visitentries = match.visitchildrenset(nd)
                if not visitentries:
                    continue
                if visitentries == b'this' or visitentries == b'all':
                    visitentries = None
                skip = None
                if nd != b'':
                    skip = b'.hg'
                try:
                    with tracing.log('dirstate.walk.traverse listdir %s', nd):
                        entries = listdir(join(nd), stat=True, skip=skip)
                except OSError as inst:
                    if inst.errno in (errno.EACCES, errno.ENOENT):
                        match.bad(
                            self.pathto(nd), encoding.strtolocal(inst.strerror)
                        )
                        continue
                    raise
                for f, kind, st in entries:
                    # Some matchers may return files in the visitentries set,
                    # instead of 'this', if the matcher explicitly mentions them
                    # and is not an exactmatcher. This is acceptable; we do not
                    # make any hard assumptions about file-or-directory below
                    # based on the presence of `f` in visitentries. If
                    # visitchildrenset returned a set, we can always skip the
                    # entries *not* in the set it provided regardless of whether
                    # they're actually a file or a directory.
                    if visitentries and f not in visitentries:
                        continue
                    if normalizefile:
                        # even though f might be a directory, we're only
                        # interested in comparing it to files currently in the
                        # dmap -- therefore normalizefile is enough
                        nf = normalizefile(
                            nd and (nd + b"/" + f) or f, True, True
                        )
                    else:
                        nf = nd and (nd + b"/" + f) or f
                    if nf not in results:
                        if kind == dirkind:
                            if not ignore(nf):
                                if matchtdir:
                                    matchtdir(nf)
                                wadd(nf)
                            if nf in dmap and (matchalways or matchfn(nf)):
                                results[nf] = None
                        elif kind == regkind or kind == lnkkind:
                            if nf in dmap:
                                if matchalways or matchfn(nf):
                                    results[nf] = st
                            elif (matchalways or matchfn(nf)) and not ignore(
1034 nf
1034 nf
1035 ):
1035 ):
1036 # unknown file -- normalize if necessary
1036 # unknown file -- normalize if necessary
1037 if not alreadynormed:
1037 if not alreadynormed:
1038 nf = normalize(nf, False, True)
1038 nf = normalize(nf, False, True)
1039 results[nf] = st
1039 results[nf] = st
1040 elif nf in dmap and (matchalways or matchfn(nf)):
1040 elif nf in dmap and (matchalways or matchfn(nf)):
1041 results[nf] = None
1041 results[nf] = None
1042
1042
1043 for nd, d in work:
1043 for nd, d in work:
1044 # alreadynormed means that traverse() doesn't have to do any
1044 # alreadynormed means that traverse() doesn't have to do any
1045 # expensive directory normalization
1045 # expensive directory normalization
1046 alreadynormed = not normalize or nd == d
1046 alreadynormed = not normalize or nd == d
1047 traverse([d], alreadynormed)
1047 traverse([d], alreadynormed)
1048
1048
1049 for s in subrepos:
1049 for s in subrepos:
1050 del results[s]
1050 del results[s]
1051 del results[b'.hg']
1051 del results[b'.hg']
1052
1052
1053 # step 3: visit remaining files from dmap
1053 # step 3: visit remaining files from dmap
1054 if not skipstep3 and not exact:
1054 if not skipstep3 and not exact:
1055 # If a dmap file is not in results yet, it was either
1055 # If a dmap file is not in results yet, it was either
1056 # a) not matched by matchfn, b) ignored, c) missing, or d) under a
1056 # a) not matched by matchfn, b) ignored, c) missing, or d) under a
1057 # symlink directory.
1057 # symlink directory.
1058 if not results and matchalways:
1058 if not results and matchalways:
1059 visit = [f for f in dmap]
1059 visit = [f for f in dmap]
1060 else:
1060 else:
1061 visit = [f for f in dmap if f not in results and matchfn(f)]
1061 visit = [f for f in dmap if f not in results and matchfn(f)]
1062 visit.sort()
1062 visit.sort()
1063
1063
1064 if unknown:
1064 if unknown:
1065 # unknown == True means we walked all dirs under the roots
1065 # unknown == True means we walked all dirs under the roots
1066 # that weren't ignored, and everything that matched was stat'ed
1066 # that weren't ignored, and everything that matched was stat'ed
1067 # and is already in results.
1067 # and is already in results.
1068 # The rest must thus be ignored or under a symlink.
1068 # The rest must thus be ignored or under a symlink.
1069 audit_path = pathutil.pathauditor(self._root, cached=True)
1069 audit_path = pathutil.pathauditor(self._root, cached=True)
1070
1070
1071 for nf in iter(visit):
1071 for nf in iter(visit):
1072 # If a stat for the same file was already added with a
1072 # If a stat for the same file was already added with a
1073 # different case, don't add one for this, since that would
1073 # different case, don't add one for this, since that would
1074 # make it appear as if the file exists under both names
1074 # make it appear as if the file exists under both names
1075 # on disk.
1075 # on disk.
1076 if (
1076 if (
1077 normalizefile
1077 normalizefile
1078 and normalizefile(nf, True, True) in results
1078 and normalizefile(nf, True, True) in results
1079 ):
1079 ):
1080 results[nf] = None
1080 results[nf] = None
1081 # Report ignored items in the dmap as long as they are not
1081 # Report ignored items in the dmap as long as they are not
1082 # under a symlink directory.
1082 # under a symlink directory.
1083 elif audit_path.check(nf):
1083 elif audit_path.check(nf):
1084 try:
1084 try:
1085 results[nf] = lstat(join(nf))
1085 results[nf] = lstat(join(nf))
1086 # file was just ignored, no links, and exists
1086 # file was just ignored, no links, and exists
1087 except OSError:
1087 except OSError:
1088 # file doesn't exist
1088 # file doesn't exist
1089 results[nf] = None
1089 results[nf] = None
1090 else:
1090 else:
1091 # It's either missing or under a symlink directory
1091 # It's either missing or under a symlink directory
1092 # which we in this case report as missing
1092 # which we in this case report as missing
1093 results[nf] = None
1093 results[nf] = None
1094 else:
1094 else:
1095 # We may not have walked the full directory tree above,
1095 # We may not have walked the full directory tree above,
1096 # so stat and check everything we missed.
1096 # so stat and check everything we missed.
1097 iv = iter(visit)
1097 iv = iter(visit)
1098 for st in util.statfiles([join(i) for i in visit]):
1098 for st in util.statfiles([join(i) for i in visit]):
1099 results[next(iv)] = st
1099 results[next(iv)] = st
1100 return results
1100 return results
1101
1101
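Reviewer note: for readers new to this code, a minimal stand-alone sketch of the shape of the mapping that walk() returns. The paths and values below are invented for illustration; only the dict-of-filename-to-stat-or-None shape is taken from the code above.

import os

# walk() returns repo-relative paths mapped to an lstat() result when the file
# was actually stat'ed during traversal, or None when it is only known from the
# dirstate (e.g. missing from disk, or matched without needing a fresh stat).
results = {
    b'some/tracked-file.txt': os.lstat('.'),   # stand-in stat object
    b'some/missing-file.txt': None,
}
for fn, st in sorted(results.items()):
    print(fn, 'mode=%o' % st.st_mode if st is not None else 'no stat')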
1102 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1102 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1103 # Force Rayon (Rust parallelism library) to respect the number of
1103 # Force Rayon (Rust parallelism library) to respect the number of
1104 # workers. This is a temporary workaround until Rust code knows
1104 # workers. This is a temporary workaround until Rust code knows
1105 # how to read the config file.
1105 # how to read the config file.
1106 numcpus = self._ui.configint(b"worker", b"numcpus")
1106 numcpus = self._ui.configint(b"worker", b"numcpus")
1107 if numcpus is not None:
1107 if numcpus is not None:
1108 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1108 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1109
1109
1110 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1110 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1111 if not workers_enabled:
1111 if not workers_enabled:
1112 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1112 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1113
1113
1114 (
1114 (
1115 lookup,
1115 lookup,
1116 modified,
1116 modified,
1117 added,
1117 added,
1118 removed,
1118 removed,
1119 deleted,
1119 deleted,
1120 clean,
1120 clean,
1121 ignored,
1121 ignored,
1122 unknown,
1122 unknown,
1123 warnings,
1123 warnings,
1124 bad,
1124 bad,
1125 traversed,
1125 traversed,
1126 ) = rustmod.status(
1126 ) = rustmod.status(
1127 self._map._rustmap,
1127 self._map._rustmap,
1128 matcher,
1128 matcher,
1129 self._rootdir,
1129 self._rootdir,
1130 self._ignorefiles(),
1130 self._ignorefiles(),
1131 self._checkexec,
1131 self._checkexec,
1132 self._lastnormaltime,
1132 self._lastnormaltime,
1133 bool(list_clean),
1133 bool(list_clean),
1134 bool(list_ignored),
1134 bool(list_ignored),
1135 bool(list_unknown),
1135 bool(list_unknown),
1136 bool(matcher.traversedir),
1136 bool(matcher.traversedir),
1137 )
1137 )
1138
1138
1139 if matcher.traversedir:
1139 if matcher.traversedir:
1140 for dir in traversed:
1140 for dir in traversed:
1141 matcher.traversedir(dir)
1141 matcher.traversedir(dir)
1142
1142
1143 if self._ui.warn:
1143 if self._ui.warn:
1144 for item in warnings:
1144 for item in warnings:
1145 if isinstance(item, tuple):
1145 if isinstance(item, tuple):
1146 file_path, syntax = item
1146 file_path, syntax = item
1147 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1147 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1148 file_path,
1148 file_path,
1149 syntax,
1149 syntax,
1150 )
1150 )
1151 self._ui.warn(msg)
1151 self._ui.warn(msg)
1152 else:
1152 else:
1153 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1153 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1154 self._ui.warn(
1154 self._ui.warn(
1155 msg
1155 msg
1156 % (
1156 % (
1157 pathutil.canonpath(
1157 pathutil.canonpath(
1158 self._rootdir, self._rootdir, item
1158 self._rootdir, self._rootdir, item
1159 ),
1159 ),
1160 b"No such file or directory",
1160 b"No such file or directory",
1161 )
1161 )
1162 )
1162 )
1163
1163
1164 for (fn, message) in bad:
1164 for (fn, message) in bad:
1165 matcher.bad(fn, encoding.strtolocal(message))
1165 matcher.bad(fn, encoding.strtolocal(message))
1166
1166
1167 status = scmutil.status(
1167 status = scmutil.status(
1168 modified=modified,
1168 modified=modified,
1169 added=added,
1169 added=added,
1170 removed=removed,
1170 removed=removed,
1171 deleted=deleted,
1171 deleted=deleted,
1172 unknown=unknown,
1172 unknown=unknown,
1173 ignored=ignored,
1173 ignored=ignored,
1174 clean=clean,
1174 clean=clean,
1175 )
1175 )
1176 return (lookup, status)
1176 return (lookup, status)
1177
1177
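Reviewer note: a self-contained sketch of the RAYON_NUM_THREADS plumbing at the top of _rust_status(). The config knobs (worker.numcpus, worker.enabled) are the ones read in the code above; everything else here is simplified stand-in code, not Mercurial API.

import os

def configure_rayon(numcpus, workers_enabled):
    # Mirror of the env-var handling above: honour an explicit CPU count,
    # but force a single Rayon thread when workers are disabled entirely.
    if numcpus is not None:
        os.environ.setdefault('RAYON_NUM_THREADS', '%d' % numcpus)
    if not workers_enabled:
        os.environ['RAYON_NUM_THREADS'] = '1'

configure_rayon(numcpus=4, workers_enabled=False)
print(os.environ['RAYON_NUM_THREADS'])   # '1'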
1178 def status(self, match, subrepos, ignored, clean, unknown):
1178 def status(self, match, subrepos, ignored, clean, unknown):
1179 """Determine the status of the working copy relative to the
1179 """Determine the status of the working copy relative to the
1180 dirstate and return a pair of (unsure, status), where status is of type
1180 dirstate and return a pair of (unsure, status), where status is of type
1181 scmutil.status and:
1181 scmutil.status and:
1182
1182
1183 unsure:
1183 unsure:
1184 files that might have been modified since the dirstate was
1184 files that might have been modified since the dirstate was
1185 written, but need to be read to be sure (size is the same
1185 written, but need to be read to be sure (size is the same
1186 but mtime differs)
1186 but mtime differs)
1187 status.modified:
1187 status.modified:
1188 files that have definitely been modified since the dirstate
1188 files that have definitely been modified since the dirstate
1189 was written (different size or mode)
1189 was written (different size or mode)
1190 status.clean:
1190 status.clean:
1191 files that have definitely not been modified since the
1191 files that have definitely not been modified since the
1192 dirstate was written
1192 dirstate was written
1193 """
1193 """
1194 listignored, listclean, listunknown = ignored, clean, unknown
1194 listignored, listclean, listunknown = ignored, clean, unknown
1195 lookup, modified, added, unknown, ignored = [], [], [], [], []
1195 lookup, modified, added, unknown, ignored = [], [], [], [], []
1196 removed, deleted, clean = [], [], []
1196 removed, deleted, clean = [], [], []
1197
1197
1198 dmap = self._map
1198 dmap = self._map
1199 dmap.preload()
1199 dmap.preload()
1200
1200
1201 use_rust = True
1201 use_rust = True
1202
1202
1203 allowed_matchers = (
1203 allowed_matchers = (
1204 matchmod.alwaysmatcher,
1204 matchmod.alwaysmatcher,
1205 matchmod.exactmatcher,
1205 matchmod.exactmatcher,
1206 matchmod.includematcher,
1206 matchmod.includematcher,
1207 )
1207 )
1208
1208
1209 if rustmod is None:
1209 if rustmod is None:
1210 use_rust = False
1210 use_rust = False
1211 elif self._checkcase:
1211 elif self._checkcase:
1212 # Case-insensitive filesystems are not handled yet
1212 # Case-insensitive filesystems are not handled yet
1213 use_rust = False
1213 use_rust = False
1214 elif subrepos:
1214 elif subrepos:
1215 use_rust = False
1215 use_rust = False
1216 elif sparse.enabled:
1216 elif sparse.enabled:
1217 use_rust = False
1217 use_rust = False
1218 elif not isinstance(match, allowed_matchers):
1218 elif not isinstance(match, allowed_matchers):
1219 # Some matchers have yet to be implemented
1219 # Some matchers have yet to be implemented
1220 use_rust = False
1220 use_rust = False
1221
1221
1222 if use_rust:
1222 if use_rust:
1223 try:
1223 try:
1224 return self._rust_status(
1224 return self._rust_status(
1225 match, listclean, listignored, listunknown
1225 match, listclean, listignored, listunknown
1226 )
1226 )
1227 except rustmod.FallbackError:
1227 except rustmod.FallbackError:
1228 pass
1228 pass
1229
1229
1230 def noop(f):
1230 def noop(f):
1231 pass
1231 pass
1232
1232
1233 dcontains = dmap.__contains__
1233 dcontains = dmap.__contains__
1234 dget = dmap.__getitem__
1234 dget = dmap.__getitem__
1235 ladd = lookup.append # aka "unsure"
1235 ladd = lookup.append # aka "unsure"
1236 madd = modified.append
1236 madd = modified.append
1237 aadd = added.append
1237 aadd = added.append
1238 uadd = unknown.append if listunknown else noop
1238 uadd = unknown.append if listunknown else noop
1239 iadd = ignored.append if listignored else noop
1239 iadd = ignored.append if listignored else noop
1240 radd = removed.append
1240 radd = removed.append
1241 dadd = deleted.append
1241 dadd = deleted.append
1242 cadd = clean.append if listclean else noop
1242 cadd = clean.append if listclean else noop
1243 mexact = match.exact
1243 mexact = match.exact
1244 dirignore = self._dirignore
1244 dirignore = self._dirignore
1245 checkexec = self._checkexec
1245 checkexec = self._checkexec
1246 copymap = self._map.copymap
1246 copymap = self._map.copymap
1247 lastnormaltime = self._lastnormaltime
1247 lastnormaltime = self._lastnormaltime
1248
1248
1249 # We need to do full walks when either
1249 # We need to do full walks when either
1250 # - we're listing all clean files, or
1250 # - we're listing all clean files, or
1251 # - match.traversedir does something, because match.traversedir should
1251 # - match.traversedir does something, because match.traversedir should
1252 # be called for every dir in the working dir
1252 # be called for every dir in the working dir
1253 full = listclean or match.traversedir is not None
1253 full = listclean or match.traversedir is not None
1254 for fn, st in pycompat.iteritems(
1254 for fn, st in pycompat.iteritems(
1255 self.walk(match, subrepos, listunknown, listignored, full=full)
1255 self.walk(match, subrepos, listunknown, listignored, full=full)
1256 ):
1256 ):
1257 if not dcontains(fn):
1257 if not dcontains(fn):
1258 if (listignored or mexact(fn)) and dirignore(fn):
1258 if (listignored or mexact(fn)) and dirignore(fn):
1259 if listignored:
1259 if listignored:
1260 iadd(fn)
1260 iadd(fn)
1261 else:
1261 else:
1262 uadd(fn)
1262 uadd(fn)
1263 continue
1263 continue
1264
1264
1265 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1265 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1266 # written like that for performance reasons. dmap[fn] is not a
1266 # written like that for performance reasons. dmap[fn] is not a
1267 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1267 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1268 # opcode has fast paths when the value to be unpacked is a tuple or
1268 # opcode has fast paths when the value to be unpacked is a tuple or
1269 # a list, but falls back to creating a full-fledged iterator in
1269 # a list, but falls back to creating a full-fledged iterator in
1270 # general. That is much slower than simply accessing and storing the
1270 # general. That is much slower than simply accessing and storing the
1271 # tuple members one by one.
1271 # tuple members one by one.
1272 t = dget(fn)
1272 t = dget(fn)
1273 state = t[0]
1273 state = t[0]
1274 mode = t[1]
1274 mode = t[1]
1275 size = t[2]
1275 size = t[2]
1276 time = t[3]
1276 time = t[3]
1277
1277
1278 if not st and state in b"nma":
1278 if not st and state in b"nma":
1279 dadd(fn)
1279 dadd(fn)
1280 elif state == b'n':
1280 elif state == b'n':
1281 if (
1281 if (
1282 size >= 0
1282 size >= 0
1283 and (
1283 and (
1284 (size != st.st_size and size != st.st_size & _rangemask)
1284 (size != st.st_size and size != st.st_size & _rangemask)
1285 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1285 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1286 )
1286 )
1287 or size == -2 # other parent
1287 or size == -2 # other parent
1288 or fn in copymap
1288 or fn in copymap
1289 ):
1289 ):
1290 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1290 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1291 # issue6456: Size returned may be longer due to
1291 # issue6456: Size returned may be longer due to
1292 # encryption on EXT-4 fscrypt, undecided.
1292 # encryption on EXT-4 fscrypt, undecided.
1293 ladd(fn)
1293 ladd(fn)
1294 else:
1294 else:
1295 madd(fn)
1295 madd(fn)
1296 elif (
1296 elif (
1297 time != st[stat.ST_MTIME]
1297 time != st[stat.ST_MTIME]
1298 and time != st[stat.ST_MTIME] & _rangemask
1298 and time != st[stat.ST_MTIME] & _rangemask
1299 ):
1299 ):
1300 ladd(fn)
1300 ladd(fn)
1301 elif st[stat.ST_MTIME] == lastnormaltime:
1301 elif st[stat.ST_MTIME] == lastnormaltime:
1302 # fn may have just been marked as normal and it may have
1302 # fn may have just been marked as normal and it may have
1303 # changed in the same second without changing its size.
1303 # changed in the same second without changing its size.
1304 # This can happen if we quickly do multiple commits.
1304 # This can happen if we quickly do multiple commits.
1305 # Force lookup, so we don't miss such a racy file change.
1305 # Force lookup, so we don't miss such a racy file change.
1306 ladd(fn)
1306 ladd(fn)
1307 elif listclean:
1307 elif listclean:
1308 cadd(fn)
1308 cadd(fn)
1309 elif state == b'm':
1309 elif state == b'm':
1310 madd(fn)
1310 madd(fn)
1311 elif state == b'a':
1311 elif state == b'a':
1312 aadd(fn)
1312 aadd(fn)
1313 elif state == b'r':
1313 elif state == b'r':
1314 radd(fn)
1314 radd(fn)
1315 status = scmutil.status(
1315 status = scmutil.status(
1316 modified, added, removed, deleted, unknown, ignored, clean
1316 modified, added, removed, deleted, unknown, ignored, clean
1317 )
1317 )
1318 return (lookup, status)
1318 return (lookup, status)
1319
1319
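Reviewer note: the size/mtime checks in the 'n' branch above are easy to misread, so here is a stand-alone sketch of the same decision, with the exec-bit, copy-map and lastnormaltime cases left out. The 31-bit mask mirrors _rangemask; the numbers are made up.

_rangemask = 0x7FFFFFFF   # dirstate truncates size/mtime to 31 bits

def classify(entry_size, entry_mtime, st_size, st_mtime):
    """Return 'modified', 'lookup' (unsure) or 'clean' for a normal entry."""
    if entry_size >= 0 and entry_size not in (st_size, st_size & _rangemask):
        return 'modified'        # size changed: definitely modified
    if entry_mtime not in (st_mtime, st_mtime & _rangemask):
        return 'lookup'          # same size, different mtime: re-read content
    return 'clean'

# A >2 GiB file is stored with a truncated size, so the masked comparison
# still reports it as clean when nothing changed:
print(classify(3_000_000_000 & _rangemask, 1234, 3_000_000_000, 1234))   # clean
print(classify(10, 1234, 10, 5678))                                      # lookup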
1320 def matches(self, match):
1320 def matches(self, match):
1321 """
1321 """
1322 return files in the dirstate (in whatever state) filtered by match
1322 return files in the dirstate (in whatever state) filtered by match
1323 """
1323 """
1324 dmap = self._map
1324 dmap = self._map
1325 if rustmod is not None:
1325 if rustmod is not None:
1326 dmap = self._map._rustmap
1326 dmap = self._map._rustmap
1327
1327
1328 if match.always():
1328 if match.always():
1329 return dmap.keys()
1329 return dmap.keys()
1330 files = match.files()
1330 files = match.files()
1331 if match.isexact():
1331 if match.isexact():
1332 # fast path -- filter the other way around, since typically files is
1332 # fast path -- filter the other way around, since typically files is
1333 # much smaller than dmap
1333 # much smaller than dmap
1334 return [f for f in files if f in dmap]
1334 return [f for f in files if f in dmap]
1335 if match.prefix() and all(fn in dmap for fn in files):
1335 if match.prefix() and all(fn in dmap for fn in files):
1336 # fast path -- all the values are known to be files, so just return
1336 # fast path -- all the values are known to be files, so just return
1337 # that
1337 # that
1338 return list(files)
1338 return list(files)
1339 return [f for f in dmap if match(f)]
1339 return [f for f in dmap if match(f)]
1340
1340
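Reviewer note: a toy illustration of the three paths taken by matches() above: exact matcher, prefix-of-plain-files, and the general scan. File names and the glob stand-in are invented.

import fnmatch

dmap_keys = {b'a.txt', b'dir/b.txt', b'dir/c.txt'}    # pretend dirstate contents

# exact matcher: filter the (usually small) requested list against the dirstate
requested = [b'dir/b.txt', b'not-tracked.txt']
print([f for f in requested if f in dmap_keys])       # [b'dir/b.txt']

# prefix matcher whose roots are all tracked files: just return the file list
files = [b'a.txt']
if all(f in dmap_keys for f in files):
    print(list(files))                                # [b'a.txt']

# general case: run every dirstate entry through the matcher
print([f for f in sorted(dmap_keys) if fnmatch.fnmatch(f.decode(), 'dir/*')])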
1341 def _actualfilename(self, tr):
1341 def _actualfilename(self, tr):
1342 if tr:
1342 if tr:
1343 return self._pendingfilename
1343 return self._pendingfilename
1344 else:
1344 else:
1345 return self._filename
1345 return self._filename
1346
1346
1347 def savebackup(self, tr, backupname):
1347 def savebackup(self, tr, backupname):
1348 '''Save current dirstate into backup file'''
1348 '''Save current dirstate into backup file'''
1349 filename = self._actualfilename(tr)
1349 filename = self._actualfilename(tr)
1350 assert backupname != filename
1350 assert backupname != filename
1351
1351
1352 # use '_writedirstate' instead of 'write' to write changes certainly,
1352 # use '_writedirstate' instead of 'write' to write changes certainly,
1353 # because the latter omits writing out if transaction is running.
1353 # because the latter omits writing out if transaction is running.
1354 # output file will be used to create backup of dirstate at this point.
1354 # output file will be used to create backup of dirstate at this point.
1355 if self._dirty or not self._opener.exists(filename):
1355 if self._dirty or not self._opener.exists(filename):
1356 self._writedirstate(
1356 self._writedirstate(
1357 self._opener(filename, b"w", atomictemp=True, checkambig=True)
1357 self._opener(filename, b"w", atomictemp=True, checkambig=True)
1358 )
1358 )
1359
1359
1360 if tr:
1360 if tr:
1361 # ensure that subsequent tr.writepending returns True for
1361 # ensure that subsequent tr.writepending returns True for
1362 # changes written out above, even if dirstate is never
1362 # changes written out above, even if dirstate is never
1363 # changed after this
1363 # changed after this
1364 tr.addfilegenerator(
1364 tr.addfilegenerator(
1365 b'dirstate',
1365 b'dirstate',
1366 (self._filename,),
1366 (self._filename,),
1367 self._writedirstate,
1367 self._writedirstate,
1368 location=b'plain',
1368 location=b'plain',
1369 )
1369 )
1370
1370
1371 # ensure that pending file written above is unlinked at
1371 # ensure that pending file written above is unlinked at
1372 # failure, even if tr.writepending isn't invoked until the
1372 # failure, even if tr.writepending isn't invoked until the
1373 # end of this transaction
1373 # end of this transaction
1374 tr.registertmp(filename, location=b'plain')
1374 tr.registertmp(filename, location=b'plain')
1375
1375
1376 self._opener.tryunlink(backupname)
1376 self._opener.tryunlink(backupname)
1377 # hardlink backup is okay because _writedirstate is always called
1377 # hardlink backup is okay because _writedirstate is always called
1378 # with an "atomictemp=True" file.
1378 # with an "atomictemp=True" file.
1379 util.copyfile(
1379 util.copyfile(
1380 self._opener.join(filename),
1380 self._opener.join(filename),
1381 self._opener.join(backupname),
1381 self._opener.join(backupname),
1382 hardlink=True,
1382 hardlink=True,
1383 )
1383 )
1384
1384
1385 def restorebackup(self, tr, backupname):
1385 def restorebackup(self, tr, backupname):
1386 '''Restore dirstate by backup file'''
1386 '''Restore dirstate by backup file'''
1387 # this "invalidate()" prevents "wlock.release()" from writing
1387 # this "invalidate()" prevents "wlock.release()" from writing
1388 # changes of dirstate out after restoring from backup file
1388 # changes of dirstate out after restoring from backup file
1389 self.invalidate()
1389 self.invalidate()
1390 filename = self._actualfilename(tr)
1390 filename = self._actualfilename(tr)
1391 o = self._opener
1391 o = self._opener
1392 if util.samefile(o.join(backupname), o.join(filename)):
1392 if util.samefile(o.join(backupname), o.join(filename)):
1393 o.unlink(backupname)
1393 o.unlink(backupname)
1394 else:
1394 else:
1395 o.rename(backupname, filename, checkambig=True)
1395 o.rename(backupname, filename, checkambig=True)
1396
1396
1397 def clearbackup(self, tr, backupname):
1397 def clearbackup(self, tr, backupname):
1398 '''Clear backup file'''
1398 '''Clear backup file'''
1399 self._opener.unlink(backupname)
1399 self._opener.unlink(backupname)
1400
1400
1401
1401
1402 class dirstatemap(object):
1402 class dirstatemap(object):
1403 """Map encapsulating the dirstate's contents.
1403 """Map encapsulating the dirstate's contents.
1404
1404
1405 The dirstate contains the following state:
1405 The dirstate contains the following state:
1406
1406
1407 - `identity` is the identity of the dirstate file, which can be used to
1407 - `identity` is the identity of the dirstate file, which can be used to
1408 detect when changes have occurred to the dirstate file.
1408 detect when changes have occurred to the dirstate file.
1409
1409
1410 - `parents` is a pair containing the parents of the working copy. The
1410 - `parents` is a pair containing the parents of the working copy. The
1411 parents are updated by calling `setparents`.
1411 parents are updated by calling `setparents`.
1412
1412
1413 - the state map maps filenames to tuples of (state, mode, size, mtime),
1413 - the state map maps filenames to tuples of (state, mode, size, mtime),
1414 where state is a single character representing 'normal', 'added',
1414 where state is a single character representing 'normal', 'added',
1415 'removed', or 'merged'. It is read by treating the dirstate as a
1415 'removed', or 'merged'. It is read by treating the dirstate as a
1416 dict. File state is updated by calling the `addfile`, `removefile` and
1416 dict. File state is updated by calling the `addfile`, `removefile` and
1417 `dropfile` methods.
1417 `dropfile` methods.
1418
1418
1419 - `copymap` maps destination filenames to their source filename.
1419 - `copymap` maps destination filenames to their source filename.
1420
1420
1421 The dirstate also provides the following views onto the state:
1421 The dirstate also provides the following views onto the state:
1422
1422
1423 - `nonnormalset` is a set of the filenames that have state other
1423 - `nonnormalset` is a set of the filenames that have state other
1424 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1424 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1425
1425
1426 - `otherparentset` is a set of the filenames that are marked as coming
1426 - `otherparentset` is a set of the filenames that are marked as coming
1427 from the second parent when the dirstate is currently being merged.
1427 from the second parent when the dirstate is currently being merged.
1428
1428
1429 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1429 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1430 form that they appear as in the dirstate.
1430 form that they appear as in the dirstate.
1431
1431
1432 - `dirfoldmap` is a dict mapping normalized directory names to the
1432 - `dirfoldmap` is a dict mapping normalized directory names to the
1433 denormalized form that they appear as in the dirstate.
1433 denormalized form that they appear as in the dirstate.
1434 """
1434 """
1435
1435
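Reviewer note: a toy, stand-alone illustration of the data shapes the docstring above describes. All values are invented; the nonnormalset derivation mirrors nonnormalentries() further down.

# state map: filename -> (state, mode, size, mtime); copymap: destination -> source
state_map = {
    b'clean.txt':   (b'n', 0o644, 11, 1600000000),
    b'added.txt':   (b'a', 0, -1, -1),
    b'removed.txt': (b'r', 0, 0, 0),
}
copymap = {b'copy.txt': b'original.txt'}

# nonnormalset: state != 'n', or normal entries demoted to mtime == -1 (lookup)
nonnormalset = {f for f, e in state_map.items() if e[0] != b'n' or e[3] == -1}
# otherparentset: normal entries whose size sentinel is -2 (from the second parent)
otherparentset = {f for f, e in state_map.items() if e[0] == b'n' and e[2] == -2}
print(sorted(nonnormalset), sorted(otherparentset))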
1436 def __init__(self, ui, opener, root, nodeconstants):
1436 def __init__(self, ui, opener, root, nodeconstants):
1437 self._ui = ui
1437 self._ui = ui
1438 self._opener = opener
1438 self._opener = opener
1439 self._root = root
1439 self._root = root
1440 self._filename = b'dirstate'
1440 self._filename = b'dirstate'
1441 self._nodelen = 20
1441 self._nodelen = 20
1442 self._nodeconstants = nodeconstants
1442 self._nodeconstants = nodeconstants
1443
1443
1444 self._parents = None
1444 self._parents = None
1445 self._dirtyparents = False
1445 self._dirtyparents = False
1446
1446
1447 # for consistent view between _pl() and _read() invocations
1447 # for consistent view between _pl() and _read() invocations
1448 self._pendingmode = None
1448 self._pendingmode = None
1449
1449
1450 @propertycache
1450 @propertycache
1451 def _map(self):
1451 def _map(self):
1452 self._map = {}
1452 self._map = {}
1453 self.read()
1453 self.read()
1454 return self._map
1454 return self._map
1455
1455
1456 @propertycache
1456 @propertycache
1457 def copymap(self):
1457 def copymap(self):
1458 self.copymap = {}
1458 self.copymap = {}
1459 self._map
1459 self._map
1460 return self.copymap
1460 return self.copymap
1461
1461
1462 def clear(self):
1462 def clear(self):
1463 self._map.clear()
1463 self._map.clear()
1464 self.copymap.clear()
1464 self.copymap.clear()
1465 self.setparents(self._nodeconstants.nullid, self._nodeconstants.nullid)
1465 self.setparents(self._nodeconstants.nullid, self._nodeconstants.nullid)
1466 util.clearcachedproperty(self, b"_dirs")
1466 util.clearcachedproperty(self, b"_dirs")
1467 util.clearcachedproperty(self, b"_alldirs")
1467 util.clearcachedproperty(self, b"_alldirs")
1468 util.clearcachedproperty(self, b"filefoldmap")
1468 util.clearcachedproperty(self, b"filefoldmap")
1469 util.clearcachedproperty(self, b"dirfoldmap")
1469 util.clearcachedproperty(self, b"dirfoldmap")
1470 util.clearcachedproperty(self, b"nonnormalset")
1470 util.clearcachedproperty(self, b"nonnormalset")
1471 util.clearcachedproperty(self, b"otherparentset")
1471 util.clearcachedproperty(self, b"otherparentset")
1472
1472
1473 def items(self):
1473 def items(self):
1474 return pycompat.iteritems(self._map)
1474 return pycompat.iteritems(self._map)
1475
1475
1476 # forward for python2,3 compat
1476 # forward for python2,3 compat
1477 iteritems = items
1477 iteritems = items
1478
1478
1479 def __len__(self):
1479 def __len__(self):
1480 return len(self._map)
1480 return len(self._map)
1481
1481
1482 def __iter__(self):
1482 def __iter__(self):
1483 return iter(self._map)
1483 return iter(self._map)
1484
1484
1485 def get(self, key, default=None):
1485 def get(self, key, default=None):
1486 return self._map.get(key, default)
1486 return self._map.get(key, default)
1487
1487
1488 def __contains__(self, key):
1488 def __contains__(self, key):
1489 return key in self._map
1489 return key in self._map
1490
1490
1491 def __getitem__(self, key):
1491 def __getitem__(self, key):
1492 return self._map[key]
1492 return self._map[key]
1493
1493
1494 def keys(self):
1494 def keys(self):
1495 return self._map.keys()
1495 return self._map.keys()
1496
1496
1497 def preload(self):
1497 def preload(self):
1498 """Loads the underlying data, if it's not already loaded"""
1498 """Loads the underlying data, if it's not already loaded"""
1499 self._map
1499 self._map
1500
1500
1501 def addfile(self, f, oldstate, state, mode, size, mtime):
1501 def addfile(self, f, oldstate, state, mode, size, mtime):
1502 """Add a tracked file to the dirstate."""
1502 """Add a tracked file to the dirstate."""
1503 if oldstate in b"?r" and "_dirs" in self.__dict__:
1503 if oldstate in b"?r" and "_dirs" in self.__dict__:
1504 self._dirs.addpath(f)
1504 self._dirs.addpath(f)
1505 if oldstate == b"?" and "_alldirs" in self.__dict__:
1505 if oldstate == b"?" and "_alldirs" in self.__dict__:
1506 self._alldirs.addpath(f)
1506 self._alldirs.addpath(f)
1507 self._map[f] = dirstatetuple(state, mode, size, mtime)
1507 self._map[f] = dirstatetuple(state, mode, size, mtime)
1508 if state != b'n' or mtime == -1:
1508 if state != b'n' or mtime == -1:
1509 self.nonnormalset.add(f)
1509 self.nonnormalset.add(f)
1510 if size == -2:
1510 if size == -2:
1511 self.otherparentset.add(f)
1511 self.otherparentset.add(f)
1512
1512
1513 def removefile(self, f, oldstate, size):
1513 def removefile(self, f, oldstate, size):
1514 """
1514 """
1515 Mark a file as removed in the dirstate.
1515 Mark a file as removed in the dirstate.
1516
1516
1517 The `size` parameter is used to store sentinel values that indicate
1517 The `size` parameter is used to store sentinel values that indicate
1518 the file's previous state. In the future, we should refactor this
1518 the file's previous state. In the future, we should refactor this
1519 to be more explicit about what that state is.
1519 to be more explicit about what that state is.
1520 """
1520 """
1521 if oldstate not in b"?r" and "_dirs" in self.__dict__:
1521 if oldstate not in b"?r" and "_dirs" in self.__dict__:
1522 self._dirs.delpath(f)
1522 self._dirs.delpath(f)
1523 if oldstate == b"?" and "_alldirs" in self.__dict__:
1523 if oldstate == b"?" and "_alldirs" in self.__dict__:
1524 self._alldirs.addpath(f)
1524 self._alldirs.addpath(f)
1525 if "filefoldmap" in self.__dict__:
1525 if "filefoldmap" in self.__dict__:
1526 normed = util.normcase(f)
1526 normed = util.normcase(f)
1527 self.filefoldmap.pop(normed, None)
1527 self.filefoldmap.pop(normed, None)
1528 self._map[f] = dirstatetuple(b'r', 0, size, 0)
1528 self._map[f] = dirstatetuple(b'r', 0, size, 0)
1529 self.nonnormalset.add(f)
1529 self.nonnormalset.add(f)
1530
1530
1531 def dropfile(self, f, oldstate):
1531 def dropfile(self, f, oldstate):
1532 """
1532 """
1533 Remove a file from the dirstate. Returns True if the file was
1533 Remove a file from the dirstate. Returns True if the file was
1534 previously recorded.
1534 previously recorded.
1535 """
1535 """
1536 exists = self._map.pop(f, None) is not None
1536 exists = self._map.pop(f, None) is not None
1537 if exists:
1537 if exists:
1538 if oldstate != b"r" and "_dirs" in self.__dict__:
1538 if oldstate != b"r" and "_dirs" in self.__dict__:
1539 self._dirs.delpath(f)
1539 self._dirs.delpath(f)
1540 if "_alldirs" in self.__dict__:
1540 if "_alldirs" in self.__dict__:
1541 self._alldirs.delpath(f)
1541 self._alldirs.delpath(f)
1542 if "filefoldmap" in self.__dict__:
1542 if "filefoldmap" in self.__dict__:
1543 normed = util.normcase(f)
1543 normed = util.normcase(f)
1544 self.filefoldmap.pop(normed, None)
1544 self.filefoldmap.pop(normed, None)
1545 self.nonnormalset.discard(f)
1545 self.nonnormalset.discard(f)
1546 return exists
1546 return exists
1547
1547
1548 def clearambiguoustimes(self, files, now):
1548 def clearambiguoustimes(self, files, now):
1549 for f in files:
1549 for f in files:
1550 e = self.get(f)
1550 e = self.get(f)
1551 if e is not None and e[0] == b'n' and e[3] == now:
1551 if e is not None and e[0] == b'n' and e[3] == now:
1552 self._map[f] = dirstatetuple(e[0], e[1], e[2], -1)
1552 self._map[f] = dirstatetuple(e[0], e[1], e[2], -1)
1553 self.nonnormalset.add(f)
1553 self.nonnormalset.add(f)
1554
1554
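Reviewer note: a stand-alone demonstration of the same-second ambiguity that clearambiguoustimes() (and the lastnormaltime check in status()) guard against. The temporary file and contents are invented; on most filesystems the two writes land in the same mtime tick.

import os
import stat
import tempfile

# Two writes of the same size within one timestamp tick are indistinguishable
# by (size, mtime) alone, which is why entries recorded "now" are demoted to
# mtime == -1 so that the next status run re-reads their contents.
fd, path = tempfile.mkstemp()
try:
    os.write(fd, b'aaaa')
    os.close(fd)
    first = os.stat(path)[stat.ST_MTIME]
    with open(path, 'wb') as f:
        f.write(b'bbbb')                      # same size, likely same second
    second = os.stat(path)[stat.ST_MTIME]
    print('ambiguous' if first == second else 'distinguishable')
finally:
    os.unlink(path)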
1555 def nonnormalentries(self):
1555 def nonnormalentries(self):
1556 '''Compute the nonnormal dirstate entries from the dmap'''
1556 '''Compute the nonnormal dirstate entries from the dmap'''
1557 try:
1557 try:
1558 return parsers.nonnormalotherparententries(self._map)
1558 return parsers.nonnormalotherparententries(self._map)
1559 except AttributeError:
1559 except AttributeError:
1560 nonnorm = set()
1560 nonnorm = set()
1561 otherparent = set()
1561 otherparent = set()
1562 for fname, e in pycompat.iteritems(self._map):
1562 for fname, e in pycompat.iteritems(self._map):
1563 if e[0] != b'n' or e[3] == -1:
1563 if e[0] != b'n' or e[3] == -1:
1564 nonnorm.add(fname)
1564 nonnorm.add(fname)
1565 if e[0] == b'n' and e[2] == -2:
1565 if e[0] == b'n' and e[2] == -2:
1566 otherparent.add(fname)
1566 otherparent.add(fname)
1567 return nonnorm, otherparent
1567 return nonnorm, otherparent
1568
1568
1569 @propertycache
1569 @propertycache
1570 def filefoldmap(self):
1570 def filefoldmap(self):
1571 """Returns a dictionary mapping normalized case paths to their
1571 """Returns a dictionary mapping normalized case paths to their
1572 non-normalized versions.
1572 non-normalized versions.
1573 """
1573 """
1574 try:
1574 try:
1575 makefilefoldmap = parsers.make_file_foldmap
1575 makefilefoldmap = parsers.make_file_foldmap
1576 except AttributeError:
1576 except AttributeError:
1577 pass
1577 pass
1578 else:
1578 else:
1579 return makefilefoldmap(
1579 return makefilefoldmap(
1580 self._map, util.normcasespec, util.normcasefallback
1580 self._map, util.normcasespec, util.normcasefallback
1581 )
1581 )
1582
1582
1583 f = {}
1583 f = {}
1584 normcase = util.normcase
1584 normcase = util.normcase
1585 for name, s in pycompat.iteritems(self._map):
1585 for name, s in pycompat.iteritems(self._map):
1586 if s[0] != b'r':
1586 if s[0] != b'r':
1587 f[normcase(name)] = name
1587 f[normcase(name)] = name
1588 f[b'.'] = b'.' # prevents useless util.fspath() invocation
1588 f[b'.'] = b'.' # prevents useless util.fspath() invocation
1589 return f
1589 return f
1590
1590
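Reviewer note: a tiny illustration of the case-folding map built above, using bytes.lower() as a stand-in for util.normcase (file names invented).

tracked = [b'Makefile', b'README.txt', b'src/Main.py']

# normalized-case path -> the spelling actually stored in the dirstate
filefoldmap = {name.lower(): name for name in tracked}
filefoldmap[b'.'] = b'.'   # same trick as above: avoid a useless fspath() lookup

# on a case-insensitive filesystem this lets 'readme.txt' find 'README.txt'
print(filefoldmap.get(b'readme.txt'.lower()))   # b'README.txt'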
1591 def hastrackeddir(self, d):
1591 def hastrackeddir(self, d):
1592 """
1592 """
1593 Returns True if the dirstate contains a tracked (not removed) file
1593 Returns True if the dirstate contains a tracked (not removed) file
1594 in this directory.
1594 in this directory.
1595 """
1595 """
1596 return d in self._dirs
1596 return d in self._dirs
1597
1597
1598 def hasdir(self, d):
1598 def hasdir(self, d):
1599 """
1599 """
1600 Returns True if the dirstate contains a file (tracked or removed)
1600 Returns True if the dirstate contains a file (tracked or removed)
1601 in this directory.
1601 in this directory.
1602 """
1602 """
1603 return d in self._alldirs
1603 return d in self._alldirs
1604
1604
1605 @propertycache
1605 @propertycache
1606 def _dirs(self):
1606 def _dirs(self):
1607 return pathutil.dirs(self._map, b'r')
1607 return pathutil.dirs(self._map, b'r')
1608
1608
1609 @propertycache
1609 @propertycache
1610 def _alldirs(self):
1610 def _alldirs(self):
1611 return pathutil.dirs(self._map)
1611 return pathutil.dirs(self._map)
1612
1612
1613 def _opendirstatefile(self):
1613 def _opendirstatefile(self):
1614 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1614 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1615 if self._pendingmode is not None and self._pendingmode != mode:
1615 if self._pendingmode is not None and self._pendingmode != mode:
1616 fp.close()
1616 fp.close()
1617 raise error.Abort(
1617 raise error.Abort(
1618 _(b'working directory state may be changed parallelly')
1618 _(b'working directory state may be changed parallelly')
1619 )
1619 )
1620 self._pendingmode = mode
1620 self._pendingmode = mode
1621 return fp
1621 return fp
1622
1622
1623 def parents(self):
1623 def parents(self):
1624 if not self._parents:
1624 if not self._parents:
1625 try:
1625 try:
1626 fp = self._opendirstatefile()
1626 fp = self._opendirstatefile()
1627 st = fp.read(2 * self._nodelen)
1627 st = fp.read(2 * self._nodelen)
1628 fp.close()
1628 fp.close()
1629 except IOError as err:
1629 except IOError as err:
1630 if err.errno != errno.ENOENT:
1630 if err.errno != errno.ENOENT:
1631 raise
1631 raise
1632 # File doesn't exist, so the current state is empty
1632 # File doesn't exist, so the current state is empty
1633 st = b''
1633 st = b''
1634
1634
1635 l = len(st)
1635 l = len(st)
1636 if l == self._nodelen * 2:
1636 if l == self._nodelen * 2:
1637 self._parents = (
1637 self._parents = (
1638 st[: self._nodelen],
1638 st[: self._nodelen],
1639 st[self._nodelen : 2 * self._nodelen],
1639 st[self._nodelen : 2 * self._nodelen],
1640 )
1640 )
1641 elif l == 0:
1641 elif l == 0:
1642 self._parents = (
1642 self._parents = (
1643 self._nodeconstants.nullid,
1643 self._nodeconstants.nullid,
1644 self._nodeconstants.nullid,
1644 self._nodeconstants.nullid,
1645 )
1645 )
1646 else:
1646 else:
1647 raise error.Abort(
1647 raise error.Abort(
1648 _(b'working directory state appears damaged!')
1648 _(b'working directory state appears damaged!')
1649 )
1649 )
1650
1650
1651 return self._parents
1651 return self._parents
1652
1652
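Reviewer note: a self-contained sketch of the parent parsing done in parents() above (20-byte binary node length, as in self._nodelen; the hash bytes are invented).

NODELEN = 20
NULLID = b'\0' * NODELEN

def parse_parents(header):
    """Split the first 40 bytes of a v1 dirstate into (p1, p2)."""
    if len(header) == 2 * NODELEN:
        return header[:NODELEN], header[NODELEN:2 * NODELEN]
    if len(header) == 0:
        return NULLID, NULLID          # no dirstate file yet: both parents null
    raise ValueError('working directory state appears damaged!')

p1 = b'\x11' * NODELEN
print(parse_parents(p1 + NULLID) == (p1, NULLID))   # True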
1653 def setparents(self, p1, p2):
1653 def setparents(self, p1, p2):
1654 self._parents = (p1, p2)
1654 self._parents = (p1, p2)
1655 self._dirtyparents = True
1655 self._dirtyparents = True
1656
1656
1657 def read(self):
1657 def read(self):
1658 # ignore HG_PENDING because identity is used only for writing
1658 # ignore HG_PENDING because identity is used only for writing
1659 self.identity = util.filestat.frompath(
1659 self.identity = util.filestat.frompath(
1660 self._opener.join(self._filename)
1660 self._opener.join(self._filename)
1661 )
1661 )
1662
1662
1663 try:
1663 try:
1664 fp = self._opendirstatefile()
1664 fp = self._opendirstatefile()
1665 try:
1665 try:
1666 st = fp.read()
1666 st = fp.read()
1667 finally:
1667 finally:
1668 fp.close()
1668 fp.close()
1669 except IOError as err:
1669 except IOError as err:
1670 if err.errno != errno.ENOENT:
1670 if err.errno != errno.ENOENT:
1671 raise
1671 raise
1672 return
1672 return
1673 if not st:
1673 if not st:
1674 return
1674 return
1675
1675
1676 if util.safehasattr(parsers, b'dict_new_presized'):
1676 if util.safehasattr(parsers, b'dict_new_presized'):
1677 # Make an estimate of the number of files in the dirstate based on
1677 # Make an estimate of the number of files in the dirstate based on
1678 # its size. This trades wasting some memory for avoiding costly
1678 # its size. This trades wasting some memory for avoiding costly
1679 # resizes. Each entry has a prefix of 17 bytes followed by one or
1679 # resizes. Each entry has a prefix of 17 bytes followed by one or
1680 # two path names. Studies on various large-scale real-world repositories
1680 # two path names. Studies on various large-scale real-world repositories
1681 # found 54 bytes to be a reasonable upper limit for the average path name.
1681 # found 54 bytes to be a reasonable upper limit for the average path name.
1682 # Copy entries are ignored for the sake of this estimate.
1682 # Copy entries are ignored for the sake of this estimate.
1683 self._map = parsers.dict_new_presized(len(st) // 71)
1683 self._map = parsers.dict_new_presized(len(st) // 71)
1684
1684
1685 # Python's garbage collector triggers a GC each time a certain number
1685 # Python's garbage collector triggers a GC each time a certain number
1686 # of container objects (the number being defined by
1686 # of container objects (the number being defined by
1687 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1687 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1688 # for each file in the dirstate. The C version then immediately marks
1688 # for each file in the dirstate. The C version then immediately marks
1689 # them as not to be tracked by the collector. However, this has no
1689 # them as not to be tracked by the collector. However, this has no
1690 # effect on when GCs are triggered, only on what objects the GC looks
1690 # effect on when GCs are triggered, only on what objects the GC looks
1691 # into. This means that O(number of files) GCs are unavoidable.
1691 # into. This means that O(number of files) GCs are unavoidable.
1692 # Depending on when in the process's lifetime the dirstate is parsed,
1692 # Depending on when in the process's lifetime the dirstate is parsed,
1693 # this can get very expensive. As a workaround, disable GC while
1693 # this can get very expensive. As a workaround, disable GC while
1694 # parsing the dirstate.
1694 # parsing the dirstate.
1695 #
1695 #
1696 # (we cannot decorate the function directly since it is in a C module)
1696 # (we cannot decorate the function directly since it is in a C module)
1697 parse_dirstate = util.nogc(parsers.parse_dirstate)
1697 parse_dirstate = util.nogc(parsers.parse_dirstate)
1698 p = parse_dirstate(self._map, self.copymap, st)
1698 p = parse_dirstate(self._map, self.copymap, st)
1699 if not self._dirtyparents:
1699 if not self._dirtyparents:
1700 self.setparents(*p)
1700 self.setparents(*p)
1701
1701
1702 # Avoid excess attribute lookups by fast pathing certain checks
1702 # Avoid excess attribute lookups by fast pathing certain checks
1703 self.__contains__ = self._map.__contains__
1703 self.__contains__ = self._map.__contains__
1704 self.__getitem__ = self._map.__getitem__
1704 self.__getitem__ = self._map.__getitem__
1705 self.get = self._map.get
1705 self.get = self._map.get
1706
1706
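Reviewer note: a quick sanity check of the len(st) // 71 pre-sizing estimate in read() above. The 17-byte figure is the fixed per-entry header of the v1 format and 54 bytes is the average-path upper bound quoted in the comment; the file count below is invented.

ENTRY_HEADER = 17      # state (1) + mode (4) + size (4) + mtime (4) + path length (4)
AVG_PATH_LEN = 54      # empirical upper bound quoted in the comment above

print(ENTRY_HEADER + AVG_PATH_LEN)        # 71, the divisor used for the estimate
dirstate_bytes = 1_000_000 * (ENTRY_HEADER + AVG_PATH_LEN)
print(dirstate_bytes // 71)               # ~1,000,000 presized dict slots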
1707 def write(self, st, now):
1707 def write(self, st, now):
1708 st.write(
1708 st.write(
1709 parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
1709 parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
1710 )
1710 )
1711 st.close()
1711 st.close()
1712 self._dirtyparents = False
1712 self._dirtyparents = False
1713 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1713 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1714
1714
1715 @propertycache
1715 @propertycache
1716 def nonnormalset(self):
1716 def nonnormalset(self):
1717 nonnorm, otherparents = self.nonnormalentries()
1717 nonnorm, otherparents = self.nonnormalentries()
1718 self.otherparentset = otherparents
1718 self.otherparentset = otherparents
1719 return nonnorm
1719 return nonnorm
1720
1720
1721 @propertycache
1721 @propertycache
1722 def otherparentset(self):
1722 def otherparentset(self):
1723 nonnorm, otherparents = self.nonnormalentries()
1723 nonnorm, otherparents = self.nonnormalentries()
1724 self.nonnormalset = nonnorm
1724 self.nonnormalset = nonnorm
1725 return otherparents
1725 return otherparents
1726
1726
1727 def non_normal_or_other_parent_paths(self):
1727 def non_normal_or_other_parent_paths(self):
1728 return self.nonnormalset.union(self.otherparentset)
1728 return self.nonnormalset.union(self.otherparentset)
1729
1729
1730 @propertycache
1730 @propertycache
1731 def identity(self):
1731 def identity(self):
1732 self._map
1732 self._map
1733 return self.identity
1733 return self.identity
1734
1734
1735 @propertycache
1735 @propertycache
1736 def dirfoldmap(self):
1736 def dirfoldmap(self):
1737 f = {}
1737 f = {}
1738 normcase = util.normcase
1738 normcase = util.normcase
1739 for name in self._dirs:
1739 for name in self._dirs:
1740 f[normcase(name)] = name
1740 f[normcase(name)] = name
1741 return f
1741 return f
1742
1742
1743
1743
1744 if rustmod is not None:
1744 if rustmod is not None:
1745
1745
1746 class dirstatemap(object):
1746 class dirstatemap(object):
1747 def __init__(self, ui, opener, root, nodeconstants):
1747 def __init__(self, ui, opener, root, nodeconstants):
1748 self._nodeconstants = nodeconstants
1748 self._nodeconstants = nodeconstants
1749 self._ui = ui
1749 self._ui = ui
1750 self._opener = opener
1750 self._opener = opener
1751 self._root = root
1751 self._root = root
1752 self._filename = b'dirstate'
1752 self._filename = b'dirstate'
1753 self._nodelen = 20
1753 self._nodelen = 20
1754 self._parents = None
1754 self._parents = None
1755 self._dirtyparents = False
1755 self._dirtyparents = False
1756
1756
1757 # for consistent view between _pl() and _read() invocations
1757 # for consistent view between _pl() and _read() invocations
1758 self._pendingmode = None
1758 self._pendingmode = None
1759
1759
1760 def addfile(self, *args, **kwargs):
1760 def addfile(self, *args, **kwargs):
1761 return self._rustmap.addfile(*args, **kwargs)
1761 return self._rustmap.addfile(*args, **kwargs)
1762
1762
1763 def removefile(self, *args, **kwargs):
1763 def removefile(self, *args, **kwargs):
1764 return self._rustmap.removefile(*args, **kwargs)
1764 return self._rustmap.removefile(*args, **kwargs)
1765
1765
1766 def dropfile(self, *args, **kwargs):
1766 def dropfile(self, *args, **kwargs):
1767 return self._rustmap.dropfile(*args, **kwargs)
1767 return self._rustmap.dropfile(*args, **kwargs)
1768
1768
1769 def clearambiguoustimes(self, *args, **kwargs):
1769 def clearambiguoustimes(self, *args, **kwargs):
1770 return self._rustmap.clearambiguoustimes(*args, **kwargs)
1770 return self._rustmap.clearambiguoustimes(*args, **kwargs)
1771
1771
1772 def nonnormalentries(self):
1772 def nonnormalentries(self):
1773 return self._rustmap.nonnormalentries()
1773 return self._rustmap.nonnormalentries()
1774
1774
1775 def get(self, *args, **kwargs):
1775 def get(self, *args, **kwargs):
1776 return self._rustmap.get(*args, **kwargs)
1776 return self._rustmap.get(*args, **kwargs)
1777
1777
1778 @propertycache
1779 def _rustmap(self):
1780 """
1781 Fills the Dirstatemap when called.
1782 """
1783 use_dirstate_tree = self._ui.configbool(
1784 b"experimental",
1785 b"dirstate-tree.in-memory",
1786 False,
1787 )
1788 self._rustmap = rustmod.DirstateMap(use_dirstate_tree)
1789 self.read()
1790 return self._rustmap
1791
1792 @property
1778 @property
1793 def copymap(self):
1779 def copymap(self):
1794 return self._rustmap.copymap()
1780 return self._rustmap.copymap()
1795
1781
1796 def preload(self):
1782 def preload(self):
1797 self._rustmap
1783 self._rustmap
1798
1784
1799 def clear(self):
1785 def clear(self):
1800 self._rustmap.clear()
1786 self._rustmap.clear()
1801 self.setparents(
1787 self.setparents(
1802 self._nodeconstants.nullid, self._nodeconstants.nullid
1788 self._nodeconstants.nullid, self._nodeconstants.nullid
1803 )
1789 )
1804 util.clearcachedproperty(self, b"_dirs")
1790 util.clearcachedproperty(self, b"_dirs")
1805 util.clearcachedproperty(self, b"_alldirs")
1791 util.clearcachedproperty(self, b"_alldirs")
1806 util.clearcachedproperty(self, b"dirfoldmap")
1792 util.clearcachedproperty(self, b"dirfoldmap")
1807
1793
1808 def items(self):
1794 def items(self):
1809 return self._rustmap.items()
1795 return self._rustmap.items()
1810
1796
1811 def keys(self):
1797 def keys(self):
1812 return iter(self._rustmap)
1798 return iter(self._rustmap)
1813
1799
1814 def __contains__(self, key):
1800 def __contains__(self, key):
1815 return key in self._rustmap
1801 return key in self._rustmap
1816
1802
1817 def __getitem__(self, item):
1803 def __getitem__(self, item):
1818 return self._rustmap[item]
1804 return self._rustmap[item]
1819
1805
1820 def __len__(self):
1806 def __len__(self):
1821 return len(self._rustmap)
1807 return len(self._rustmap)
1822
1808
1823 def __iter__(self):
1809 def __iter__(self):
1824 return iter(self._rustmap)
1810 return iter(self._rustmap)
1825
1811
1826 # forward for python2,3 compat
1812 # forward for python2,3 compat
1827 iteritems = items
1813 iteritems = items
1828
1814
1829 def _opendirstatefile(self):
1815 def _opendirstatefile(self):
1830 fp, mode = txnutil.trypending(
1816 fp, mode = txnutil.trypending(
1831 self._root, self._opener, self._filename
1817 self._root, self._opener, self._filename
1832 )
1818 )
1833 if self._pendingmode is not None and self._pendingmode != mode:
1819 if self._pendingmode is not None and self._pendingmode != mode:
1834 fp.close()
1820 fp.close()
1835 raise error.Abort(
1821 raise error.Abort(
1836 _(b'working directory state may be changed parallelly')
1822 _(b'working directory state may be changed parallelly')
1837 )
1823 )
1838 self._pendingmode = mode
1824 self._pendingmode = mode
1839 return fp
1825 return fp
1840
1826
1841 def setparents(self, p1, p2):
1827 def setparents(self, p1, p2):
1842 self._parents = (p1, p2)
1828 self._parents = (p1, p2)
1843 self._dirtyparents = True
1829 self._dirtyparents = True
1844
1830
1845 def parents(self):
1831 def parents(self):
1846 if not self._parents:
1832 if not self._parents:
1847 try:
1833 try:
1848 fp = self._opendirstatefile()
1834 fp = self._opendirstatefile()
1849 st = fp.read(40)
1835 st = fp.read(40)
1850 fp.close()
1836 fp.close()
1851 except IOError as err:
1837 except IOError as err:
1852 if err.errno != errno.ENOENT:
1838 if err.errno != errno.ENOENT:
1853 raise
1839 raise
1854 # File doesn't exist, so the current state is empty
1840 # File doesn't exist, so the current state is empty
1855 st = b''
1841 st = b''
1856
1842
1857 l = len(st)
1843 l = len(st)
1858 if l == self._nodelen * 2:
1844 if l == self._nodelen * 2:
1859 self._parents = (
1845 self._parents = (
1860 st[: self._nodelen],
1846 st[: self._nodelen],
1861 st[self._nodelen : 2 * self._nodelen],
1847 st[self._nodelen : 2 * self._nodelen],
1862 )
1848 )
1863 elif l == 0:
1849 elif l == 0:
1864 self._parents = (
1850 self._parents = (
1865 self._nodeconstants.nullid,
1851 self._nodeconstants.nullid,
1866 self._nodeconstants.nullid,
1852 self._nodeconstants.nullid,
1867 )
1853 )
1868 else:
1854 else:
1869 raise error.Abort(
1855 raise error.Abort(
1870 _(b'working directory state appears damaged!')
1856 _(b'working directory state appears damaged!')
1871 )
1857 )
1872
1858
1873 return self._parents
1859 return self._parents
1874
1860
1875 def read(self):
1861 @propertycache
1862 def _rustmap(self):
1863 """
1864 Fills the Dirstatemap when called.
1865 """
1876 # ignore HG_PENDING because identity is used only for writing
1866 # ignore HG_PENDING because identity is used only for writing
1877 self.identity = util.filestat.frompath(
1867 self.identity = util.filestat.frompath(
1878 self._opener.join(self._filename)
1868 self._opener.join(self._filename)
1879 )
1869 )
1880
1870
1881 try:
1871 try:
1882 fp = self._opendirstatefile()
1872 fp = self._opendirstatefile()
1883 try:
1873 try:
1884 st = fp.read()
1874 st = fp.read()
1885 finally:
1875 finally:
1886 fp.close()
1876 fp.close()
1887 except IOError as err:
1877 except IOError as err:
1888 if err.errno != errno.ENOENT:
1878 if err.errno != errno.ENOENT:
1889 raise
1879 raise
1890 return
1880 st = b''
1891 if not st:
1892 return
1893
1881
1894 parse_dirstate = util.nogc(self._rustmap.read)
1882 use_dirstate_tree = self._ui.configbool(
1895 parents = parse_dirstate(st)
1883 b"experimental",
1884 b"dirstate-tree.in-memory",
1885 False,
1886 )
1887 self._rustmap, parents = rustmod.DirstateMap.new(
1888 use_dirstate_tree, st
1889 )
1890
1896 if parents and not self._dirtyparents:
1891 if parents and not self._dirtyparents:
1897 self.setparents(*parents)
1892 self.setparents(*parents)
1898
1893
1899 self.__contains__ = self._rustmap.__contains__
1894 self.__contains__ = self._rustmap.__contains__
1900 self.__getitem__ = self._rustmap.__getitem__
1895 self.__getitem__ = self._rustmap.__getitem__
1901 self.get = self._rustmap.get
1896 self.get = self._rustmap.get
1897 return self._rustmap
1902
1898
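Reviewer note: the change in this hunk moves parsing into the _rustmap property, so the on-disk dirstate is read the first time the map is touched instead of via a separate read() call, and rustmod.DirstateMap.new(use_dirstate_tree, st) hands back both the map and the parents. A minimal stand-alone illustration of that lazy, parse-on-construction pattern (names below are invented, not Mercurial or hg-core API):

class LazyMap(object):
    """Parse an on-disk payload the first time the map is accessed."""

    def __init__(self, payload):
        self._payload = payload
        self._cache = None

    @property
    def _map(self):
        if self._cache is None:
            # stand-in for "construct the map and parents from raw bytes in
            # one step", as DirstateMap.new() now does for the Rust dirstate
            self._cache = dict(item.split(b'=') for item in self._payload.split())
        return self._cache

m = LazyMap(b'file-a=n file-b=a')
print(m._map[b'file-a'])   # parsing happens here, on first access: b'n'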
1903 def write(self, st, now):
1899 def write(self, st, now):
1904 parents = self.parents()
1900 parents = self.parents()
1905 st.write(self._rustmap.write(parents[0], parents[1], now))
1901 st.write(self._rustmap.write(parents[0], parents[1], now))
1906 st.close()
1902 st.close()
1907 self._dirtyparents = False
1903 self._dirtyparents = False
1908
1904
1909 @propertycache
1905 @propertycache
1910 def filefoldmap(self):
1906 def filefoldmap(self):
1911 """Returns a dictionary mapping normalized case paths to their
1907 """Returns a dictionary mapping normalized case paths to their
1912 non-normalized versions.
1908 non-normalized versions.
1913 """
1909 """
1914 return self._rustmap.filefoldmapasdict()
1910 return self._rustmap.filefoldmapasdict()
1915
1911
1916 def hastrackeddir(self, d):
1912 def hastrackeddir(self, d):
1917 self._dirs # Trigger Python's propertycache
1913 self._dirs # Trigger Python's propertycache
1918 return self._rustmap.hastrackeddir(d)
1914 return self._rustmap.hastrackeddir(d)
1919
1915
1920 def hasdir(self, d):
1916 def hasdir(self, d):
1921 self._dirs # Trigger Python's propertycache
1917 self._dirs # Trigger Python's propertycache
1922 return self._rustmap.hasdir(d)
1918 return self._rustmap.hasdir(d)
1923
1919
1924 @propertycache
1920 @propertycache
1925 def _dirs(self):
1921 def _dirs(self):
1926 return self._rustmap.getdirs()
1922 return self._rustmap.getdirs()
1927
1923
1928 @propertycache
1924 @propertycache
1929 def _alldirs(self):
1925 def _alldirs(self):
1930 return self._rustmap.getalldirs()
1926 return self._rustmap.getalldirs()
1931
1927
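hastrackeddir() and hasdir() differ only in which multiset they consult: _dirs counts directories that still contain at least one non-removed file, while _alldirs counts the directories of every entry. Touching the property first forces the Rust side to build the multiset before it is queried. A rough pure-Python model of the two sets:

    import posixpath

    def parent_dirs(path):
        d = posixpath.dirname(path)
        while d:
            yield d
            d = posixpath.dirname(d)

    entries = {"a/b/file1": "n", "a/removed": "r"}      # state per path
    alldirs = {d for p in entries for d in parent_dirs(p)}
    dirs = {d for p, st in entries.items() if st != "r"
            for d in parent_dirs(p)}
    assert "a/b" in dirs and "a" in alldirs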
1932 @propertycache
1928 @propertycache
1933 def identity(self):
1929 def identity(self):
1934 self._rustmap
1930 self._rustmap
1935 return self.identity
1931 return self.identity
1936
1932
1937 @property
1933 @property
1938 def nonnormalset(self):
1934 def nonnormalset(self):
1939 nonnorm = self._rustmap.non_normal_entries()
1935 nonnorm = self._rustmap.non_normal_entries()
1940 return nonnorm
1936 return nonnorm
1941
1937
1942 @propertycache
1938 @propertycache
1943 def otherparentset(self):
1939 def otherparentset(self):
1944 otherparents = self._rustmap.other_parent_entries()
1940 otherparents = self._rustmap.other_parent_entries()
1945 return otherparents
1941 return otherparents
1946
1942
1947 def non_normal_or_other_parent_paths(self):
1943 def non_normal_or_other_parent_paths(self):
1948 return self._rustmap.non_normal_or_other_parent_paths()
1944 return self._rustmap.non_normal_or_other_parent_paths()
1949
1945
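The three accessors above expose the Rust-maintained bookkeeping sets: nonnormalset holds paths whose entries are not in a clean "normal" state and so must be re-examined by status, otherparentset holds paths recorded as coming from the second parent during a merge, and non_normal_or_other_parent_paths() returns their union. A toy model of how such sets are derived from entry fields, assuming the usual dirstate-v1 conventions (size -2 for other-parent entries, mtime -1 for "needs lookup"):

    entries = {
        b"clean.txt":  (b"n", 0o644, 12, 1000),   # normal, known size/mtime
        b"added.txt":  (b"a", 0, -1, -1),         # added
        b"merged.txt": (b"n", 0o644, -2, -1),     # size -2: from other parent
    }
    nonnormal = {f for f, (state, mode, size, mtime) in entries.items()
                 if state != b"n" or mtime == -1}
    otherparent = {f for f, (state, mode, size, mtime) in entries.items()
                   if size == -2}
    assert b"added.txt" in nonnormal and b"merged.txt" in otherparent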
1950 @propertycache
1946 @propertycache
1951 def dirfoldmap(self):
1947 def dirfoldmap(self):
1952 f = {}
1948 f = {}
1953 normcase = util.normcase
1949 normcase = util.normcase
1954 for name in self._dirs:
1950 for name in self._dirs:
1955 f[normcase(name)] = name
1951 f[normcase(name)] = name
1956 return f
1952 return f
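dirfoldmap is the directory analogue of filefoldmap, built in Python by case-folding every name in _dirs. The rest of the diff (the @@ -1,553 +1,551 hunk below) moves to the Rust cpython bindings for dirstate_map, where the DirstateMap py_class gains the constructor that the Python hunk above now calls. A minimal dirfoldmap sketch, again with str.lower() standing in for util.normcase:

    def make_dirfoldmap(tracked_dirs):
        """Map case-folded directory names to their tracked spelling."""
        return {d.lower(): d for d in tracked_dirs}

    assert make_dirfoldmap(["Src", "Docs"])["src"] == "Src"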
@@ -1,553 +1,551
1 // dirstate_map.rs
1 // dirstate_map.rs
2 //
2 //
3 // Copyright 2019 Raphaël Gomès <rgomes@octobus.net>
3 // Copyright 2019 Raphaël Gomès <rgomes@octobus.net>
4 //
4 //
5 // This software may be used and distributed according to the terms of the
5 // This software may be used and distributed according to the terms of the
6 // GNU General Public License version 2 or any later version.
6 // GNU General Public License version 2 or any later version.
7
7
8 //! Bindings for the `hg::dirstate::dirstate_map` file provided by the
8 //! Bindings for the `hg::dirstate::dirstate_map` file provided by the
9 //! `hg-core` package.
9 //! `hg-core` package.
10
10
11 use std::cell::{RefCell, RefMut};
11 use std::cell::{RefCell, RefMut};
12 use std::convert::TryInto;
12 use std::convert::TryInto;
13
13
14 use cpython::{
14 use cpython::{
15 exc, ObjectProtocol, PyBool, PyBytes, PyClone, PyDict, PyErr, PyList,
15 exc, ObjectProtocol, PyBool, PyBytes, PyClone, PyDict, PyErr, PyList,
16 PyObject, PyResult, PySet, PyString, Python, PythonObject, ToPyObject,
16 PyObject, PyResult, PySet, PyString, Python, PythonObject, ToPyObject,
17 UnsafePyLeaked,
17 UnsafePyLeaked,
18 };
18 };
19
19
20 use crate::{
20 use crate::{
21 dirstate::copymap::{CopyMap, CopyMapItemsIterator, CopyMapKeysIterator},
21 dirstate::copymap::{CopyMap, CopyMapItemsIterator, CopyMapKeysIterator},
22 dirstate::non_normal_entries::{
22 dirstate::non_normal_entries::{
23 NonNormalEntries, NonNormalEntriesIterator,
23 NonNormalEntries, NonNormalEntriesIterator,
24 },
24 },
25 dirstate::{dirs_multiset::Dirs, make_dirstate_tuple},
25 dirstate::{dirs_multiset::Dirs, make_dirstate_tuple},
26 parsers::dirstate_parents_to_pytuple,
26 parsers::dirstate_parents_to_pytuple,
27 };
27 };
28 use hg::{
28 use hg::{
29 dirstate::parsers::Timestamp,
29 dirstate::parsers::Timestamp,
30 dirstate_tree::dispatch::DirstateMapMethods,
30 dirstate_tree::dispatch::DirstateMapMethods,
31 errors::HgError,
31 errors::HgError,
32 revlog::Node,
32 revlog::Node,
33 utils::files::normalize_case,
33 utils::files::normalize_case,
34 utils::hg_path::{HgPath, HgPathBuf},
34 utils::hg_path::{HgPath, HgPathBuf},
35 DirsMultiset, DirstateEntry, DirstateMap as RustDirstateMap,
35 DirsMultiset, DirstateEntry, DirstateError,
36 DirstateMapError, DirstateParents, EntryState, StateMapIter,
36 DirstateMap as RustDirstateMap, DirstateMapError, DirstateParents,
37 EntryState, StateMapIter,
37 };
38 };
38
39
39 // TODO
40 // TODO
40 // This object needs to share references to multiple members of its Rust
41 // This object needs to share references to multiple members of its Rust
41 // inner struct, namely `copy_map`, `dirs` and `all_dirs`.
42 // inner struct, namely `copy_map`, `dirs` and `all_dirs`.
42 // Right now `CopyMap` is done, but it needs to have an explicit reference
43 // Right now `CopyMap` is done, but it needs to have an explicit reference
43 // to `RustDirstateMap` which itself needs to have an encapsulation for
44 // to `RustDirstateMap` which itself needs to have an encapsulation for
44 // every method in `CopyMap` (copymapcopy, etc.).
45 // every method in `CopyMap` (copymapcopy, etc.).
45 // This is ugly and hard to maintain.
46 // This is ugly and hard to maintain.
46 // The same logic applies to `dirs` and `all_dirs`, however the `Dirs`
47 // The same logic applies to `dirs` and `all_dirs`, however the `Dirs`
47 // `py_class!` is already implemented and does not mention
48 // `py_class!` is already implemented and does not mention
48 // `RustDirstateMap`, rightfully so.
49 // `RustDirstateMap`, rightfully so.
49 // All attributes also have to have a separate refcount data attribute for
50 // All attributes also have to have a separate refcount data attribute for
50 // leaks, with all methods that go along for reference sharing.
51 // leaks, with all methods that go along for reference sharing.
51 py_class!(pub class DirstateMap |py| {
52 py_class!(pub class DirstateMap |py| {
52 @shared data inner: Box<dyn DirstateMapMethods + Send>;
53 @shared data inner: Box<dyn DirstateMapMethods + Send>;
53
54
54 def __new__(_cls, use_dirstate_tree: bool) -> PyResult<Self> {
55 /// Returns a `(dirstate_map, parents)` tuple
55 let inner = if use_dirstate_tree {
56 @staticmethod
56 Box::new(hg::dirstate_tree::dirstate_map::DirstateMap::new()) as _
57 def new(use_dirstate_tree: bool, on_disk: PyBytes) -> PyResult<PyObject> {
58 let dirstate_error = |_: DirstateError| {
59 PyErr::new::<exc::OSError, _>(py, "Dirstate error".to_string())
60 };
61 let bytes = on_disk.data(py);
62 let (inner, parents) = if use_dirstate_tree {
63 let mut map = hg::dirstate_tree::dirstate_map::DirstateMap::new();
64 let parents = map.read(bytes).map_err(dirstate_error)?;
65 (Box::new(map) as _, parents)
57 } else {
66 } else {
58 Box::new(RustDirstateMap::default()) as _
67 let mut map = RustDirstateMap::default();
68 let parents = map.read(bytes).map_err(dirstate_error)?;
69 (Box::new(map) as _, parents)
59 };
70 };
60 Self::create_instance(py, inner)
71 let map = Self::create_instance(py, inner)?;
72 let parents = parents.map(|p| dirstate_parents_to_pytuple(py, &p));
73 Ok((map, parents).to_py_object(py).into_object())
61 }
74 }
62
75
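The new staticmethod replaces the old one-argument __new__: it takes both the use_dirstate_tree flag and the raw on-disk bytes, parses them immediately, and returns a (dirstate_map, parents) pair, with any DirstateError surfaced to Python as OSError. A hedged stand-alone imitation of that call-and-unpack pattern (the real parsing lives in hg-core, not here):

    def dirstatemap_new(use_dirstate_tree: bool, on_disk: bytes):
        """Toy stand-in for rustmod.DirstateMap.new(): parse parents eagerly."""
        if on_disk and len(on_disk) < 40:
            raise OSError("Dirstate error")          # mirrors the PyErr mapping
        parents = (on_disk[:20], on_disk[20:40]) if on_disk else None
        return {}, parents

    dmap, parents = dirstatemap_new(False, b"")
    assert parents is None and dmap == {}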
63 def clear(&self) -> PyResult<PyObject> {
76 def clear(&self) -> PyResult<PyObject> {
64 self.inner(py).borrow_mut().clear();
77 self.inner(py).borrow_mut().clear();
65 Ok(py.None())
78 Ok(py.None())
66 }
79 }
67
80
68 def get(
81 def get(
69 &self,
82 &self,
70 key: PyObject,
83 key: PyObject,
71 default: Option<PyObject> = None
84 default: Option<PyObject> = None
72 ) -> PyResult<Option<PyObject>> {
85 ) -> PyResult<Option<PyObject>> {
73 let key = key.extract::<PyBytes>(py)?;
86 let key = key.extract::<PyBytes>(py)?;
74 match self.inner(py).borrow().get(HgPath::new(key.data(py))) {
87 match self.inner(py).borrow().get(HgPath::new(key.data(py))) {
75 Some(entry) => {
88 Some(entry) => {
76 Ok(Some(make_dirstate_tuple(py, entry)?))
89 Ok(Some(make_dirstate_tuple(py, entry)?))
77 },
90 },
78 None => Ok(default)
91 None => Ok(default)
79 }
92 }
80 }
93 }
81
94
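get() mirrors dict.get(): the key is converted to an HgPath, a hit is rendered as the usual (state, mode, size, mtime) dirstate tuple, and a miss returns the caller-supplied default. The same contract expressed with a plain dict whose values are already such tuples:

    dmap = {b"a.txt": (b"n", 0o644, 12, 1000)}

    def dirstate_get(key, default=None):
        return dmap.get(key, default)

    assert dirstate_get(b"a.txt")[0] == b"n"
    assert dirstate_get(b"missing", default=None) is None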
82 def addfile(
95 def addfile(
83 &self,
96 &self,
84 f: PyObject,
97 f: PyObject,
85 oldstate: PyObject,
98 oldstate: PyObject,
86 state: PyObject,
99 state: PyObject,
87 mode: PyObject,
100 mode: PyObject,
88 size: PyObject,
101 size: PyObject,
89 mtime: PyObject
102 mtime: PyObject
90 ) -> PyResult<PyObject> {
103 ) -> PyResult<PyObject> {
91 self.inner(py).borrow_mut().add_file(
104 self.inner(py).borrow_mut().add_file(
92 HgPath::new(f.extract::<PyBytes>(py)?.data(py)),
105 HgPath::new(f.extract::<PyBytes>(py)?.data(py)),
93 oldstate.extract::<PyBytes>(py)?.data(py)[0]
106 oldstate.extract::<PyBytes>(py)?.data(py)[0]
94 .try_into()
107 .try_into()
95 .map_err(|e: HgError| {
108 .map_err(|e: HgError| {
96 PyErr::new::<exc::ValueError, _>(py, e.to_string())
109 PyErr::new::<exc::ValueError, _>(py, e.to_string())
97 })?,
110 })?,
98 DirstateEntry {
111 DirstateEntry {
99 state: state.extract::<PyBytes>(py)?.data(py)[0]
112 state: state.extract::<PyBytes>(py)?.data(py)[0]
100 .try_into()
113 .try_into()
101 .map_err(|e: HgError| {
114 .map_err(|e: HgError| {
102 PyErr::new::<exc::ValueError, _>(py, e.to_string())
115 PyErr::new::<exc::ValueError, _>(py, e.to_string())
103 })?,
116 })?,
104 mode: mode.extract(py)?,
117 mode: mode.extract(py)?,
105 size: size.extract(py)?,
118 size: size.extract(py)?,
106 mtime: mtime.extract(py)?,
119 mtime: mtime.extract(py)?,
107 },
120 },
108 ).and(Ok(py.None())).or_else(|e: DirstateMapError| {
121 ).and(Ok(py.None())).or_else(|e: DirstateMapError| {
109 Err(PyErr::new::<exc::ValueError, _>(py, e.to_string()))
122 Err(PyErr::new::<exc::ValueError, _>(py, e.to_string()))
110 })
123 })
111 }
124 }
112
125
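addfile() takes the old one-byte state, the new state and the mode/size/mtime fields, converts the state bytes into EntryState values, and inserts a DirstateEntry, turning conversion or map errors into ValueError. A pure-Python model of the same validate-then-insert step, with a hypothetical ALLOWED_STATES set standing in for the EntryState conversion:

    ALLOWED_STATES = {b"n", b"a", b"r", b"m", b"?"}   # assumed state codes

    dmap = {}

    def addfile(f: bytes, oldstate: bytes, state: bytes,
                mode: int, size: int, mtime: int) -> None:
        if oldstate not in ALLOWED_STATES or state not in ALLOWED_STATES:
            raise ValueError("invalid dirstate entry state")
        dmap[f] = (state, mode, size, mtime)

    addfile(b"new.txt", b"?", b"a", 0, -1, -1)
    assert dmap[b"new.txt"][0] == b"a"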
113 def removefile(
126 def removefile(
114 &self,
127 &self,
115 f: PyObject,
128 f: PyObject,
116 oldstate: PyObject,
129 oldstate: PyObject,
117 size: PyObject
130 size: PyObject
118 ) -> PyResult<PyObject> {
131 ) -> PyResult<PyObject> {
119 self.inner(py).borrow_mut()
132 self.inner(py).borrow_mut()
120 .remove_file(
133 .remove_file(
121 HgPath::new(f.extract::<PyBytes>(py)?.data(py)),
134 HgPath::new(f.extract::<PyBytes>(py)?.data(py)),
122 oldstate.extract::<PyBytes>(py)?.data(py)[0]
135 oldstate.extract::<PyBytes>(py)?.data(py)[0]
123 .try_into()
136 .try_into()
124 .map_err(|e: HgError| {
137 .map_err(|e: HgError| {
125 PyErr::new::<exc::ValueError, _>(py, e.to_string())
138 PyErr::new::<exc::ValueError, _>(py, e.to_string())
126 })?,
139 })?,
127 size.extract(py)?,
140 size.extract(py)?,
128 )
141 )
129 .or_else(|_| {
142 .or_else(|_| {
130 Err(PyErr::new::<exc::OSError, _>(
143 Err(PyErr::new::<exc::OSError, _>(
131 py,
144 py,
132 "Dirstate error".to_string(),
145 "Dirstate error".to_string(),
133 ))
146 ))
134 })?;
147 })?;
135 Ok(py.None())
148 Ok(py.None())
136 }
149 }
137
150
138 def dropfile(
151 def dropfile(
139 &self,
152 &self,
140 f: PyObject,
153 f: PyObject,
141 oldstate: PyObject
154 oldstate: PyObject
142 ) -> PyResult<PyBool> {
155 ) -> PyResult<PyBool> {
143 self.inner(py).borrow_mut()
156 self.inner(py).borrow_mut()
144 .drop_file(
157 .drop_file(
145 HgPath::new(f.extract::<PyBytes>(py)?.data(py)),
158 HgPath::new(f.extract::<PyBytes>(py)?.data(py)),
146 oldstate.extract::<PyBytes>(py)?.data(py)[0]
159 oldstate.extract::<PyBytes>(py)?.data(py)[0]
147 .try_into()
160 .try_into()
148 .map_err(|e: HgError| {
161 .map_err(|e: HgError| {
149 PyErr::new::<exc::ValueError, _>(py, e.to_string())
162 PyErr::new::<exc::ValueError, _>(py, e.to_string())
150 })?,
163 })?,
151 )
164 )
152 .and_then(|b| Ok(b.to_py_object(py)))
165 .and_then(|b| Ok(b.to_py_object(py)))
153 .or_else(|e| {
166 .or_else(|e| {
154 Err(PyErr::new::<exc::OSError, _>(
167 Err(PyErr::new::<exc::OSError, _>(
155 py,
168 py,
156 format!("Dirstate error: {}", e.to_string()),
169 format!("Dirstate error: {}", e.to_string()),
157 ))
170 ))
158 })
171 })
159 }
172 }
160
173
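removefile() marks a path as removed while keeping an entry so status can still report it, whereas dropfile() deletes the entry outright and reports whether anything was actually dropped; failures from the Rust layer are surfaced as OSError. Modelled loosely on a plain dict:

    dmap = {b"gone.txt": (b"n", 0o644, 3, 1000)}

    def removefile(f: bytes, oldstate: bytes, size: int) -> None:
        dmap[f] = (b"r", 0, size, 0)        # keep a tombstone entry

    def dropfile(f: bytes, oldstate: bytes) -> bool:
        return dmap.pop(f, None) is not None

    removefile(b"gone.txt", b"n", 0)
    assert dropfile(b"gone.txt", b"r") is True
    assert dropfile(b"never-there", b"?") is False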
161 def clearambiguoustimes(
174 def clearambiguoustimes(
162 &self,
175 &self,
163 files: PyObject,
176 files: PyObject,
164 now: PyObject
177 now: PyObject
165 ) -> PyResult<PyObject> {
178 ) -> PyResult<PyObject> {
166 let files: PyResult<Vec<HgPathBuf>> = files
179 let files: PyResult<Vec<HgPathBuf>> = files
167 .iter(py)?
180 .iter(py)?
168 .map(|filename| {
181 .map(|filename| {
169 Ok(HgPathBuf::from_bytes(
182 Ok(HgPathBuf::from_bytes(
170 filename?.extract::<PyBytes>(py)?.data(py),
183 filename?.extract::<PyBytes>(py)?.data(py),
171 ))
184 ))
172 })
185 })
173 .collect();
186 .collect();
174 self.inner(py).borrow_mut()
187 self.inner(py).borrow_mut()
175 .clear_ambiguous_times(files?, now.extract(py)?);
188 .clear_ambiguous_times(files?, now.extract(py)?);
176 Ok(py.None())
189 Ok(py.None())
177 }
190 }
178
191
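clear_ambiguous_times() addresses the classic dirstate race: if a file's recorded mtime equals the timestamp at which the dirstate is written, a later modification within the same second would go unnoticed, so such entries have their mtime invalidated and are re-checked next time. A small stand-alone version of that rule, using -1 as the "mtime unknown" marker:

    dmap = {
        b"a.txt": (b"n", 0o644, 3, 100),
        b"b.txt": (b"n", 0o644, 3, 200),
    }

    def clear_ambiguous_times(files, now: int) -> None:
        for f in files:
            state, mode, size, mtime = dmap[f]
            if state == b"n" and mtime == now:
                dmap[f] = (state, mode, size, -1)   # force a later lookup

    clear_ambiguous_times([b"a.txt", b"b.txt"], now=200)
    assert dmap[b"b.txt"][3] == -1 and dmap[b"a.txt"][3] == 100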
179 def other_parent_entries(&self) -> PyResult<PyObject> {
192 def other_parent_entries(&self) -> PyResult<PyObject> {
180 let mut inner_shared = self.inner(py).borrow_mut();
193 let mut inner_shared = self.inner(py).borrow_mut();
181 let set = PySet::empty(py)?;
194 let set = PySet::empty(py)?;
182 for path in inner_shared.iter_other_parent_paths() {
195 for path in inner_shared.iter_other_parent_paths() {
183 set.add(py, PyBytes::new(py, path.as_bytes()))?;
196 set.add(py, PyBytes::new(py, path.as_bytes()))?;
184 }
197 }
185 Ok(set.into_object())
198 Ok(set.into_object())
186 }
199 }
187
200
188 def non_normal_entries(&self) -> PyResult<NonNormalEntries> {
201 def non_normal_entries(&self) -> PyResult<NonNormalEntries> {
189 NonNormalEntries::from_inner(py, self.clone_ref(py))
202 NonNormalEntries::from_inner(py, self.clone_ref(py))
190 }
203 }
191
204
192 def non_normal_entries_contains(&self, key: PyObject) -> PyResult<bool> {
205 def non_normal_entries_contains(&self, key: PyObject) -> PyResult<bool> {
193 let key = key.extract::<PyBytes>(py)?;
206 let key = key.extract::<PyBytes>(py)?;
194 Ok(self
207 Ok(self
195 .inner(py)
208 .inner(py)
196 .borrow_mut()
209 .borrow_mut()
197 .non_normal_entries_contains(HgPath::new(key.data(py))))
210 .non_normal_entries_contains(HgPath::new(key.data(py))))
198 }
211 }
199
212
200 def non_normal_entries_display(&self) -> PyResult<PyString> {
213 def non_normal_entries_display(&self) -> PyResult<PyString> {
201 Ok(
214 Ok(
202 PyString::new(
215 PyString::new(
203 py,
216 py,
204 &format!(
217 &format!(
205 "NonNormalEntries: {}",
218 "NonNormalEntries: {}",
206 hg::utils::join_display(
219 hg::utils::join_display(
207 self
220 self
208 .inner(py)
221 .inner(py)
209 .borrow_mut()
222 .borrow_mut()
210 .iter_non_normal_paths(),
223 .iter_non_normal_paths(),
211 ", "
224 ", "
212 )
225 )
213 )
226 )
214 )
227 )
215 )
228 )
216 }
229 }
217
230
218 def non_normal_entries_remove(&self, key: PyObject) -> PyResult<PyObject> {
231 def non_normal_entries_remove(&self, key: PyObject) -> PyResult<PyObject> {
219 let key = key.extract::<PyBytes>(py)?;
232 let key = key.extract::<PyBytes>(py)?;
220 self
233 self
221 .inner(py)
234 .inner(py)
222 .borrow_mut()
235 .borrow_mut()
223 .non_normal_entries_remove(HgPath::new(key.data(py)));
236 .non_normal_entries_remove(HgPath::new(key.data(py)));
224 Ok(py.None())
237 Ok(py.None())
225 }
238 }
226
239
227 def non_normal_or_other_parent_paths(&self) -> PyResult<PyList> {
240 def non_normal_or_other_parent_paths(&self) -> PyResult<PyList> {
228 let mut inner = self.inner(py).borrow_mut();
241 let mut inner = self.inner(py).borrow_mut();
229
242
230 let ret = PyList::new(py, &[]);
243 let ret = PyList::new(py, &[]);
231 for filename in inner.non_normal_or_other_parent_paths() {
244 for filename in inner.non_normal_or_other_parent_paths() {
232 let as_pystring = PyBytes::new(py, filename.as_bytes());
245 let as_pystring = PyBytes::new(py, filename.as_bytes());
233 ret.append(py, as_pystring.into_object());
246 ret.append(py, as_pystring.into_object());
234 }
247 }
235 Ok(ret)
248 Ok(ret)
236 }
249 }
237
250
238 def non_normal_entries_iter(&self) -> PyResult<NonNormalEntriesIterator> {
251 def non_normal_entries_iter(&self) -> PyResult<NonNormalEntriesIterator> {
239 // Make sure the sets are defined before we no longer have a mutable
252 // Make sure the sets are defined before we no longer have a mutable
240 // reference to the dmap.
253 // reference to the dmap.
241 self.inner(py)
254 self.inner(py)
242 .borrow_mut()
255 .borrow_mut()
243 .set_non_normal_other_parent_entries(false);
256 .set_non_normal_other_parent_entries(false);
244
257
245 let leaked_ref = self.inner(py).leak_immutable();
258 let leaked_ref = self.inner(py).leak_immutable();
246
259
247 NonNormalEntriesIterator::from_inner(py, unsafe {
260 NonNormalEntriesIterator::from_inner(py, unsafe {
248 leaked_ref.map(py, |o| {
261 leaked_ref.map(py, |o| {
249 o.iter_non_normal_paths_panic()
262 o.iter_non_normal_paths_panic()
250 })
263 })
251 })
264 })
252 }
265 }
253
266
254 def hastrackeddir(&self, d: PyObject) -> PyResult<PyBool> {
267 def hastrackeddir(&self, d: PyObject) -> PyResult<PyBool> {
255 let d = d.extract::<PyBytes>(py)?;
268 let d = d.extract::<PyBytes>(py)?;
256 Ok(self.inner(py).borrow_mut()
269 Ok(self.inner(py).borrow_mut()
257 .has_tracked_dir(HgPath::new(d.data(py)))
270 .has_tracked_dir(HgPath::new(d.data(py)))
258 .map_err(|e| {
271 .map_err(|e| {
259 PyErr::new::<exc::ValueError, _>(py, e.to_string())
272 PyErr::new::<exc::ValueError, _>(py, e.to_string())
260 })?
273 })?
261 .to_py_object(py))
274 .to_py_object(py))
262 }
275 }
263
276
264 def hasdir(&self, d: PyObject) -> PyResult<PyBool> {
277 def hasdir(&self, d: PyObject) -> PyResult<PyBool> {
265 let d = d.extract::<PyBytes>(py)?;
278 let d = d.extract::<PyBytes>(py)?;
266 Ok(self.inner(py).borrow_mut()
279 Ok(self.inner(py).borrow_mut()
267 .has_dir(HgPath::new(d.data(py)))
280 .has_dir(HgPath::new(d.data(py)))
268 .map_err(|e| {
281 .map_err(|e| {
269 PyErr::new::<exc::ValueError, _>(py, e.to_string())
282 PyErr::new::<exc::ValueError, _>(py, e.to_string())
270 })?
283 })?
271 .to_py_object(py))
284 .to_py_object(py))
272 }
285 }
273
286
274 def read(&self, st: PyObject) -> PyResult<Option<PyObject>> {
275 match self.inner(py).borrow_mut()
276 .read(st.extract::<PyBytes>(py)?.data(py))
277 {
278 Ok(Some(parents)) => Ok(Some(
279 dirstate_parents_to_pytuple(py, parents)
280 .into_object()
281 )),
282 Ok(None) => Ok(Some(py.None())),
283 Err(_) => Err(PyErr::new::<exc::OSError, _>(
284 py,
285 "Dirstate error".to_string(),
286 )),
287 }
288 }
289 def write(
287 def write(
290 &self,
288 &self,
291 p1: PyObject,
289 p1: PyObject,
292 p2: PyObject,
290 p2: PyObject,
293 now: PyObject
291 now: PyObject
294 ) -> PyResult<PyBytes> {
292 ) -> PyResult<PyBytes> {
295 let now = Timestamp(now.extract(py)?);
293 let now = Timestamp(now.extract(py)?);
296 let parents = DirstateParents {
294 let parents = DirstateParents {
297 p1: extract_node_id(py, &p1)?,
295 p1: extract_node_id(py, &p1)?,
298 p2: extract_node_id(py, &p2)?,
296 p2: extract_node_id(py, &p2)?,
299 };
297 };
300
298
301 match self.inner(py).borrow_mut().pack(parents, now) {
299 match self.inner(py).borrow_mut().pack(parents, now) {
302 Ok(packed) => Ok(PyBytes::new(py, &packed)),
300 Ok(packed) => Ok(PyBytes::new(py, &packed)),
303 Err(_) => Err(PyErr::new::<exc::OSError, _>(
301 Err(_) => Err(PyErr::new::<exc::OSError, _>(
304 py,
302 py,
305 "Dirstate error".to_string(),
303 "Dirstate error".to_string(),
306 )),
304 )),
307 }
305 }
308 }
306 }
309
307
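On this side, write() is the packing half of the round trip started by new(): it builds DirstateParents from the two node arguments, serializes the whole map with the given timestamp, and returns the bytes that the Python write() shown earlier streams into the file object it was handed. A sketch of the parents-plus-entries framing, assuming dirstate-v1-style 20-byte nodes and ignoring the per-entry encoding:

    def pack(p1: bytes, p2: bytes, entry_blobs) -> bytes:
        """Concatenate the two parent nodes and pre-encoded entries."""
        assert len(p1) == 20 and len(p2) == 20
        return p1 + p2 + b"".join(entry_blobs)

    packed = pack(b"\x11" * 20, b"\x00" * 20, [])
    assert packed[:20] == b"\x11" * 20 and len(packed) == 40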
310 def filefoldmapasdict(&self) -> PyResult<PyDict> {
308 def filefoldmapasdict(&self) -> PyResult<PyDict> {
311 let dict = PyDict::new(py);
309 let dict = PyDict::new(py);
312 for (path, entry) in self.inner(py).borrow_mut().iter() {
310 for (path, entry) in self.inner(py).borrow_mut().iter() {
313 if entry.state != EntryState::Removed {
311 if entry.state != EntryState::Removed {
314 let key = normalize_case(path);
312 let key = normalize_case(path);
315 let value = path;
313 let value = path;
316 dict.set_item(
314 dict.set_item(
317 py,
315 py,
318 PyBytes::new(py, key.as_bytes()).into_object(),
316 PyBytes::new(py, key.as_bytes()).into_object(),
319 PyBytes::new(py, value.as_bytes()).into_object(),
317 PyBytes::new(py, value.as_bytes()).into_object(),
320 )?;
318 )?;
321 }
319 }
322 }
320 }
323 Ok(dict)
321 Ok(dict)
324 }
322 }
325
323
326 def __len__(&self) -> PyResult<usize> {
324 def __len__(&self) -> PyResult<usize> {
327 Ok(self.inner(py).borrow().len())
325 Ok(self.inner(py).borrow().len())
328 }
326 }
329
327
330 def __contains__(&self, key: PyObject) -> PyResult<bool> {
328 def __contains__(&self, key: PyObject) -> PyResult<bool> {
331 let key = key.extract::<PyBytes>(py)?;
329 let key = key.extract::<PyBytes>(py)?;
332 Ok(self.inner(py).borrow().contains_key(HgPath::new(key.data(py))))
330 Ok(self.inner(py).borrow().contains_key(HgPath::new(key.data(py))))
333 }
331 }
334
332
335 def __getitem__(&self, key: PyObject) -> PyResult<PyObject> {
333 def __getitem__(&self, key: PyObject) -> PyResult<PyObject> {
336 let key = key.extract::<PyBytes>(py)?;
334 let key = key.extract::<PyBytes>(py)?;
337 let key = HgPath::new(key.data(py));
335 let key = HgPath::new(key.data(py));
338 match self.inner(py).borrow().get(key) {
336 match self.inner(py).borrow().get(key) {
339 Some(entry) => {
337 Some(entry) => {
340 Ok(make_dirstate_tuple(py, entry)?)
338 Ok(make_dirstate_tuple(py, entry)?)
341 },
339 },
342 None => Err(PyErr::new::<exc::KeyError, _>(
340 None => Err(PyErr::new::<exc::KeyError, _>(
343 py,
341 py,
344 String::from_utf8_lossy(key.as_bytes()),
342 String::from_utf8_lossy(key.as_bytes()),
345 )),
343 )),
346 }
344 }
347 }
345 }
348
346
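__len__, __contains__ and __getitem__ give the binding the shape of a read-only mapping, which is why the Python dirstatemap simply rebinds its own __contains__/__getitem__/get to the Rust object once _rustmap exists; a missing key raises KeyError carrying the lossily decoded path. The same contract in plain Python terms:

    class MapLike:
        """Minimal read-only mapping mirroring the binding's protocol."""

        def __init__(self, data):
            self._data = dict(data)

        def __len__(self):
            return len(self._data)

        def __contains__(self, key):
            return key in self._data

        def __getitem__(self, key):
            try:
                return self._data[key]
            except KeyError:
                raise KeyError(key.decode("utf-8", "replace")) from None

    m = MapLike({b"a.txt": (b"n", 0o644, 1, 0)})
    assert b"a.txt" in m and len(m) == 1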
349 def keys(&self) -> PyResult<DirstateMapKeysIterator> {
347 def keys(&self) -> PyResult<DirstateMapKeysIterator> {
350 let leaked_ref = self.inner(py).leak_immutable();
348 let leaked_ref = self.inner(py).leak_immutable();
351 DirstateMapKeysIterator::from_inner(
349 DirstateMapKeysIterator::from_inner(
352 py,
350 py,
353 unsafe { leaked_ref.map(py, |o| o.iter()) },
351 unsafe { leaked_ref.map(py, |o| o.iter()) },
354 )
352 )
355 }
353 }
356
354
357 def items(&self) -> PyResult<DirstateMapItemsIterator> {
355 def items(&self) -> PyResult<DirstateMapItemsIterator> {
358 let leaked_ref = self.inner(py).leak_immutable();
356 let leaked_ref = self.inner(py).leak_immutable();
359 DirstateMapItemsIterator::from_inner(
357 DirstateMapItemsIterator::from_inner(
360 py,
358 py,
361 unsafe { leaked_ref.map(py, |o| o.iter()) },
359 unsafe { leaked_ref.map(py, |o| o.iter()) },
362 )
360 )
363 }
361 }
364
362
365 def __iter__(&self) -> PyResult<DirstateMapKeysIterator> {
363 def __iter__(&self) -> PyResult<DirstateMapKeysIterator> {
366 let leaked_ref = self.inner(py).leak_immutable();
364 let leaked_ref = self.inner(py).leak_immutable();
367 DirstateMapKeysIterator::from_inner(
365 DirstateMapKeysIterator::from_inner(
368 py,
366 py,
369 unsafe { leaked_ref.map(py, |o| o.iter()) },
367 unsafe { leaked_ref.map(py, |o| o.iter()) },
370 )
368 )
371 }
369 }
372
370
373 def getdirs(&self) -> PyResult<Dirs> {
371 def getdirs(&self) -> PyResult<Dirs> {
374 // TODO don't copy, share the reference
372 // TODO don't copy, share the reference
375 self.inner(py).borrow_mut().set_dirs()
373 self.inner(py).borrow_mut().set_dirs()
376 .map_err(|e| {
374 .map_err(|e| {
377 PyErr::new::<exc::ValueError, _>(py, e.to_string())
375 PyErr::new::<exc::ValueError, _>(py, e.to_string())
378 })?;
376 })?;
379 Dirs::from_inner(
377 Dirs::from_inner(
380 py,
378 py,
381 DirsMultiset::from_dirstate(
379 DirsMultiset::from_dirstate(
382 self.inner(py).borrow().iter(),
380 self.inner(py).borrow().iter(),
383 Some(EntryState::Removed),
381 Some(EntryState::Removed),
384 )
382 )
385 .map_err(|e| {
383 .map_err(|e| {
386 PyErr::new::<exc::ValueError, _>(py, e.to_string())
384 PyErr::new::<exc::ValueError, _>(py, e.to_string())
387 })?,
385 })?,
388 )
386 )
389 }
387 }
390 def getalldirs(&self) -> PyResult<Dirs> {
388 def getalldirs(&self) -> PyResult<Dirs> {
391 // TODO don't copy, share the reference
389 // TODO don't copy, share the reference
392 self.inner(py).borrow_mut().set_all_dirs()
390 self.inner(py).borrow_mut().set_all_dirs()
393 .map_err(|e| {
391 .map_err(|e| {
394 PyErr::new::<exc::ValueError, _>(py, e.to_string())
392 PyErr::new::<exc::ValueError, _>(py, e.to_string())
395 })?;
393 })?;
396 Dirs::from_inner(
394 Dirs::from_inner(
397 py,
395 py,
398 DirsMultiset::from_dirstate(
396 DirsMultiset::from_dirstate(
399 self.inner(py).borrow().iter(),
397 self.inner(py).borrow().iter(),
400 None,
398 None,
401 ).map_err(|e| {
399 ).map_err(|e| {
402 PyErr::new::<exc::ValueError, _>(py, e.to_string())
400 PyErr::new::<exc::ValueError, _>(py, e.to_string())
403 })?,
401 })?,
404 )
402 )
405 }
403 }
406
404
407 // TODO all copymap* methods, see docstring above
405 // TODO all copymap* methods, see docstring above
408 def copymapcopy(&self) -> PyResult<PyDict> {
406 def copymapcopy(&self) -> PyResult<PyDict> {
409 let dict = PyDict::new(py);
407 let dict = PyDict::new(py);
410 for (key, value) in self.inner(py).borrow().copy_map_iter() {
408 for (key, value) in self.inner(py).borrow().copy_map_iter() {
411 dict.set_item(
409 dict.set_item(
412 py,
410 py,
413 PyBytes::new(py, key.as_bytes()),
411 PyBytes::new(py, key.as_bytes()),
414 PyBytes::new(py, value.as_bytes()),
412 PyBytes::new(py, value.as_bytes()),
415 )?;
413 )?;
416 }
414 }
417 Ok(dict)
415 Ok(dict)
418 }
416 }
419
417
420 def copymapgetitem(&self, key: PyObject) -> PyResult<PyBytes> {
418 def copymapgetitem(&self, key: PyObject) -> PyResult<PyBytes> {
421 let key = key.extract::<PyBytes>(py)?;
419 let key = key.extract::<PyBytes>(py)?;
422 match self.inner(py).borrow().copy_map_get(HgPath::new(key.data(py))) {
420 match self.inner(py).borrow().copy_map_get(HgPath::new(key.data(py))) {
423 Some(copy) => Ok(PyBytes::new(py, copy.as_bytes())),
421 Some(copy) => Ok(PyBytes::new(py, copy.as_bytes())),
424 None => Err(PyErr::new::<exc::KeyError, _>(
422 None => Err(PyErr::new::<exc::KeyError, _>(
425 py,
423 py,
426 String::from_utf8_lossy(key.data(py)),
424 String::from_utf8_lossy(key.data(py)),
427 )),
425 )),
428 }
426 }
429 }
427 }
430 def copymap(&self) -> PyResult<CopyMap> {
428 def copymap(&self) -> PyResult<CopyMap> {
431 CopyMap::from_inner(py, self.clone_ref(py))
429 CopyMap::from_inner(py, self.clone_ref(py))
432 }
430 }
433
431
434 def copymaplen(&self) -> PyResult<usize> {
432 def copymaplen(&self) -> PyResult<usize> {
435 Ok(self.inner(py).borrow().copy_map_len())
433 Ok(self.inner(py).borrow().copy_map_len())
436 }
434 }
437 def copymapcontains(&self, key: PyObject) -> PyResult<bool> {
435 def copymapcontains(&self, key: PyObject) -> PyResult<bool> {
438 let key = key.extract::<PyBytes>(py)?;
436 let key = key.extract::<PyBytes>(py)?;
439 Ok(self
437 Ok(self
440 .inner(py)
438 .inner(py)
441 .borrow()
439 .borrow()
442 .copy_map_contains_key(HgPath::new(key.data(py))))
440 .copy_map_contains_key(HgPath::new(key.data(py))))
443 }
441 }
444 def copymapget(
442 def copymapget(
445 &self,
443 &self,
446 key: PyObject,
444 key: PyObject,
447 default: Option<PyObject>
445 default: Option<PyObject>
448 ) -> PyResult<Option<PyObject>> {
446 ) -> PyResult<Option<PyObject>> {
449 let key = key.extract::<PyBytes>(py)?;
447 let key = key.extract::<PyBytes>(py)?;
450 match self
448 match self
451 .inner(py)
449 .inner(py)
452 .borrow()
450 .borrow()
453 .copy_map_get(HgPath::new(key.data(py)))
451 .copy_map_get(HgPath::new(key.data(py)))
454 {
452 {
455 Some(copy) => Ok(Some(
453 Some(copy) => Ok(Some(
456 PyBytes::new(py, copy.as_bytes()).into_object(),
454 PyBytes::new(py, copy.as_bytes()).into_object(),
457 )),
455 )),
458 None => Ok(default),
456 None => Ok(default),
459 }
457 }
460 }
458 }
461 def copymapsetitem(
459 def copymapsetitem(
462 &self,
460 &self,
463 key: PyObject,
461 key: PyObject,
464 value: PyObject
462 value: PyObject
465 ) -> PyResult<PyObject> {
463 ) -> PyResult<PyObject> {
466 let key = key.extract::<PyBytes>(py)?;
464 let key = key.extract::<PyBytes>(py)?;
467 let value = value.extract::<PyBytes>(py)?;
465 let value = value.extract::<PyBytes>(py)?;
468 self.inner(py).borrow_mut().copy_map_insert(
466 self.inner(py).borrow_mut().copy_map_insert(
469 HgPathBuf::from_bytes(key.data(py)),
467 HgPathBuf::from_bytes(key.data(py)),
470 HgPathBuf::from_bytes(value.data(py)),
468 HgPathBuf::from_bytes(value.data(py)),
471 );
469 );
472 Ok(py.None())
470 Ok(py.None())
473 }
471 }
474 def copymappop(
472 def copymappop(
475 &self,
473 &self,
476 key: PyObject,
474 key: PyObject,
477 default: Option<PyObject>
475 default: Option<PyObject>
478 ) -> PyResult<Option<PyObject>> {
476 ) -> PyResult<Option<PyObject>> {
479 let key = key.extract::<PyBytes>(py)?;
477 let key = key.extract::<PyBytes>(py)?;
480 match self
478 match self
481 .inner(py)
479 .inner(py)
482 .borrow_mut()
480 .borrow_mut()
483 .copy_map_remove(HgPath::new(key.data(py)))
481 .copy_map_remove(HgPath::new(key.data(py)))
484 {
482 {
485 Some(_) => Ok(None),
483 Some(_) => Ok(None),
486 None => Ok(default),
484 None => Ok(default),
487 }
485 }
488 }
486 }
489
487
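The copymap* methods expose the map of copy/rename sources keyed by destination path: copymapsetitem records that a destination was copied from a source, copymapget looks it up with a default, and copymappop removes the record. A dict-backed sketch of the idea (the real binding's return conventions differ slightly, e.g. copymappop returns None on a successful removal):

    copymap = {}

    def copymapsetitem(dest: bytes, source: bytes) -> None:
        copymap[dest] = source

    def copymapget(dest: bytes, default=None):
        return copymap.get(dest, default)

    def copymappop(dest: bytes, default=None):
        return copymap.pop(dest, default)

    copymapsetitem(b"new.txt", b"old.txt")
    assert copymapget(b"new.txt") == b"old.txt"
    assert copymappop(b"missing", default=None) is None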
490 def copymapiter(&self) -> PyResult<CopyMapKeysIterator> {
488 def copymapiter(&self) -> PyResult<CopyMapKeysIterator> {
491 let leaked_ref = self.inner(py).leak_immutable();
489 let leaked_ref = self.inner(py).leak_immutable();
492 CopyMapKeysIterator::from_inner(
490 CopyMapKeysIterator::from_inner(
493 py,
491 py,
494 unsafe { leaked_ref.map(py, |o| o.copy_map_iter()) },
492 unsafe { leaked_ref.map(py, |o| o.copy_map_iter()) },
495 )
493 )
496 }
494 }
497
495
498 def copymapitemsiter(&self) -> PyResult<CopyMapItemsIterator> {
496 def copymapitemsiter(&self) -> PyResult<CopyMapItemsIterator> {
499 let leaked_ref = self.inner(py).leak_immutable();
497 let leaked_ref = self.inner(py).leak_immutable();
500 CopyMapItemsIterator::from_inner(
498 CopyMapItemsIterator::from_inner(
501 py,
499 py,
502 unsafe { leaked_ref.map(py, |o| o.copy_map_iter()) },
500 unsafe { leaked_ref.map(py, |o| o.copy_map_iter()) },
503 )
501 )
504 }
502 }
505
503
506 });
504 });
507
505
508 impl DirstateMap {
506 impl DirstateMap {
509 pub fn get_inner_mut<'a>(
507 pub fn get_inner_mut<'a>(
510 &'a self,
508 &'a self,
511 py: Python<'a>,
509 py: Python<'a>,
512 ) -> RefMut<'a, Box<dyn DirstateMapMethods + Send>> {
510 ) -> RefMut<'a, Box<dyn DirstateMapMethods + Send>> {
513 self.inner(py).borrow_mut()
511 self.inner(py).borrow_mut()
514 }
512 }
515 fn translate_key(
513 fn translate_key(
516 py: Python,
514 py: Python,
517 res: (&HgPathBuf, &DirstateEntry),
515 res: (&HgPathBuf, &DirstateEntry),
518 ) -> PyResult<Option<PyBytes>> {
516 ) -> PyResult<Option<PyBytes>> {
519 Ok(Some(PyBytes::new(py, res.0.as_bytes())))
517 Ok(Some(PyBytes::new(py, res.0.as_bytes())))
520 }
518 }
521 fn translate_key_value(
519 fn translate_key_value(
522 py: Python,
520 py: Python,
523 res: (&HgPathBuf, &DirstateEntry),
521 res: (&HgPathBuf, &DirstateEntry),
524 ) -> PyResult<Option<(PyBytes, PyObject)>> {
522 ) -> PyResult<Option<(PyBytes, PyObject)>> {
525 let (f, entry) = res;
523 let (f, entry) = res;
526 Ok(Some((
524 Ok(Some((
527 PyBytes::new(py, f.as_bytes()),
525 PyBytes::new(py, f.as_bytes()),
528 make_dirstate_tuple(py, &entry)?,
526 make_dirstate_tuple(py, &entry)?,
529 )))
527 )))
530 }
528 }
531 }
529 }
532
530
533 py_shared_iterator!(
531 py_shared_iterator!(
534 DirstateMapKeysIterator,
532 DirstateMapKeysIterator,
535 UnsafePyLeaked<StateMapIter<'static>>,
533 UnsafePyLeaked<StateMapIter<'static>>,
536 DirstateMap::translate_key,
534 DirstateMap::translate_key,
537 Option<PyBytes>
535 Option<PyBytes>
538 );
536 );
539
537
540 py_shared_iterator!(
538 py_shared_iterator!(
541 DirstateMapItemsIterator,
539 DirstateMapItemsIterator,
542 UnsafePyLeaked<StateMapIter<'static>>,
540 UnsafePyLeaked<StateMapIter<'static>>,
543 DirstateMap::translate_key_value,
541 DirstateMap::translate_key_value,
544 Option<(PyBytes, PyObject)>
542 Option<(PyBytes, PyObject)>
545 );
543 );
546
544
547 fn extract_node_id(py: Python, obj: &PyObject) -> PyResult<Node> {
545 fn extract_node_id(py: Python, obj: &PyObject) -> PyResult<Node> {
548 let bytes = obj.extract::<PyBytes>(py)?;
546 let bytes = obj.extract::<PyBytes>(py)?;
549 match bytes.data(py).try_into() {
547 match bytes.data(py).try_into() {
550 Ok(s) => Ok(s),
548 Ok(s) => Ok(s),
551 Err(e) => Err(PyErr::new::<exc::ValueError, _>(py, e.to_string())),
549 Err(e) => Err(PyErr::new::<exc::ValueError, _>(py, e.to_string())),
552 }
550 }
553 }
551 }