rust-dirstatemap: add `NonNormalEntries` class...
Raphaël Gomès
r44836:71e13cfd stable
@@ -0,0 +1,52 @@
// non_normal_other_parent_entries.rs
//
// Copyright 2020 Raphaël Gomès <rgomes@octobus.net>
//
// This software may be used and distributed according to the terms of the
// GNU General Public License version 2 or any later version.

use cpython::{
    exc::NotImplementedError, CompareOp, ObjectProtocol, PyErr, PyList,
    PyObject, PyResult, PyString, Python, PythonObject, ToPyObject,
};

use crate::dirstate::DirstateMap;

py_class!(pub class NonNormalEntries |py| {
    data dmap: DirstateMap;

    def __contains__(&self, key: PyObject) -> PyResult<bool> {
        self.dmap(py).non_normal_entries_contains(py, key)
    }
    def remove(&self, key: PyObject) -> PyResult<PyObject> {
        self.dmap(py).non_normal_entries_remove(py, key)
    }
    def union(&self, other: PyObject) -> PyResult<PyList> {
        self.dmap(py).non_normal_entries_union(py, other)
    }
    def __richcmp__(&self, other: PyObject, op: CompareOp) -> PyResult<bool> {
        match op {
            CompareOp::Eq => self.is_equal_to(py, other),
            CompareOp::Ne => Ok(!self.is_equal_to(py, other)?),
            _ => Err(PyErr::new::<NotImplementedError, _>(py, ""))
        }
    }
    def __repr__(&self) -> PyResult<PyString> {
        self.dmap(py).non_normal_entries_display(py)
    }
});

impl NonNormalEntries {
    pub fn from_inner(py: Python, dm: DirstateMap) -> PyResult<Self> {
        Self::create_instance(py, dm)
    }

    fn is_equal_to(&self, py: Python, other: PyObject) -> PyResult<bool> {
        for item in other.iter(py)? {
            if !self.dmap(py).non_normal_entries_contains(py, item?)? {
                return Ok(false);
            }
        }
        Ok(true)
    }
}
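The new class gives Python callers a set-like view over the dirstate map's non-normal entries. As a rough illustration of the semantics wired up above (a sketch only: the model below is plain Python, and the `non_normal_entries_*` helpers it delegates to mirror the Rust methods used in the binding, not a public Mercurial API), it behaves like this:

class NonNormalEntriesSketch(object):
    """Illustrative pure-Python model of the Rust-backed class above."""

    def __init__(self, dmap):
        # `dmap` is assumed to expose the three helpers named in the binding
        self._dmap = dmap

    def __contains__(self, key):
        return self._dmap.non_normal_entries_contains(key)

    def remove(self, key):
        return self._dmap.non_normal_entries_remove(key)

    def union(self, other):
        return self._dmap.non_normal_entries_union(other)

    def __eq__(self, other):
        # mirrors is_equal_to(): every element of `other` must be a
        # non-normal entry; this is a one-way containment check, not a
        # symmetric set comparison
        return all(item in self for item in other)

Note that `__richcmp__` only implements `==` and `!=`; ordered comparisons fall through to the last match arm and raise `NotImplementedError`.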
@@ -1,1863 +1,1863 @@
# dirstate.py - working directory tracking for mercurial
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import collections
import contextlib
import errno
import os
import stat

from .i18n import _
from .node import nullid
from .pycompat import delattr

from hgdemandimport import tracing

from . import (
    encoding,
    error,
    match as matchmod,
    pathutil,
    policy,
    pycompat,
    scmutil,
    txnutil,
    util,
)

from .interfaces import (
    dirstate as intdirstate,
    util as interfaceutil,
)

parsers = policy.importmod('parsers')
rustmod = policy.importrust('dirstate')

propertycache = util.propertycache
filecache = scmutil.filecache
_rangemask = 0x7FFFFFFF

dirstatetuple = parsers.dirstatetuple


class repocache(filecache):
    """filecache for files in .hg/"""

    def join(self, obj, fname):
        return obj._opener.join(fname)


class rootcache(filecache):
    """filecache for files in the repository root"""

    def join(self, obj, fname):
        return obj._join(fname)


def _getfsnow(vfs):
    '''Get "now" timestamp on filesystem'''
    tmpfd, tmpname = vfs.mkstemp()
    try:
        return os.fstat(tmpfd)[stat.ST_MTIME]
    finally:
        os.close(tmpfd)
        vfs.unlink(tmpname)


@interfaceutil.implementer(intdirstate.idirstate)
class dirstate(object):
    def __init__(self, opener, ui, root, validate, sparsematchfn):
        '''Create a new dirstate object.

        opener is an open()-like callable that can be used to open the
        dirstate file; root is the root of the directory tracked by
        the dirstate.
        '''
        self._opener = opener
        self._validate = validate
        self._root = root
        self._sparsematchfn = sparsematchfn
        # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
        # UNC path pointing to root share (issue4557)
        self._rootdir = pathutil.normasprefix(root)
        self._dirty = False
        self._lastnormaltime = 0
        self._ui = ui
        self._filecache = {}
        self._parentwriters = 0
        self._filename = b'dirstate'
        self._pendingfilename = b'%s.pending' % self._filename
        self._plchangecallbacks = {}
        self._origpl = None
        self._updatedfiles = set()
        self._mapcls = dirstatemap
        # Access and cache cwd early, so we don't access it for the first time
        # after a working-copy update caused it to not exist (accessing it then
        # raises an exception).
        self._cwd

    @contextlib.contextmanager
    def parentchange(self):
        '''Context manager for handling dirstate parents.

        If an exception occurs in the scope of the context manager,
        the incoherent dirstate won't be written when wlock is
        released.
        '''
        self._parentwriters += 1
        yield
        # Typically we want the "undo" step of a context manager in a
        # finally block so it happens even when an exception
        # occurs. In this case, however, we only want to decrement
        # parentwriters if the code in the with statement exits
        # normally, so we don't have a try/finally here on purpose.
        self._parentwriters -= 1

    def pendingparentchange(self):
        '''Returns true if the dirstate is in the middle of a set of changes
        that modify the dirstate parent.
        '''
        return self._parentwriters > 0

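    # Usage sketch (illustrative, not part of dirstate.py): setparents()
    # below refuses to run unless a parentchange() context is active, so
    # callers pair the two roughly like this, where `repo`, `p1` and `p2`
    # are assumptions of the example:
    #
    #     with repo.dirstate.parentchange():
    #         repo.dirstate.setparents(p1, p2)
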
    @propertycache
    def _map(self):
        """Return the dirstate contents (see documentation for dirstatemap)."""
        self._map = self._mapcls(self._ui, self._opener, self._root)
        return self._map

    @property
    def _sparsematcher(self):
        """The matcher for the sparse checkout.

        The working directory may not include every file from a manifest. The
        matcher obtained by this property will match a path if it is to be
        included in the working directory.
        """
        # TODO there is potential to cache this property. For now, the matcher
        # is resolved on every access. (But the called function does use a
        # cache to keep the lookup fast.)
        return self._sparsematchfn()

    @repocache(b'branch')
    def _branch(self):
        try:
            return self._opener.read(b"branch").strip() or b"default"
        except IOError as inst:
            if inst.errno != errno.ENOENT:
                raise
            return b"default"

    @property
    def _pl(self):
        return self._map.parents()

    def hasdir(self, d):
        return self._map.hastrackeddir(d)

    @rootcache(b'.hgignore')
    def _ignore(self):
        files = self._ignorefiles()
        if not files:
            return matchmod.never()

        pats = [b'include:%s' % f for f in files]
        return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)

    @propertycache
    def _slash(self):
        return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'

    @propertycache
    def _checklink(self):
        return util.checklink(self._root)

    @propertycache
    def _checkexec(self):
        return util.checkexec(self._root)

    @propertycache
    def _checkcase(self):
        return not util.fscasesensitive(self._join(b'.hg'))

    def _join(self, f):
        # much faster than os.path.join()
        # it's safe because f is always a relative path
        return self._rootdir + f

    def flagfunc(self, buildfallback):
        if self._checklink and self._checkexec:

            def f(x):
                try:
                    st = os.lstat(self._join(x))
                    if util.statislink(st):
                        return b'l'
                    if util.statisexec(st):
                        return b'x'
                except OSError:
                    pass
                return b''

            return f

        fallback = buildfallback()
        if self._checklink:

            def f(x):
                if os.path.islink(self._join(x)):
                    return b'l'
                if b'x' in fallback(x):
                    return b'x'
                return b''

            return f
        if self._checkexec:

            def f(x):
                if b'l' in fallback(x):
                    return b'l'
                if util.isexec(self._join(x)):
                    return b'x'
                return b''

            return f
        else:
            return fallback

    @propertycache
    def _cwd(self):
        # internal config: ui.forcecwd
        forcecwd = self._ui.config(b'ui', b'forcecwd')
        if forcecwd:
            return forcecwd
        return encoding.getcwd()

    def getcwd(self):
        '''Return the path from which a canonical path is calculated.

        This path should be used to resolve file patterns or to convert
        canonical paths back to file paths for display. It shouldn't be
        used to get real file paths. Use vfs functions instead.
        '''
        cwd = self._cwd
        if cwd == self._root:
            return b''
        # self._root ends with a path separator if self._root is '/' or 'C:\'
        rootsep = self._root
        if not util.endswithsep(rootsep):
            rootsep += pycompat.ossep
        if cwd.startswith(rootsep):
            return cwd[len(rootsep) :]
        else:
            # we're outside the repo. return an absolute path.
            return cwd

    def pathto(self, f, cwd=None):
        if cwd is None:
            cwd = self.getcwd()
        path = util.pathto(self._root, cwd, f)
        if self._slash:
            return util.pconvert(path)
        return path

    def __getitem__(self, key):
        '''Return the current state of key (a filename) in the dirstate.

        States are:
          n  normal
          m  needs merging
          r  marked for removal
          a  marked for addition
          ?  not tracked
        '''
        return self._map.get(key, (b"?",))[0]

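    # Illustrative lookups for the mapping above, for a dirstate instance
    # `ds` (an assumption of this example, not part of dirstate.py):
    #
    #     ds[b'newly-added-file']  # -> b'a'
    #     ds[b'untracked-file']    # -> b'?'
    #
    # missing keys never raise, because get() falls back to (b"?",).
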
    def __contains__(self, key):
        return key in self._map

    def __iter__(self):
        return iter(sorted(self._map))

    def items(self):
        return pycompat.iteritems(self._map)

    iteritems = items

    def parents(self):
        return [self._validate(p) for p in self._pl]

    def p1(self):
        return self._validate(self._pl[0])

    def p2(self):
        return self._validate(self._pl[1])

    def branch(self):
        return encoding.tolocal(self._branch)

    def setparents(self, p1, p2=nullid):
        """Set dirstate parents to p1 and p2.

        When moving from two parents to one, 'm' merged entries are
        adjusted to normal, and previous copy records are discarded and
        returned by the call.

        See localrepo.setparents()
        """
        if self._parentwriters == 0:
            raise ValueError(
                b"cannot set dirstate parent outside of "
                b"dirstate.parentchange context manager"
            )

        self._dirty = True
        oldp2 = self._pl[1]
        if self._origpl is None:
            self._origpl = self._pl
        self._map.setparents(p1, p2)
        copies = {}
        if oldp2 != nullid and p2 == nullid:
            candidatefiles = self._map.nonnormalset.union(
                self._map.otherparentset
            )
            for f in candidatefiles:
                s = self._map.get(f)
                if s is None:
                    continue

                # Discard 'm' markers when moving away from a merge state
                if s[0] == b'm':
                    source = self._map.copymap.get(f)
                    if source:
                        copies[f] = source
                    self.normallookup(f)
                # Also fix up otherparent markers
                elif s[0] == b'n' and s[2] == -2:
                    source = self._map.copymap.get(f)
                    if source:
                        copies[f] = source
                    self.add(f)
        return copies

    def setbranch(self, branch):
        self.__class__._branch.set(self, encoding.fromlocal(branch))
        f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
        try:
            f.write(self._branch + b'\n')
            f.close()

            # make sure filecache has the correct stat info for _branch after
            # replacing the underlying file
            ce = self._filecache[b'_branch']
            if ce:
                ce.refresh()
        except:  # re-raises
            f.discard()
            raise

    def invalidate(self):
        '''Causes the next access to reread the dirstate.

        This is different from localrepo.invalidatedirstate() because it always
        rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
        check whether the dirstate has changed before rereading it.'''

        for a in ("_map", "_branch", "_ignore"):
            if a in self.__dict__:
                delattr(self, a)
        self._lastnormaltime = 0
        self._dirty = False
        self._updatedfiles.clear()
        self._parentwriters = 0
        self._origpl = None

    def copy(self, source, dest):
        """Mark dest as a copy of source. Unmark dest if source is None."""
        if source == dest:
            return
        self._dirty = True
        if source is not None:
            self._map.copymap[dest] = source
            self._updatedfiles.add(source)
            self._updatedfiles.add(dest)
        elif self._map.copymap.pop(dest, None):
            self._updatedfiles.add(dest)

    def copied(self, file):
        return self._map.copymap.get(file, None)

    def copies(self):
        return self._map.copymap

    def _addpath(self, f, state, mode, size, mtime):
        oldstate = self[f]
        if state == b'a' or oldstate == b'r':
            scmutil.checkfilename(f)
            if self._map.hastrackeddir(f):
                raise error.Abort(
                    _(b'directory %r already in dirstate') % pycompat.bytestr(f)
                )
            # shadows
            for d in pathutil.finddirs(f):
                if self._map.hastrackeddir(d):
                    break
                entry = self._map.get(d)
                if entry is not None and entry[0] != b'r':
                    raise error.Abort(
                        _(b'file %r in dirstate clashes with %r')
                        % (pycompat.bytestr(d), pycompat.bytestr(f))
                    )
        self._dirty = True
        self._updatedfiles.add(f)
        self._map.addfile(f, oldstate, state, mode, size, mtime)

    def normal(self, f, parentfiledata=None):
        '''Mark a file normal and clean.

        parentfiledata: (mode, size, mtime) of the clean file

        parentfiledata should be computed from memory (for mode and
        size), at or as close as possible to the point where we
        determined the file was clean, to limit the risk of the
        file having been changed by an external process between the
        moment where the file was determined to be clean and now.'''
        if parentfiledata:
            (mode, size, mtime) = parentfiledata
        else:
            s = os.lstat(self._join(f))
            mode = s.st_mode
            size = s.st_size
            mtime = s[stat.ST_MTIME]
        self._addpath(f, b'n', mode, size & _rangemask, mtime & _rangemask)
        self._map.copymap.pop(f, None)
        if f in self._map.nonnormalset:
            self._map.nonnormalset.remove(f)
        if mtime > self._lastnormaltime:
            # Remember the most recent modification timeslot for status(),
            # to make sure we won't miss future size-preserving file content
            # modifications that happen within the same timeslot.
            self._lastnormaltime = mtime

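    # Illustrative call (not part of dirstate.py): a caller that already has
    # fresh stat data for `f` can pass it along and skip the lstat() above;
    # `ds`, `mode`, `size` and `mtime` are assumptions of this example:
    #
    #     ds.normal(b'path/to/f', parentfiledata=(mode, size, mtime))
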
    def normallookup(self, f):
        '''Mark a file normal, but possibly dirty.'''
        if self._pl[1] != nullid:
            # if there is a merge going on and the file was either
            # in state 'm' (-1) or coming from other parent (-2) before
            # being removed, restore that state.
            entry = self._map.get(f)
            if entry is not None:
                if entry[0] == b'r' and entry[2] in (-1, -2):
                    source = self._map.copymap.get(f)
                    if entry[2] == -1:
                        self.merge(f)
                    elif entry[2] == -2:
                        self.otherparent(f)
                    if source:
                        self.copy(source, f)
                    return
                if entry[0] == b'm' or entry[0] == b'n' and entry[2] == -2:
                    return
        self._addpath(f, b'n', 0, -1, -1)
        self._map.copymap.pop(f, None)

    def otherparent(self, f):
        '''Mark as coming from the other parent, always dirty.'''
        if self._pl[1] == nullid:
            raise error.Abort(
                _(b"setting %r to other parent only allowed in merges") % f
            )
        if f in self and self[f] == b'n':
            # merge-like
            self._addpath(f, b'm', 0, -2, -1)
        else:
            # add-like
            self._addpath(f, b'n', 0, -2, -1)
        self._map.copymap.pop(f, None)

    def add(self, f):
        '''Mark a file added.'''
        self._addpath(f, b'a', 0, -1, -1)
        self._map.copymap.pop(f, None)

    def remove(self, f):
        '''Mark a file removed.'''
        self._dirty = True
        oldstate = self[f]
        size = 0
        if self._pl[1] != nullid:
            entry = self._map.get(f)
            if entry is not None:
                # backup the previous state
                if entry[0] == b'm':  # merge
                    size = -1
                elif entry[0] == b'n' and entry[2] == -2:  # other parent
                    size = -2
                    self._map.otherparentset.add(f)
        self._updatedfiles.add(f)
        self._map.removefile(f, oldstate, size)
        if size == 0:
            self._map.copymap.pop(f, None)

    def merge(self, f):
        '''Mark a file merged.'''
        if self._pl[1] == nullid:
            return self.normallookup(f)
        return self.otherparent(f)

    def drop(self, f):
        '''Drop a file from the dirstate'''
        oldstate = self[f]
        if self._map.dropfile(f, oldstate):
            self._dirty = True
            self._updatedfiles.add(f)
            self._map.copymap.pop(f, None)

    def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
        if exists is None:
            exists = os.path.lexists(os.path.join(self._root, path))
        if not exists:
            # Maybe a path component exists
            if not ignoremissing and b'/' in path:
                d, f = path.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, None)
                folded = d + b"/" + f
            else:
                # No path components, preserve original case
                folded = path
        else:
            # recursively normalize leading directory components
            # against dirstate
            if b'/' in normed:
                d, f = normed.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, True)
                r = self._root + b"/" + d
                folded = d + b"/" + util.fspath(f, r)
            else:
                folded = util.fspath(normed, self._root)
            storemap[normed] = folded

        return folded

    def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
        normed = util.normcase(path)
        folded = self._map.filefoldmap.get(normed, None)
        if folded is None:
            if isknown:
                folded = path
            else:
                folded = self._discoverpath(
                    path, normed, ignoremissing, exists, self._map.filefoldmap
                )
        return folded

    def _normalize(self, path, isknown, ignoremissing=False, exists=None):
        normed = util.normcase(path)
        folded = self._map.filefoldmap.get(normed, None)
        if folded is None:
            folded = self._map.dirfoldmap.get(normed, None)
        if folded is None:
            if isknown:
                folded = path
            else:
                # store discovered result in dirfoldmap so that future
                # normalizefile calls don't start matching directories
                folded = self._discoverpath(
                    path, normed, ignoremissing, exists, self._map.dirfoldmap
                )
        return folded

    def normalize(self, path, isknown=False, ignoremissing=False):
        '''
        normalize the case of a pathname when on a casefolding filesystem

        isknown specifies whether the filename came from walking the
        disk, to avoid extra filesystem access.

        If ignoremissing is True, missing paths are returned
        unchanged. Otherwise, we try harder to normalize possibly
        existing path components.

        The normalized case is determined based on the following precedence:

        - version of name already stored in the dirstate
        - version of name stored on disk
        - version provided via command arguments
        '''

        if self._checkcase:
            return self._normalize(path, isknown, ignoremissing)
        return path

    def clear(self):
        self._map.clear()
        self._lastnormaltime = 0
        self._updatedfiles.clear()
        self._dirty = True

    def rebuild(self, parent, allfiles, changedfiles=None):
        if changedfiles is None:
            # Rebuild entire dirstate
            to_lookup = allfiles
            to_drop = []
            lastnormaltime = self._lastnormaltime
            self.clear()
            self._lastnormaltime = lastnormaltime
        elif len(changedfiles) < 10:
            # Avoid turning allfiles into a set, which can be expensive if it's
            # large.
            to_lookup = []
            to_drop = []
            for f in changedfiles:
                if f in allfiles:
                    to_lookup.append(f)
                else:
                    to_drop.append(f)
        else:
            changedfilesset = set(changedfiles)
            to_lookup = changedfilesset & set(allfiles)
            to_drop = changedfilesset - to_lookup

        if self._origpl is None:
            self._origpl = self._pl
        self._map.setparents(parent, nullid)

        for f in to_lookup:
            self.normallookup(f)
        for f in to_drop:
            self.drop(f)

        self._dirty = True

    def identity(self):
        '''Return identity of dirstate itself to detect changes in storage

        If identity of previous dirstate is equal to this, writing
        changes based on the former dirstate out can keep consistency.
        '''
        return self._map.identity

    def write(self, tr):
        if not self._dirty:
            return

        filename = self._filename
        if tr:
            # 'dirstate.write()' is not only for writing in-memory
            # changes out, but also for dropping ambiguous timestamps.
            # Delayed writing re-raises the 'ambiguous timestamp issue'.
            # See also the wiki page below for detail:
            # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan

            # emulate dropping timestamp in 'parsers.pack_dirstate'
            now = _getfsnow(self._opener)
            self._map.clearambiguoustimes(self._updatedfiles, now)

            # emulate that all 'dirstate.normal' results are written out
            self._lastnormaltime = 0
            self._updatedfiles.clear()

            # delay writing in-memory changes out
            tr.addfilegenerator(
                b'dirstate',
                (self._filename,),
                self._writedirstate,
                location=b'plain',
            )
            return

        st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
        self._writedirstate(st)

    def addparentchangecallback(self, category, callback):
        """add a callback to be called when the wd parents are changed

        Callback will be called with the following arguments:
            dirstate, (oldp1, oldp2), (newp1, newp2)

        Category is a unique identifier to allow overwriting an old callback
        with a newer callback.
        """
        self._plchangecallbacks[category] = callback

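    # Registration sketch (illustrative, not part of dirstate.py); the
    # category name and callback are assumptions of this example:
    #
    #     def onparents(dirstate, old, new):
    #         oldp1, oldp2 = old
    #         newp1, newp2 = new
    #         ...
    #
    #     repo.dirstate.addparentchangecallback(b'myext', onparents)
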
    def _writedirstate(self, st):
        # notify callbacks about parents change
        if self._origpl is not None and self._origpl != self._pl:
            for c, callback in sorted(
                pycompat.iteritems(self._plchangecallbacks)
            ):
                callback(self, self._origpl, self._pl)
            self._origpl = None
        # use the modification time of the newly created temporary file as the
        # filesystem's notion of 'now'
        now = util.fstat(st)[stat.ST_MTIME] & _rangemask

        # a large enough 'delaywrite' prevents 'pack_dirstate' from dropping
        # the timestamp of each entry in the dirstate, because of 'now > mtime'
        delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
        if delaywrite > 0:
            # do we have any files to delay for?
            for f, e in pycompat.iteritems(self._map):
                if e[0] == b'n' and e[3] == now:
                    import time  # to avoid useless import

                    # rather than sleep n seconds, sleep until the next
                    # multiple of n seconds
                    clock = time.time()
                    start = int(clock) - (int(clock) % delaywrite)
                    end = start + delaywrite
                    time.sleep(end - clock)
                    now = end  # trust our estimate that the end is near now
                    break

        self._map.write(st, now)
        self._lastnormaltime = 0
        self._dirty = False

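    # Worked example of the delaywrite arithmetic above (illustrative
    # numbers): with delaywrite = 2 and clock = 13.4, start = 13 - (13 % 2)
    # = 12, end = 12 + 2 = 14, so we sleep 0.6 seconds and then treat 14 as
    # "now".
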
    def _dirignore(self, f):
        if self._ignore(f):
            return True
        for p in pathutil.finddirs(f):
            if self._ignore(p):
                return True
        return False

    def _ignorefiles(self):
        files = []
        if os.path.exists(self._join(b'.hgignore')):
            files.append(self._join(b'.hgignore'))
        for name, path in self._ui.configitems(b"ui"):
            if name == b'ignore' or name.startswith(b'ignore.'):
                # we need to use os.path.join here rather than self._join
                # because path is arbitrary and user-specified
                files.append(os.path.join(self._rootdir, util.expandpath(path)))
        return files

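    # Illustrative configuration picked up by the loop above (the paths are
    # examples, not defaults):
    #
    #     [ui]
    #     ignore = ~/.hgignore-global
    #     ignore.work = ~/work/.hgignore
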
    def _ignorefileandline(self, f):
        files = collections.deque(self._ignorefiles())
        visited = set()
        while files:
            i = files.popleft()
            patterns = matchmod.readpatternfile(
                i, self._ui.warn, sourceinfo=True
            )
            for pattern, lineno, line in patterns:
                kind, p = matchmod._patsplit(pattern, b'glob')
                if kind == b"subinclude":
                    if p not in visited:
                        files.append(p)
                    continue
                m = matchmod.match(
                    self._root, b'', [], [pattern], warn=self._ui.warn
                )
                if m(f):
                    return (i, lineno, line)
            visited.add(i)
        return (None, -1, b"")

    def _walkexplicit(self, match, subrepos):
        '''Get stat data about the files explicitly specified by match.

        Return a triple (results, dirsfound, dirsnotfound).
        - results is a mapping from filename to stat result. It also contains
          listings mapping subrepos and .hg to None.
        - dirsfound is a list of files found to be directories.
        - dirsnotfound is a list of files that the dirstate thinks are
          directories and that were not found.'''

        def badtype(mode):
            kind = _(b'unknown')
            if stat.S_ISCHR(mode):
                kind = _(b'character device')
            elif stat.S_ISBLK(mode):
                kind = _(b'block device')
            elif stat.S_ISFIFO(mode):
                kind = _(b'fifo')
            elif stat.S_ISSOCK(mode):
                kind = _(b'socket')
            elif stat.S_ISDIR(mode):
                kind = _(b'directory')
            return _(b'unsupported file type (type is %s)') % kind

        badfn = match.bad
        dmap = self._map
        lstat = os.lstat
        getkind = stat.S_IFMT
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join
        dirsfound = []
        foundadd = dirsfound.append
        dirsnotfound = []
        notfoundadd = dirsnotfound.append

        if not match.isexact() and self._checkcase:
            normalize = self._normalize
        else:
            normalize = None

        files = sorted(match.files())
        subrepos.sort()
        i, j = 0, 0
        while i < len(files) and j < len(subrepos):
            subpath = subrepos[j] + b"/"
            if files[i] < subpath:
                i += 1
                continue
            while i < len(files) and files[i].startswith(subpath):
                del files[i]
            j += 1

        if not files or b'' in files:
            files = [b'']
            # constructing the foldmap is expensive, so don't do it for the
            # common case where files is ['']
            normalize = None
        results = dict.fromkeys(subrepos)
        results[b'.hg'] = None

        for ff in files:
            if normalize:
                nf = normalize(ff, False, True)
            else:
                nf = ff
            if nf in results:
                continue

            try:
                st = lstat(join(nf))
                kind = getkind(st.st_mode)
                if kind == dirkind:
                    if nf in dmap:
                        # file replaced by dir on disk but still in dirstate
                        results[nf] = None
                    foundadd((nf, ff))
                elif kind == regkind or kind == lnkkind:
                    results[nf] = st
                else:
                    badfn(ff, badtype(kind))
                    if nf in dmap:
                        results[nf] = None
            except OSError as inst:  # nf not found on disk - it is dirstate only
                if nf in dmap:  # does it exactly match a missing file?
                    results[nf] = None
                else:  # does it match a missing directory?
                    if self._map.hasdir(nf):
                        notfoundadd(nf)
                    else:
                        badfn(ff, encoding.strtolocal(inst.strerror))

        # match.files() may contain explicitly-specified paths that shouldn't
        # be taken; drop them from the list of files found. dirsfound/notfound
        # aren't filtered here because they will be tested later.
        if match.anypats():
            for f in list(results):
                if f == b'.hg' or f in subrepos:
                    # keep sentinel to disable further out-of-repo walks
                    continue
                if not match(f):
                    del results[f]

        # Case insensitive filesystems cannot rely on lstat() failing to detect
        # a case-only rename. Prune the stat object for any file that does not
        # match the case in the filesystem, if there are multiple files that
        # normalize to the same path.
        if match.isexact() and self._checkcase:
            normed = {}

            for f, st in pycompat.iteritems(results):
                if st is None:
                    continue

                nc = util.normcase(f)
                paths = normed.get(nc)

                if paths is None:
                    paths = set()
                    normed[nc] = paths

                paths.add(f)

            for norm, paths in pycompat.iteritems(normed):
                if len(paths) > 1:
                    for path in paths:
                        folded = self._discoverpath(
                            path, norm, True, None, self._map.dirfoldmap
                        )
                        if path != folded:
                            results[path] = None

        return results, dirsfound, dirsnotfound

    def walk(self, match, subrepos, unknown, ignored, full=True):
        '''
        Walk recursively through the directory tree, finding all files
        matched by match.

        If full is False, maybe skip some known-clean files.

        Return a dict mapping filename to stat-like object (either
        mercurial.osutil.stat instance or return value of os.stat()).

        '''
        # full is a flag that extensions that hook into walk can use -- this
        # implementation doesn't use it at all. This satisfies the contract
        # because we only guarantee a "maybe".

        if ignored:
            ignore = util.never
            dirignore = util.never
        elif unknown:
            ignore = self._ignore
            dirignore = self._dirignore
        else:
            # if not unknown and not ignored, drop dir recursion and step 2
            ignore = util.always
            dirignore = util.always

        matchfn = match.matchfn
        matchalways = match.always()
        matchtdir = match.traversedir
        dmap = self._map
        listdir = util.listdir
        lstat = os.lstat
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join

        exact = skipstep3 = False
        if match.isexact():  # match.exact
            exact = True
            dirignore = util.always  # skip step 2
        elif match.prefix():  # match.match, no patterns
            skipstep3 = True

        if not exact and self._checkcase:
            normalize = self._normalize
            normalizefile = self._normalizefile
            skipstep3 = False
        else:
            normalize = self._normalize
            normalizefile = None

        # step 1: find all explicit files
        results, work, dirsnotfound = self._walkexplicit(match, subrepos)
        if matchtdir:
            for d in work:
                matchtdir(d[0])
            for d in dirsnotfound:
                matchtdir(d)

        skipstep3 = skipstep3 and not (work or dirsnotfound)
        work = [d for d in work if not dirignore(d[0])]

        # step 2: visit subdirectories
        def traverse(work, alreadynormed):
            wadd = work.append
            while work:
                tracing.counter('dirstate.walk work', len(work))
                nd = work.pop()
                visitentries = match.visitchildrenset(nd)
                if not visitentries:
                    continue
                if visitentries == b'this' or visitentries == b'all':
                    visitentries = None
                skip = None
                if nd != b'':
                    skip = b'.hg'
                try:
                    with tracing.log('dirstate.walk.traverse listdir %s', nd):
                        entries = listdir(join(nd), stat=True, skip=skip)
                except OSError as inst:
                    if inst.errno in (errno.EACCES, errno.ENOENT):
                        match.bad(
                            self.pathto(nd), encoding.strtolocal(inst.strerror)
                        )
                        continue
                    raise
                for f, kind, st in entries:
                    # Some matchers may return files in the visitentries set,
                    # instead of 'this', if the matcher explicitly mentions them
                    # and is not an exactmatcher. This is acceptable; we do not
                    # make any hard assumptions about file-or-directory below
                    # based on the presence of `f` in visitentries. If
                    # visitchildrenset returned a set, we can always skip the
                    # entries *not* in the set it provided regardless of whether
                    # they're actually a file or a directory.
                    if visitentries and f not in visitentries:
                        continue
                    if normalizefile:
                        # even though f might be a directory, we're only
                        # interested in comparing it to files currently in the
                        # dmap -- therefore normalizefile is enough
                        nf = normalizefile(
                            nd and (nd + b"/" + f) or f, True, True
                        )
                    else:
                        nf = nd and (nd + b"/" + f) or f
1005 if nf not in results:
1005 if nf not in results:
1006 if kind == dirkind:
1006 if kind == dirkind:
1007 if not ignore(nf):
1007 if not ignore(nf):
1008 if matchtdir:
1008 if matchtdir:
1009 matchtdir(nf)
1009 matchtdir(nf)
1010 wadd(nf)
1010 wadd(nf)
1011 if nf in dmap and (matchalways or matchfn(nf)):
1011 if nf in dmap and (matchalways or matchfn(nf)):
1012 results[nf] = None
1012 results[nf] = None
1013 elif kind == regkind or kind == lnkkind:
1013 elif kind == regkind or kind == lnkkind:
1014 if nf in dmap:
1014 if nf in dmap:
1015 if matchalways or matchfn(nf):
1015 if matchalways or matchfn(nf):
1016 results[nf] = st
1016 results[nf] = st
1017 elif (matchalways or matchfn(nf)) and not ignore(
1017 elif (matchalways or matchfn(nf)) and not ignore(
1018 nf
1018 nf
1019 ):
1019 ):
1020 # unknown file -- normalize if necessary
1020 # unknown file -- normalize if necessary
1021 if not alreadynormed:
1021 if not alreadynormed:
1022 nf = normalize(nf, False, True)
1022 nf = normalize(nf, False, True)
1023 results[nf] = st
1023 results[nf] = st
1024 elif nf in dmap and (matchalways or matchfn(nf)):
1024 elif nf in dmap and (matchalways or matchfn(nf)):
1025 results[nf] = None
1025 results[nf] = None
1026
1026
1027 for nd, d in work:
1027 for nd, d in work:
1028 # alreadynormed means that processwork doesn't have to do any
1028 # alreadynormed means that processwork doesn't have to do any
1029 # expensive directory normalization
1029 # expensive directory normalization
1030 alreadynormed = not normalize or nd == d
1030 alreadynormed = not normalize or nd == d
1031 traverse([d], alreadynormed)
1031 traverse([d], alreadynormed)
1032
1032
1033 for s in subrepos:
1033 for s in subrepos:
1034 del results[s]
1034 del results[s]
1035 del results[b'.hg']
1035 del results[b'.hg']
1036
1036
1037 # step 3: visit remaining files from dmap
1037 # step 3: visit remaining files from dmap
1038 if not skipstep3 and not exact:
1038 if not skipstep3 and not exact:
1039 # If a dmap file is not in results yet, it was either
1039 # If a dmap file is not in results yet, it was either
1040 # a) not matching matchfn b) ignored, c) missing, or d) under a
1040 # a) not matching matchfn b) ignored, c) missing, or d) under a
1041 # symlink directory.
1041 # symlink directory.
1042 if not results and matchalways:
1042 if not results and matchalways:
1043 visit = [f for f in dmap]
1043 visit = [f for f in dmap]
1044 else:
1044 else:
1045 visit = [f for f in dmap if f not in results and matchfn(f)]
1045 visit = [f for f in dmap if f not in results and matchfn(f)]
1046 visit.sort()
1046 visit.sort()
1047
1047
1048 if unknown:
1048 if unknown:
1049 # unknown == True means we walked all dirs under the roots
1049 # unknown == True means we walked all dirs under the roots
1050 # that weren't ignored, and everything that matched was stat'ed
1050 # that weren't ignored, and everything that matched was stat'ed
1051 # and is already in results.
1051 # and is already in results.
1052 # The rest must thus be ignored or under a symlink.
1052 # The rest must thus be ignored or under a symlink.
1053 audit_path = pathutil.pathauditor(self._root, cached=True)
1053 audit_path = pathutil.pathauditor(self._root, cached=True)
1054
1054
1055 for nf in iter(visit):
1055 for nf in iter(visit):
1056 # If a stat for the same file was already added with a
1056 # If a stat for the same file was already added with a
1057 # different case, don't add one for this, since that would
1057 # different case, don't add one for this, since that would
1058 # make it appear as if the file exists under both names
1058 # make it appear as if the file exists under both names
1059 # on disk.
1059 # on disk.
1060 if (
1060 if (
1061 normalizefile
1061 normalizefile
1062 and normalizefile(nf, True, True) in results
1062 and normalizefile(nf, True, True) in results
1063 ):
1063 ):
1064 results[nf] = None
1064 results[nf] = None
1065 # Report ignored items in the dmap as long as they are not
1065 # Report ignored items in the dmap as long as they are not
1066 # under a symlink directory.
1066 # under a symlink directory.
1067 elif audit_path.check(nf):
1067 elif audit_path.check(nf):
1068 try:
1068 try:
1069 results[nf] = lstat(join(nf))
1069 results[nf] = lstat(join(nf))
1070 # file was just ignored, no links, and exists
1070 # file was just ignored, no links, and exists
1071 except OSError:
1071 except OSError:
1072 # file doesn't exist
1072 # file doesn't exist
1073 results[nf] = None
1073 results[nf] = None
1074 else:
1074 else:
1075 # It's either missing or under a symlink directory
1075 # It's either missing or under a symlink directory
1076 # which we in this case report as missing
1076 # which we in this case report as missing
1077 results[nf] = None
1077 results[nf] = None
1078 else:
1078 else:
1079 # We may not have walked the full directory tree above,
1079 # We may not have walked the full directory tree above,
1080 # so stat and check everything we missed.
1080 # so stat and check everything we missed.
1081 iv = iter(visit)
1081 iv = iter(visit)
1082 for st in util.statfiles([join(i) for i in visit]):
1082 for st in util.statfiles([join(i) for i in visit]):
1083 results[next(iv)] = st
1083 results[next(iv)] = st
1084 return results
1084 return results
1085
1085
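A hedged usage sketch for walk() above (editorial illustration, not part of this patch): it assumes the current directory is a Mercurial checkout opened through the public localrepo API, uses an always-matcher, and prints each walked file together with whether a stat result was available.

    from mercurial import hg, match as matchmod, ui as uimod

    ui = uimod.ui.load()
    repo = hg.repository(ui, b'.')         # assumes cwd is a Mercurial checkout
    m = matchmod.always()                  # match every path
    walked = repo.dirstate.walk(m, subrepos=[], unknown=True, ignored=False)
    for fn, st in sorted(walked.items()):
        # st is None for matched-but-missing files, a stat-like object otherwise
        ui.write(b'%s %s\n' % (fn, b'(missing)' if st is None else b'(on disk)'))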
1086 def _rust_status(self, matcher, list_clean):
1086 def _rust_status(self, matcher, list_clean):
1087 # Force Rayon (Rust parallelism library) to respect the number of
1087 # Force Rayon (Rust parallelism library) to respect the number of
1088 # workers. This is a temporary workaround until Rust code knows
1088 # workers. This is a temporary workaround until Rust code knows
1089 # how to read the config file.
1089 # how to read the config file.
1090 numcpus = self._ui.configint(b"worker", b"numcpus")
1090 numcpus = self._ui.configint(b"worker", b"numcpus")
1091 if numcpus is not None:
1091 if numcpus is not None:
1092 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1092 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1093
1093
1094 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1094 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1095 if not workers_enabled:
1095 if not workers_enabled:
1096 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1096 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1097
1097
1098 (
1098 (
1099 lookup,
1099 lookup,
1100 modified,
1100 modified,
1101 added,
1101 added,
1102 removed,
1102 removed,
1103 deleted,
1103 deleted,
1104 unknown,
1104 unknown,
1105 clean,
1105 clean,
1106 ) = rustmod.status(
1106 ) = rustmod.status(
1107 self._map._rustmap,
1107 self._map._rustmap,
1108 matcher,
1108 matcher,
1109 self._rootdir,
1109 self._rootdir,
1110 bool(list_clean),
1110 bool(list_clean),
1111 self._lastnormaltime,
1111 self._lastnormaltime,
1112 self._checkexec,
1112 self._checkexec,
1113 )
1113 )
1114
1114
1115 status = scmutil.status(
1115 status = scmutil.status(
1116 modified=modified,
1116 modified=modified,
1117 added=added,
1117 added=added,
1118 removed=removed,
1118 removed=removed,
1119 deleted=deleted,
1119 deleted=deleted,
1120 unknown=unknown,
1120 unknown=unknown,
1121 ignored=[],
1121 ignored=[],
1122 clean=clean,
1122 clean=clean,
1123 )
1123 )
1124 return (lookup, status)
1124 return (lookup, status)
1125
1125
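The RAYON_NUM_THREADS handling at the top of _rust_status() can be summarised by a small standalone sketch (illustrative only, using plain os.environ instead of encoding.environ): the value of the worker.numcpus knob is exported so the Rust thread pool respects it, and worker.enabled=False pins it to a single thread.

    import os

    def apply_rayon_workaround(numcpus, workers_enabled):
        # mirrors the temporary workaround in _rust_status() above;
        # Rayon reads RAYON_NUM_THREADS when its thread pool starts
        if numcpus is not None:
            os.environ.setdefault('RAYON_NUM_THREADS', '%d' % numcpus)
        if not workers_enabled:
            os.environ['RAYON_NUM_THREADS'] = '1'

    apply_rayon_workaround(numcpus=4, workers_enabled=True)
    print(os.environ['RAYON_NUM_THREADS'])  # '4' unless it was already set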
1126 def status(self, match, subrepos, ignored, clean, unknown):
1126 def status(self, match, subrepos, ignored, clean, unknown):
1127 '''Determine the status of the working copy relative to the
1127 '''Determine the status of the working copy relative to the
1128 dirstate and return a pair of (unsure, status), where status is of type
1128 dirstate and return a pair of (unsure, status), where status is of type
1129 scmutil.status and:
1129 scmutil.status and:
1130
1130
1131 unsure:
1131 unsure:
1132 files that might have been modified since the dirstate was
1132 files that might have been modified since the dirstate was
1133 written, but need to be read to be sure (size is the same
1133 written, but need to be read to be sure (size is the same
1134 but mtime differs)
1134 but mtime differs)
1135 status.modified:
1135 status.modified:
1136 files that have definitely been modified since the dirstate
1136 files that have definitely been modified since the dirstate
1137 was written (different size or mode)
1137 was written (different size or mode)
1138 status.clean:
1138 status.clean:
1139 files that have definitely not been modified since the
1139 files that have definitely not been modified since the
1140 dirstate was written
1140 dirstate was written
1141 '''
1141 '''
1142 listignored, listclean, listunknown = ignored, clean, unknown
1142 listignored, listclean, listunknown = ignored, clean, unknown
1143 lookup, modified, added, unknown, ignored = [], [], [], [], []
1143 lookup, modified, added, unknown, ignored = [], [], [], [], []
1144 removed, deleted, clean = [], [], []
1144 removed, deleted, clean = [], [], []
1145
1145
1146 dmap = self._map
1146 dmap = self._map
1147 dmap.preload()
1147 dmap.preload()
1148
1148
1149 use_rust = True
1149 use_rust = True
1150
1150
1151 allowed_matchers = (matchmod.alwaysmatcher, matchmod.exactmatcher)
1151 allowed_matchers = (matchmod.alwaysmatcher, matchmod.exactmatcher)
1152
1152
1153 if rustmod is None:
1153 if rustmod is None:
1154 use_rust = False
1154 use_rust = False
1155 elif subrepos:
1155 elif subrepos:
1156 use_rust = False
1156 use_rust = False
1157 elif bool(listunknown):
1157 elif bool(listunknown):
1158 # Pathauditor does not exist yet in Rust, unknown files
1158 # Pathauditor does not exist yet in Rust, unknown files
1159 # can't be trusted.
1159 # can't be trusted.
1160 use_rust = False
1160 use_rust = False
1161 elif self._ignorefiles() and listignored:
1161 elif self._ignorefiles() and listignored:
1162 # Rust has no ignore mechanism yet, so don't use Rust for
1162 # Rust has no ignore mechanism yet, so don't use Rust for
1163 # commands that need ignore.
1163 # commands that need ignore.
1164 use_rust = False
1164 use_rust = False
1165 elif not isinstance(match, allowed_matchers):
1165 elif not isinstance(match, allowed_matchers):
1166 # Matchers have yet to be implemented
1166 # Matchers have yet to be implemented
1167 use_rust = False
1167 use_rust = False
1168
1168
1169 if use_rust:
1169 if use_rust:
1170 return self._rust_status(match, listclean)
1170 return self._rust_status(match, listclean)
1171
1171
1172 def noop(f):
1172 def noop(f):
1173 pass
1173 pass
1174
1174
1175 dcontains = dmap.__contains__
1175 dcontains = dmap.__contains__
1176 dget = dmap.__getitem__
1176 dget = dmap.__getitem__
1177 ladd = lookup.append # aka "unsure"
1177 ladd = lookup.append # aka "unsure"
1178 madd = modified.append
1178 madd = modified.append
1179 aadd = added.append
1179 aadd = added.append
1180 uadd = unknown.append if listunknown else noop
1180 uadd = unknown.append if listunknown else noop
1181 iadd = ignored.append if listignored else noop
1181 iadd = ignored.append if listignored else noop
1182 radd = removed.append
1182 radd = removed.append
1183 dadd = deleted.append
1183 dadd = deleted.append
1184 cadd = clean.append if listclean else noop
1184 cadd = clean.append if listclean else noop
1185 mexact = match.exact
1185 mexact = match.exact
1186 dirignore = self._dirignore
1186 dirignore = self._dirignore
1187 checkexec = self._checkexec
1187 checkexec = self._checkexec
1188 copymap = self._map.copymap
1188 copymap = self._map.copymap
1189 lastnormaltime = self._lastnormaltime
1189 lastnormaltime = self._lastnormaltime
1190
1190
1191 # We need to do full walks when either
1191 # We need to do full walks when either
1192 # - we're listing all clean files, or
1192 # - we're listing all clean files, or
1193 # - match.traversedir does something, because match.traversedir should
1193 # - match.traversedir does something, because match.traversedir should
1194 # be called for every dir in the working dir
1194 # be called for every dir in the working dir
1195 full = listclean or match.traversedir is not None
1195 full = listclean or match.traversedir is not None
1196 for fn, st in pycompat.iteritems(
1196 for fn, st in pycompat.iteritems(
1197 self.walk(match, subrepos, listunknown, listignored, full=full)
1197 self.walk(match, subrepos, listunknown, listignored, full=full)
1198 ):
1198 ):
1199 if not dcontains(fn):
1199 if not dcontains(fn):
1200 if (listignored or mexact(fn)) and dirignore(fn):
1200 if (listignored or mexact(fn)) and dirignore(fn):
1201 if listignored:
1201 if listignored:
1202 iadd(fn)
1202 iadd(fn)
1203 else:
1203 else:
1204 uadd(fn)
1204 uadd(fn)
1205 continue
1205 continue
1206
1206
1207 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1207 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1208 # written like that for performance reasons. dmap[fn] is not a
1208 # written like that for performance reasons. dmap[fn] is not a
1209 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1209 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1210 # opcode has fast paths when the value to be unpacked is a tuple or
1210 # opcode has fast paths when the value to be unpacked is a tuple or
1211 # a list, but falls back to creating a full-fledged iterator in
1211 # a list, but falls back to creating a full-fledged iterator in
1212 # general. That is much slower than simply accessing and storing the
1212 # general. That is much slower than simply accessing and storing the
1213 # tuple members one by one.
1213 # tuple members one by one.
1214 t = dget(fn)
1214 t = dget(fn)
1215 state = t[0]
1215 state = t[0]
1216 mode = t[1]
1216 mode = t[1]
1217 size = t[2]
1217 size = t[2]
1218 time = t[3]
1218 time = t[3]
1219
1219
1220 if not st and state in b"nma":
1220 if not st and state in b"nma":
1221 dadd(fn)
1221 dadd(fn)
1222 elif state == b'n':
1222 elif state == b'n':
1223 if (
1223 if (
1224 size >= 0
1224 size >= 0
1225 and (
1225 and (
1226 (size != st.st_size and size != st.st_size & _rangemask)
1226 (size != st.st_size and size != st.st_size & _rangemask)
1227 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1227 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1228 )
1228 )
1229 or size == -2 # other parent
1229 or size == -2 # other parent
1230 or fn in copymap
1230 or fn in copymap
1231 ):
1231 ):
1232 madd(fn)
1232 madd(fn)
1233 elif (
1233 elif (
1234 time != st[stat.ST_MTIME]
1234 time != st[stat.ST_MTIME]
1235 and time != st[stat.ST_MTIME] & _rangemask
1235 and time != st[stat.ST_MTIME] & _rangemask
1236 ):
1236 ):
1237 ladd(fn)
1237 ladd(fn)
1238 elif st[stat.ST_MTIME] == lastnormaltime:
1238 elif st[stat.ST_MTIME] == lastnormaltime:
1239 # fn may have just been marked as normal and it may have
1239 # fn may have just been marked as normal and it may have
1240 # changed in the same second without changing its size.
1240 # changed in the same second without changing its size.
1241 # This can happen if we quickly do multiple commits.
1241 # This can happen if we quickly do multiple commits.
1242 # Force lookup, so we don't miss such a racy file change.
1242 # Force lookup, so we don't miss such a racy file change.
1243 ladd(fn)
1243 ladd(fn)
1244 elif listclean:
1244 elif listclean:
1245 cadd(fn)
1245 cadd(fn)
1246 elif state == b'm':
1246 elif state == b'm':
1247 madd(fn)
1247 madd(fn)
1248 elif state == b'a':
1248 elif state == b'a':
1249 aadd(fn)
1249 aadd(fn)
1250 elif state == b'r':
1250 elif state == b'r':
1251 radd(fn)
1251 radd(fn)
1252
1252
1253 return (
1253 return (
1254 lookup,
1254 lookup,
1255 scmutil.status(
1255 scmutil.status(
1256 modified, added, removed, deleted, unknown, ignored, clean
1256 modified, added, removed, deleted, unknown, ignored, clean
1257 ),
1257 ),
1258 )
1258 )
1259
1259
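A hedged sketch of how a caller consumes the (unsure, status) pair documented above; `repo` is assumed to be an open localrepository and the working-copy lock is assumed to be held, as the real callers arrange.

    from mercurial import match as matchmod

    m = matchmod.always()
    unsure, st = repo.dirstate.status(
        m, subrepos=[], ignored=False, clean=False, unknown=True
    )
    # `unsure` entries kept their recorded size but not their mtime: their
    # contents must still be read before calling them clean or modified.
    for f in unsure:
        repo.ui.write(b'needs content check: %s\n' % f)
    # the scmutil.status object carries the definite buckets
    repo.ui.write(b'%d modified, %d added, %d removed\n'
                  % (len(st.modified), len(st.added), len(st.removed)))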
1260 def matches(self, match):
1260 def matches(self, match):
1261 '''
1261 '''
1262 return files in the dirstate (in whatever state) filtered by match
1262 return files in the dirstate (in whatever state) filtered by match
1263 '''
1263 '''
1264 dmap = self._map
1264 dmap = self._map
1265 if match.always():
1265 if match.always():
1266 return dmap.keys()
1266 return dmap.keys()
1267 files = match.files()
1267 files = match.files()
1268 if match.isexact():
1268 if match.isexact():
1269 # fast path -- filter the other way around, since typically files is
1269 # fast path -- filter the other way around, since typically files is
1270 # much smaller than dmap
1270 # much smaller than dmap
1271 return [f for f in files if f in dmap]
1271 return [f for f in files if f in dmap]
1272 if match.prefix() and all(fn in dmap for fn in files):
1272 if match.prefix() and all(fn in dmap for fn in files):
1273 # fast path -- all the values are known to be files, so just return
1273 # fast path -- all the values are known to be files, so just return
1274 # that
1274 # that
1275 return list(files)
1275 return list(files)
1276 return [f for f in dmap if match(f)]
1276 return [f for f in dmap if match(f)]
1277
1277
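A small hedged example for matches() above: unlike walk() and status() it never touches the filesystem, it only filters the file names recorded in the dirstate. The pattern and the `repo` object are assumptions of the sketch.

    from mercurial import match as matchmod

    m = matchmod.match(repo.root, repo.root, [b'glob:**.py'])
    for f in sorted(repo.dirstate.matches(m)):
        repo.ui.write(b'%s\n' % f)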
1278 def _actualfilename(self, tr):
1278 def _actualfilename(self, tr):
1279 if tr:
1279 if tr:
1280 return self._pendingfilename
1280 return self._pendingfilename
1281 else:
1281 else:
1282 return self._filename
1282 return self._filename
1283
1283
1284 def savebackup(self, tr, backupname):
1284 def savebackup(self, tr, backupname):
1285 '''Save current dirstate into backup file'''
1285 '''Save current dirstate into backup file'''
1286 filename = self._actualfilename(tr)
1286 filename = self._actualfilename(tr)
1287 assert backupname != filename
1287 assert backupname != filename
1288
1288
1289 # use '_writedirstate' instead of 'write' to write changes certainly,
1289 # use '_writedirstate' instead of 'write' to write changes certainly,
1290 # because the latter omits writing out if transaction is running.
1290 # because the latter omits writing out if transaction is running.
1291 # output file will be used to create backup of dirstate at this point.
1291 # output file will be used to create backup of dirstate at this point.
1292 if self._dirty or not self._opener.exists(filename):
1292 if self._dirty or not self._opener.exists(filename):
1293 self._writedirstate(
1293 self._writedirstate(
1294 self._opener(filename, b"w", atomictemp=True, checkambig=True)
1294 self._opener(filename, b"w", atomictemp=True, checkambig=True)
1295 )
1295 )
1296
1296
1297 if tr:
1297 if tr:
1298 # ensure that subsequent tr.writepending returns True for
1298 # ensure that subsequent tr.writepending returns True for
1299 # changes written out above, even if dirstate is never
1299 # changes written out above, even if dirstate is never
1300 # changed after this
1300 # changed after this
1301 tr.addfilegenerator(
1301 tr.addfilegenerator(
1302 b'dirstate',
1302 b'dirstate',
1303 (self._filename,),
1303 (self._filename,),
1304 self._writedirstate,
1304 self._writedirstate,
1305 location=b'plain',
1305 location=b'plain',
1306 )
1306 )
1307
1307
1308 # ensure that pending file written above is unlinked at
1308 # ensure that pending file written above is unlinked at
1309 # failure, even if tr.writepending isn't invoked until the
1309 # failure, even if tr.writepending isn't invoked until the
1310 # end of this transaction
1310 # end of this transaction
1311 tr.registertmp(filename, location=b'plain')
1311 tr.registertmp(filename, location=b'plain')
1312
1312
1313 self._opener.tryunlink(backupname)
1313 self._opener.tryunlink(backupname)
1314 # hardlink backup is okay because _writedirstate is always called
1314 # hardlink backup is okay because _writedirstate is always called
1315 # with an "atomictemp=True" file.
1315 # with an "atomictemp=True" file.
1316 util.copyfile(
1316 util.copyfile(
1317 self._opener.join(filename),
1317 self._opener.join(filename),
1318 self._opener.join(backupname),
1318 self._opener.join(backupname),
1319 hardlink=True,
1319 hardlink=True,
1320 )
1320 )
1321
1321
1322 def restorebackup(self, tr, backupname):
1322 def restorebackup(self, tr, backupname):
1323 '''Restore dirstate by backup file'''
1323 '''Restore dirstate by backup file'''
1324 # this "invalidate()" prevents "wlock.release()" from writing
1324 # this "invalidate()" prevents "wlock.release()" from writing
1325 # changes of dirstate out after restoring from backup file
1325 # changes of dirstate out after restoring from backup file
1326 self.invalidate()
1326 self.invalidate()
1327 filename = self._actualfilename(tr)
1327 filename = self._actualfilename(tr)
1328 o = self._opener
1328 o = self._opener
1329 if util.samefile(o.join(backupname), o.join(filename)):
1329 if util.samefile(o.join(backupname), o.join(filename)):
1330 o.unlink(backupname)
1330 o.unlink(backupname)
1331 else:
1331 else:
1332 o.rename(backupname, filename, checkambig=True)
1332 o.rename(backupname, filename, checkambig=True)
1333
1333
1334 def clearbackup(self, tr, backupname):
1334 def clearbackup(self, tr, backupname):
1335 '''Clear backup file'''
1335 '''Clear backup file'''
1336 self._opener.unlink(backupname)
1336 self._opener.unlink(backupname)
1337
1337
1338
1338
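The three backup methods above are meant to bracket a risky working-copy update. A hedged sketch of that pattern follows; the operation name and the backup file name are hypothetical.

    backup = b'dirstate.example-backup'        # hypothetical backup name
    with repo.wlock():
        tr = None                              # no transaction in this sketch
        repo.dirstate.savebackup(tr, backup)
        try:
            risky_working_copy_update(repo)    # hypothetical guarded operation
        except Exception:
            repo.dirstate.restorebackup(tr, backup)
            raise
        else:
            repo.dirstate.clearbackup(tr, backup)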
1339 class dirstatemap(object):
1339 class dirstatemap(object):
1340 """Map encapsulating the dirstate's contents.
1340 """Map encapsulating the dirstate's contents.
1341
1341
1342 The dirstate contains the following state:
1342 The dirstate contains the following state:
1343
1343
1344 - `identity` is the identity of the dirstate file, which can be used to
1344 - `identity` is the identity of the dirstate file, which can be used to
1345 detect when changes have occurred to the dirstate file.
1345 detect when changes have occurred to the dirstate file.
1346
1346
1347 - `parents` is a pair containing the parents of the working copy. The
1347 - `parents` is a pair containing the parents of the working copy. The
1348 parents are updated by calling `setparents`.
1348 parents are updated by calling `setparents`.
1349
1349
1350 - the state map maps filenames to tuples of (state, mode, size, mtime),
1350 - the state map maps filenames to tuples of (state, mode, size, mtime),
1351 where state is a single character representing 'normal', 'added',
1351 where state is a single character representing 'normal', 'added',
1352 'removed', or 'merged'. It is read by treating the dirstate as a
1352 'removed', or 'merged'. It is read by treating the dirstate as a
1353 dict. File state is updated by calling the `addfile`, `removefile` and
1353 dict. File state is updated by calling the `addfile`, `removefile` and
1354 `dropfile` methods.
1354 `dropfile` methods.
1355
1355
1356 - `copymap` maps destination filenames to their source filename.
1356 - `copymap` maps destination filenames to their source filename.
1357
1357
1358 The dirstate also provides the following views onto the state:
1358 The dirstate also provides the following views onto the state:
1359
1359
1360 - `nonnormalset` is a set of the filenames that have state other
1360 - `nonnormalset` is a set of the filenames that have state other
1361 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1361 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1362
1362
1363 - `otherparentset` is a set of the filenames that are marked as coming
1363 - `otherparentset` is a set of the filenames that are marked as coming
1364 from the second parent when the dirstate is currently being merged.
1364 from the second parent when the dirstate is currently being merged.
1365
1365
1366 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1366 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1367 form that they appear as in the dirstate.
1367 form that they appear as in the dirstate.
1368
1368
1369 - `dirfoldmap` is a dict mapping normalized directory names to the
1369 - `dirfoldmap` is a dict mapping normalized directory names to the
1370 denormalized form that they appear as in the dirstate.
1370 denormalized form that they appear as in the dirstate.
1371 """
1371 """
1372
1372
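As a concrete illustration of the structures documented in the class docstring above (all values invented for the example), here is what the state map, copymap and the derived sets would look like for a tiny working copy:

    # one clean file, one freshly added file, and one file taken from the
    # other merge parent that is also recorded as a copy
    statemap = {
        b'README':     (b'n', 0o644, 42, 1583932800),  # normal
        b'newfile.py': (b'a', 0o644, -1, -1),          # added
        b'renamed.py': (b'n', 0o644, -2, -1),          # from other parent
    }
    copymap = {b'renamed.py': b'original.py'}          # destination -> source

    # same derivation the pure-Python nonnormalentries() fallback below uses
    nonnormalset = {f for f, e in statemap.items()
                    if e[0] != b'n' or e[3] == -1}
    otherparentset = {f for f, e in statemap.items()
                      if e[0] == b'n' and e[2] == -2}
    assert nonnormalset == {b'newfile.py', b'renamed.py'}
    assert otherparentset == {b'renamed.py'}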
1373 def __init__(self, ui, opener, root):
1373 def __init__(self, ui, opener, root):
1374 self._ui = ui
1374 self._ui = ui
1375 self._opener = opener
1375 self._opener = opener
1376 self._root = root
1376 self._root = root
1377 self._filename = b'dirstate'
1377 self._filename = b'dirstate'
1378
1378
1379 self._parents = None
1379 self._parents = None
1380 self._dirtyparents = False
1380 self._dirtyparents = False
1381
1381
1382 # for consistent view between _pl() and _read() invocations
1382 # for consistent view between _pl() and _read() invocations
1383 self._pendingmode = None
1383 self._pendingmode = None
1384
1384
1385 @propertycache
1385 @propertycache
1386 def _map(self):
1386 def _map(self):
1387 self._map = {}
1387 self._map = {}
1388 self.read()
1388 self.read()
1389 return self._map
1389 return self._map
1390
1390
1391 @propertycache
1391 @propertycache
1392 def copymap(self):
1392 def copymap(self):
1393 self.copymap = {}
1393 self.copymap = {}
1394 self._map
1394 self._map
1395 return self.copymap
1395 return self.copymap
1396
1396
1397 def clear(self):
1397 def clear(self):
1398 self._map.clear()
1398 self._map.clear()
1399 self.copymap.clear()
1399 self.copymap.clear()
1400 self.setparents(nullid, nullid)
1400 self.setparents(nullid, nullid)
1401 util.clearcachedproperty(self, b"_dirs")
1401 util.clearcachedproperty(self, b"_dirs")
1402 util.clearcachedproperty(self, b"_alldirs")
1402 util.clearcachedproperty(self, b"_alldirs")
1403 util.clearcachedproperty(self, b"filefoldmap")
1403 util.clearcachedproperty(self, b"filefoldmap")
1404 util.clearcachedproperty(self, b"dirfoldmap")
1404 util.clearcachedproperty(self, b"dirfoldmap")
1405 util.clearcachedproperty(self, b"nonnormalset")
1405 util.clearcachedproperty(self, b"nonnormalset")
1406 util.clearcachedproperty(self, b"otherparentset")
1406 util.clearcachedproperty(self, b"otherparentset")
1407
1407
1408 def items(self):
1408 def items(self):
1409 return pycompat.iteritems(self._map)
1409 return pycompat.iteritems(self._map)
1410
1410
1411 # forward for python2,3 compat
1411 # forward for python2,3 compat
1412 iteritems = items
1412 iteritems = items
1413
1413
1414 def __len__(self):
1414 def __len__(self):
1415 return len(self._map)
1415 return len(self._map)
1416
1416
1417 def __iter__(self):
1417 def __iter__(self):
1418 return iter(self._map)
1418 return iter(self._map)
1419
1419
1420 def get(self, key, default=None):
1420 def get(self, key, default=None):
1421 return self._map.get(key, default)
1421 return self._map.get(key, default)
1422
1422
1423 def __contains__(self, key):
1423 def __contains__(self, key):
1424 return key in self._map
1424 return key in self._map
1425
1425
1426 def __getitem__(self, key):
1426 def __getitem__(self, key):
1427 return self._map[key]
1427 return self._map[key]
1428
1428
1429 def keys(self):
1429 def keys(self):
1430 return self._map.keys()
1430 return self._map.keys()
1431
1431
1432 def preload(self):
1432 def preload(self):
1433 """Loads the underlying data, if it's not already loaded"""
1433 """Loads the underlying data, if it's not already loaded"""
1434 self._map
1434 self._map
1435
1435
1436 def addfile(self, f, oldstate, state, mode, size, mtime):
1436 def addfile(self, f, oldstate, state, mode, size, mtime):
1437 """Add a tracked file to the dirstate."""
1437 """Add a tracked file to the dirstate."""
1438 if oldstate in b"?r" and "_dirs" in self.__dict__:
1438 if oldstate in b"?r" and "_dirs" in self.__dict__:
1439 self._dirs.addpath(f)
1439 self._dirs.addpath(f)
1440 if oldstate == b"?" and "_alldirs" in self.__dict__:
1440 if oldstate == b"?" and "_alldirs" in self.__dict__:
1441 self._alldirs.addpath(f)
1441 self._alldirs.addpath(f)
1442 self._map[f] = dirstatetuple(state, mode, size, mtime)
1442 self._map[f] = dirstatetuple(state, mode, size, mtime)
1443 if state != b'n' or mtime == -1:
1443 if state != b'n' or mtime == -1:
1444 self.nonnormalset.add(f)
1444 self.nonnormalset.add(f)
1445 if size == -2:
1445 if size == -2:
1446 self.otherparentset.add(f)
1446 self.otherparentset.add(f)
1447
1447
1448 def removefile(self, f, oldstate, size):
1448 def removefile(self, f, oldstate, size):
1449 """
1449 """
1450 Mark a file as removed in the dirstate.
1450 Mark a file as removed in the dirstate.
1451
1451
1452 The `size` parameter is used to store sentinel values that indicate
1452 The `size` parameter is used to store sentinel values that indicate
1453 the file's previous state. In the future, we should refactor this
1453 the file's previous state. In the future, we should refactor this
1454 to be more explicit about what that state is.
1454 to be more explicit about what that state is.
1455 """
1455 """
1456 if oldstate not in b"?r" and "_dirs" in self.__dict__:
1456 if oldstate not in b"?r" and "_dirs" in self.__dict__:
1457 self._dirs.delpath(f)
1457 self._dirs.delpath(f)
1458 if oldstate == b"?" and "_alldirs" in self.__dict__:
1458 if oldstate == b"?" and "_alldirs" in self.__dict__:
1459 self._alldirs.addpath(f)
1459 self._alldirs.addpath(f)
1460 if "filefoldmap" in self.__dict__:
1460 if "filefoldmap" in self.__dict__:
1461 normed = util.normcase(f)
1461 normed = util.normcase(f)
1462 self.filefoldmap.pop(normed, None)
1462 self.filefoldmap.pop(normed, None)
1463 self._map[f] = dirstatetuple(b'r', 0, size, 0)
1463 self._map[f] = dirstatetuple(b'r', 0, size, 0)
1464 self.nonnormalset.add(f)
1464 self.nonnormalset.add(f)
1465
1465
1466 def dropfile(self, f, oldstate):
1466 def dropfile(self, f, oldstate):
1467 """
1467 """
1468 Remove a file from the dirstate. Returns True if the file was
1468 Remove a file from the dirstate. Returns True if the file was
1469 previously recorded.
1469 previously recorded.
1470 """
1470 """
1471 exists = self._map.pop(f, None) is not None
1471 exists = self._map.pop(f, None) is not None
1472 if exists:
1472 if exists:
1473 if oldstate != b"r" and "_dirs" in self.__dict__:
1473 if oldstate != b"r" and "_dirs" in self.__dict__:
1474 self._dirs.delpath(f)
1474 self._dirs.delpath(f)
1475 if "_alldirs" in self.__dict__:
1475 if "_alldirs" in self.__dict__:
1476 self._alldirs.delpath(f)
1476 self._alldirs.delpath(f)
1477 if "filefoldmap" in self.__dict__:
1477 if "filefoldmap" in self.__dict__:
1478 normed = util.normcase(f)
1478 normed = util.normcase(f)
1479 self.filefoldmap.pop(normed, None)
1479 self.filefoldmap.pop(normed, None)
1480 self.nonnormalset.discard(f)
1480 self.nonnormalset.discard(f)
1481 return exists
1481 return exists
1482
1482
1483 def clearambiguoustimes(self, files, now):
1483 def clearambiguoustimes(self, files, now):
1484 for f in files:
1484 for f in files:
1485 e = self.get(f)
1485 e = self.get(f)
1486 if e is not None and e[0] == b'n' and e[3] == now:
1486 if e is not None and e[0] == b'n' and e[3] == now:
1487 self._map[f] = dirstatetuple(e[0], e[1], e[2], -1)
1487 self._map[f] = dirstatetuple(e[0], e[1], e[2], -1)
1488 self.nonnormalset.add(f)
1488 self.nonnormalset.add(f)
1489
1489
1490 def nonnormalentries(self):
1490 def nonnormalentries(self):
1491 '''Compute the nonnormal dirstate entries from the dmap'''
1491 '''Compute the nonnormal dirstate entries from the dmap'''
1492 try:
1492 try:
1493 return parsers.nonnormalotherparententries(self._map)
1493 return parsers.nonnormalotherparententries(self._map)
1494 except AttributeError:
1494 except AttributeError:
1495 nonnorm = set()
1495 nonnorm = set()
1496 otherparent = set()
1496 otherparent = set()
1497 for fname, e in pycompat.iteritems(self._map):
1497 for fname, e in pycompat.iteritems(self._map):
1498 if e[0] != b'n' or e[3] == -1:
1498 if e[0] != b'n' or e[3] == -1:
1499 nonnorm.add(fname)
1499 nonnorm.add(fname)
1500 if e[0] == b'n' and e[2] == -2:
1500 if e[0] == b'n' and e[2] == -2:
1501 otherparent.add(fname)
1501 otherparent.add(fname)
1502 return nonnorm, otherparent
1502 return nonnorm, otherparent
1503
1503
1504 @propertycache
1504 @propertycache
1505 def filefoldmap(self):
1505 def filefoldmap(self):
1506 """Returns a dictionary mapping normalized case paths to their
1506 """Returns a dictionary mapping normalized case paths to their
1507 non-normalized versions.
1507 non-normalized versions.
1508 """
1508 """
1509 try:
1509 try:
1510 makefilefoldmap = parsers.make_file_foldmap
1510 makefilefoldmap = parsers.make_file_foldmap
1511 except AttributeError:
1511 except AttributeError:
1512 pass
1512 pass
1513 else:
1513 else:
1514 return makefilefoldmap(
1514 return makefilefoldmap(
1515 self._map, util.normcasespec, util.normcasefallback
1515 self._map, util.normcasespec, util.normcasefallback
1516 )
1516 )
1517
1517
1518 f = {}
1518 f = {}
1519 normcase = util.normcase
1519 normcase = util.normcase
1520 for name, s in pycompat.iteritems(self._map):
1520 for name, s in pycompat.iteritems(self._map):
1521 if s[0] != b'r':
1521 if s[0] != b'r':
1522 f[normcase(name)] = name
1522 f[normcase(name)] = name
1523 f[b'.'] = b'.' # prevents useless util.fspath() invocation
1523 f[b'.'] = b'.' # prevents useless util.fspath() invocation
1524 return f
1524 return f
1525
1525
1526 def hastrackeddir(self, d):
1526 def hastrackeddir(self, d):
1527 """
1527 """
1528 Returns True if the dirstate contains a tracked (not removed) file
1528 Returns True if the dirstate contains a tracked (not removed) file
1529 in this directory.
1529 in this directory.
1530 """
1530 """
1531 return d in self._dirs
1531 return d in self._dirs
1532
1532
1533 def hasdir(self, d):
1533 def hasdir(self, d):
1534 """
1534 """
1535 Returns True if the dirstate contains a file (tracked or removed)
1535 Returns True if the dirstate contains a file (tracked or removed)
1536 in this directory.
1536 in this directory.
1537 """
1537 """
1538 return d in self._alldirs
1538 return d in self._alldirs
1539
1539
1540 @propertycache
1540 @propertycache
1541 def _dirs(self):
1541 def _dirs(self):
1542 return pathutil.dirs(self._map, b'r')
1542 return pathutil.dirs(self._map, b'r')
1543
1543
1544 @propertycache
1544 @propertycache
1545 def _alldirs(self):
1545 def _alldirs(self):
1546 return pathutil.dirs(self._map)
1546 return pathutil.dirs(self._map)
1547
1547
1548 def _opendirstatefile(self):
1548 def _opendirstatefile(self):
1549 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1549 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1550 if self._pendingmode is not None and self._pendingmode != mode:
1550 if self._pendingmode is not None and self._pendingmode != mode:
1551 fp.close()
1551 fp.close()
1552 raise error.Abort(
1552 raise error.Abort(
1553 _(b'working directory state may be changed in parallel')
1553 _(b'working directory state may be changed in parallel')
1554 )
1554 )
1555 self._pendingmode = mode
1555 self._pendingmode = mode
1556 return fp
1556 return fp
1557
1557
1558 def parents(self):
1558 def parents(self):
1559 if not self._parents:
1559 if not self._parents:
1560 try:
1560 try:
1561 fp = self._opendirstatefile()
1561 fp = self._opendirstatefile()
1562 st = fp.read(40)
1562 st = fp.read(40)
1563 fp.close()
1563 fp.close()
1564 except IOError as err:
1564 except IOError as err:
1565 if err.errno != errno.ENOENT:
1565 if err.errno != errno.ENOENT:
1566 raise
1566 raise
1567 # File doesn't exist, so the current state is empty
1567 # File doesn't exist, so the current state is empty
1568 st = b''
1568 st = b''
1569
1569
1570 l = len(st)
1570 l = len(st)
1571 if l == 40:
1571 if l == 40:
1572 self._parents = (st[:20], st[20:40])
1572 self._parents = (st[:20], st[20:40])
1573 elif l == 0:
1573 elif l == 0:
1574 self._parents = (nullid, nullid)
1574 self._parents = (nullid, nullid)
1575 else:
1575 else:
1576 raise error.Abort(
1576 raise error.Abort(
1577 _(b'working directory state appears damaged!')
1577 _(b'working directory state appears damaged!')
1578 )
1578 )
1579
1579
1580 return self._parents
1580 return self._parents
1581
1581
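A hedged illustration of the header parents() above parses: the first 40 bytes of the dirstate file are simply the two 20-byte parent node ids, all zeroes for a freshly created working copy.

    from mercurial.node import hex, nullid

    st = nullid + nullid                 # what an empty dirstate would contain
    p1, p2 = st[:20], st[20:40]
    print(hex(p1), hex(p2))              # two all-zero 40-hex-digit node ids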
1582 def setparents(self, p1, p2):
1582 def setparents(self, p1, p2):
1583 self._parents = (p1, p2)
1583 self._parents = (p1, p2)
1584 self._dirtyparents = True
1584 self._dirtyparents = True
1585
1585
1586 def read(self):
1586 def read(self):
1587 # ignore HG_PENDING because identity is used only for writing
1587 # ignore HG_PENDING because identity is used only for writing
1588 self.identity = util.filestat.frompath(
1588 self.identity = util.filestat.frompath(
1589 self._opener.join(self._filename)
1589 self._opener.join(self._filename)
1590 )
1590 )
1591
1591
1592 try:
1592 try:
1593 fp = self._opendirstatefile()
1593 fp = self._opendirstatefile()
1594 try:
1594 try:
1595 st = fp.read()
1595 st = fp.read()
1596 finally:
1596 finally:
1597 fp.close()
1597 fp.close()
1598 except IOError as err:
1598 except IOError as err:
1599 if err.errno != errno.ENOENT:
1599 if err.errno != errno.ENOENT:
1600 raise
1600 raise
1601 return
1601 return
1602 if not st:
1602 if not st:
1603 return
1603 return
1604
1604
1605 if util.safehasattr(parsers, b'dict_new_presized'):
1605 if util.safehasattr(parsers, b'dict_new_presized'):
1606 # Make an estimate of the number of files in the dirstate based on
1606 # Make an estimate of the number of files in the dirstate based on
1607 # its size. From a linear regression on a set of real-world repos,
1607 # its size. From a linear regression on a set of real-world repos,
1608 # all over 10,000 files, the size of a dirstate entry is 85
1608 # all over 10,000 files, the size of a dirstate entry is 85
1609 # bytes. The cost of resizing is significantly higher than the cost
1609 # bytes. The cost of resizing is significantly higher than the cost
1610 # of filling in a larger presized dict, so subtract 20% from the
1610 # of filling in a larger presized dict, so subtract 20% from the
1611 # size.
1611 # size.
1612 #
1612 #
1613 # This heuristic is imperfect in many ways, so in a future dirstate
1613 # This heuristic is imperfect in many ways, so in a future dirstate
1614 # format update it makes sense to just record the number of entries
1614 # format update it makes sense to just record the number of entries
1615 # on write.
1615 # on write.
1616 self._map = parsers.dict_new_presized(len(st) // 71)
1616 self._map = parsers.dict_new_presized(len(st) // 71)
1617
1617
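A worked instance of the presizing heuristic in the comment above, with an illustrative file size: dividing by 71 instead of by the measured 85 bytes per entry presizes the dict roughly 20% larger than the expected number of entries, so it should not need to grow while parse_dirstate fills it.

    dirstate_size = 1048576          # a 1 MiB dirstate file, for example
    print(dirstate_size // 85)       # ~12336 entries the regression predicts
    print(dirstate_size // 71)       # ~14768 presized slots (about 20% more)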
1618 # Python's garbage collector triggers a GC each time a certain number
1618 # Python's garbage collector triggers a GC each time a certain number
1619 # of container objects (the number being defined by
1619 # of container objects (the number being defined by
1620 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1620 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1621 # for each file in the dirstate. The C version then immediately marks
1621 # for each file in the dirstate. The C version then immediately marks
1622 # them as not to be tracked by the collector. However, this has no
1622 # them as not to be tracked by the collector. However, this has no
1623 # effect on when GCs are triggered, only on what objects the GC looks
1623 # effect on when GCs are triggered, only on what objects the GC looks
1624 # into. This means that O(number of files) GCs are unavoidable.
1624 # into. This means that O(number of files) GCs are unavoidable.
1625 # Depending on when in the process's lifetime the dirstate is parsed,
1625 # Depending on when in the process's lifetime the dirstate is parsed,
1626 # this can get very expensive. As a workaround, disable GC while
1626 # this can get very expensive. As a workaround, disable GC while
1627 # parsing the dirstate.
1627 # parsing the dirstate.
1628 #
1628 #
1629 # (we cannot decorate the function directly since it is in a C module)
1629 # (we cannot decorate the function directly since it is in a C module)
1630 parse_dirstate = util.nogc(parsers.parse_dirstate)
1630 parse_dirstate = util.nogc(parsers.parse_dirstate)
1631 p = parse_dirstate(self._map, self.copymap, st)
1631 p = parse_dirstate(self._map, self.copymap, st)
1632 if not self._dirtyparents:
1632 if not self._dirtyparents:
1633 self.setparents(*p)
1633 self.setparents(*p)
1634
1634
1635 # Avoid excess attribute lookups by fast pathing certain checks
1635 # Avoid excess attribute lookups by fast pathing certain checks
1636 self.__contains__ = self._map.__contains__
1636 self.__contains__ = self._map.__contains__
1637 self.__getitem__ = self._map.__getitem__
1637 self.__getitem__ = self._map.__getitem__
1638 self.get = self._map.get
1638 self.get = self._map.get
1639
1639
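util.nogc, used in read() above, is not shown in this patch; a rough, hedged equivalent of what it does (disable the cyclic collector for the duration of one call, then restore it) is:

    import gc
    from functools import wraps

    def nogc_sketch(func):                 # illustrative stand-in for util.nogc
        @wraps(func)
        def wrapper(*args, **kwargs):
            enabled = gc.isenabled()
            gc.disable()                   # avoid O(number of files) GC passes
            try:
                return func(*args, **kwargs)
            finally:
                if enabled:
                    gc.enable()
        return wrapper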
1640 def write(self, st, now):
1640 def write(self, st, now):
1641 st.write(
1641 st.write(
1642 parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
1642 parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
1643 )
1643 )
1644 st.close()
1644 st.close()
1645 self._dirtyparents = False
1645 self._dirtyparents = False
1646 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1646 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1647
1647
1648 @propertycache
1648 @propertycache
1649 def nonnormalset(self):
1649 def nonnormalset(self):
1650 nonnorm, otherparents = self.nonnormalentries()
1650 nonnorm, otherparents = self.nonnormalentries()
1651 self.otherparentset = otherparents
1651 self.otherparentset = otherparents
1652 return nonnorm
1652 return nonnorm
1653
1653
1654 @propertycache
1654 @propertycache
1655 def otherparentset(self):
1655 def otherparentset(self):
1656 nonnorm, otherparents = self.nonnormalentries()
1656 nonnorm, otherparents = self.nonnormalentries()
1657 self.nonnormalset = nonnorm
1657 self.nonnormalset = nonnorm
1658 return otherparents
1658 return otherparents
1659
1659
1660 @propertycache
1660 @propertycache
1661 def identity(self):
1661 def identity(self):
1662 self._map
1662 self._map
1663 return self.identity
1663 return self.identity
1664
1664
1665 @propertycache
1665 @propertycache
1666 def dirfoldmap(self):
1666 def dirfoldmap(self):
1667 f = {}
1667 f = {}
1668 normcase = util.normcase
1668 normcase = util.normcase
1669 for name in self._dirs:
1669 for name in self._dirs:
1670 f[normcase(name)] = name
1670 f[normcase(name)] = name
1671 return f
1671 return f
1672
1672
1673
1673
1674 if rustmod is not None:
1674 if rustmod is not None:
1675
1675
1676 class dirstatemap(object):
1676 class dirstatemap(object):
1677 def __init__(self, ui, opener, root):
1677 def __init__(self, ui, opener, root):
1678 self._ui = ui
1678 self._ui = ui
1679 self._opener = opener
1679 self._opener = opener
1680 self._root = root
1680 self._root = root
1681 self._filename = b'dirstate'
1681 self._filename = b'dirstate'
1682 self._parents = None
1682 self._parents = None
1683 self._dirtyparents = False
1683 self._dirtyparents = False
1684
1684
1685 # for consistent view between _pl() and _read() invocations
1685 # for consistent view between _pl() and _read() invocations
1686 self._pendingmode = None
1686 self._pendingmode = None
1687
1687
1688 def addfile(self, *args, **kwargs):
1688 def addfile(self, *args, **kwargs):
1689 return self._rustmap.addfile(*args, **kwargs)
1689 return self._rustmap.addfile(*args, **kwargs)
1690
1690
1691 def removefile(self, *args, **kwargs):
1691 def removefile(self, *args, **kwargs):
1692 return self._rustmap.removefile(*args, **kwargs)
1692 return self._rustmap.removefile(*args, **kwargs)
1693
1693
1694 def dropfile(self, *args, **kwargs):
1694 def dropfile(self, *args, **kwargs):
1695 return self._rustmap.dropfile(*args, **kwargs)
1695 return self._rustmap.dropfile(*args, **kwargs)
1696
1696
1697 def clearambiguoustimes(self, *args, **kwargs):
1697 def clearambiguoustimes(self, *args, **kwargs):
1698 return self._rustmap.clearambiguoustimes(*args, **kwargs)
1698 return self._rustmap.clearambiguoustimes(*args, **kwargs)
1699
1699
1700 def nonnormalentries(self):
1700 def nonnormalentries(self):
1701 return self._rustmap.nonnormalentries()
1701 return self._rustmap.nonnormalentries()
1702
1702
1703 def get(self, *args, **kwargs):
1703 def get(self, *args, **kwargs):
1704 return self._rustmap.get(*args, **kwargs)
1704 return self._rustmap.get(*args, **kwargs)
1705
1705
1706 @propertycache
1706 @propertycache
1707 def _rustmap(self):
1707 def _rustmap(self):
1708 self._rustmap = rustmod.DirstateMap(self._root)
1708 self._rustmap = rustmod.DirstateMap(self._root)
1709 self.read()
1709 self.read()
1710 return self._rustmap
1710 return self._rustmap
1711
1711
1712 @property
1712 @property
1713 def copymap(self):
1713 def copymap(self):
1714 return self._rustmap.copymap()
1714 return self._rustmap.copymap()
1715
1715
1716 def preload(self):
1716 def preload(self):
1717 self._rustmap
1717 self._rustmap
1718
1718
1719 def clear(self):
1719 def clear(self):
1720 self._rustmap.clear()
1720 self._rustmap.clear()
1721 self.setparents(nullid, nullid)
1721 self.setparents(nullid, nullid)
1722 util.clearcachedproperty(self, b"_dirs")
1722 util.clearcachedproperty(self, b"_dirs")
1723 util.clearcachedproperty(self, b"_alldirs")
1723 util.clearcachedproperty(self, b"_alldirs")
1724 util.clearcachedproperty(self, b"dirfoldmap")
1724 util.clearcachedproperty(self, b"dirfoldmap")
1725
1725
1726 def items(self):
1726 def items(self):
1727 return self._rustmap.items()
1727 return self._rustmap.items()
1728
1728
1729 def keys(self):
1729 def keys(self):
1730 return iter(self._rustmap)
1730 return iter(self._rustmap)
1731
1731
1732 def __contains__(self, key):
1732 def __contains__(self, key):
1733 return key in self._rustmap
1733 return key in self._rustmap
1734
1734
1735 def __getitem__(self, item):
1735 def __getitem__(self, item):
1736 return self._rustmap[item]
1736 return self._rustmap[item]
1737
1737
1738 def __len__(self):
1738 def __len__(self):
1739 return len(self._rustmap)
1739 return len(self._rustmap)
1740
1740
1741 def __iter__(self):
1741 def __iter__(self):
1742 return iter(self._rustmap)
1742 return iter(self._rustmap)
1743
1743
1744 # forward for python2,3 compat
1744 # forward for python2,3 compat
1745 iteritems = items
1745 iteritems = items
1746
1746
1747 def _opendirstatefile(self):
1747 def _opendirstatefile(self):
1748 fp, mode = txnutil.trypending(
1748 fp, mode = txnutil.trypending(
1749 self._root, self._opener, self._filename
1749 self._root, self._opener, self._filename
1750 )
1750 )
1751 if self._pendingmode is not None and self._pendingmode != mode:
1751 if self._pendingmode is not None and self._pendingmode != mode:
1752 fp.close()
1752 fp.close()
1753 raise error.Abort(
1753 raise error.Abort(
1754 _(b'working directory state may be changed in parallel')
1754 _(b'working directory state may be changed in parallel')
1755 )
1755 )
1756 self._pendingmode = mode
1756 self._pendingmode = mode
1757 return fp
1757 return fp
1758
1758
1759 def setparents(self, p1, p2):
1759 def setparents(self, p1, p2):
1760 self._rustmap.setparents(p1, p2)
1760 self._rustmap.setparents(p1, p2)
1761 self._parents = (p1, p2)
1761 self._parents = (p1, p2)
1762 self._dirtyparents = True
1762 self._dirtyparents = True
1763
1763
1764 def parents(self):
1764 def parents(self):
1765 if not self._parents:
1765 if not self._parents:
1766 try:
1766 try:
1767 fp = self._opendirstatefile()
1767 fp = self._opendirstatefile()
1768 st = fp.read(40)
1768 st = fp.read(40)
1769 fp.close()
1769 fp.close()
1770 except IOError as err:
1770 except IOError as err:
1771 if err.errno != errno.ENOENT:
1771 if err.errno != errno.ENOENT:
1772 raise
1772 raise
1773 # File doesn't exist, so the current state is empty
1773 # File doesn't exist, so the current state is empty
1774 st = b''
1774 st = b''
1775
1775
1776 try:
1776 try:
1777 self._parents = self._rustmap.parents(st)
1777 self._parents = self._rustmap.parents(st)
1778 except ValueError:
1778 except ValueError:
1779 raise error.Abort(
1779 raise error.Abort(
1780 _(b'working directory state appears damaged!')
1780 _(b'working directory state appears damaged!')
1781 )
1781 )
1782
1782
1783 return self._parents
1783 return self._parents
1784
1784
1785 def read(self):
1785 def read(self):
1786 # ignore HG_PENDING because identity is used only for writing
1786 # ignore HG_PENDING because identity is used only for writing
1787 self.identity = util.filestat.frompath(
1787 self.identity = util.filestat.frompath(
1788 self._opener.join(self._filename)
1788 self._opener.join(self._filename)
1789 )
1789 )
1790
1790
1791 try:
1791 try:
1792 fp = self._opendirstatefile()
1792 fp = self._opendirstatefile()
1793 try:
1793 try:
1794 st = fp.read()
1794 st = fp.read()
1795 finally:
1795 finally:
1796 fp.close()
1796 fp.close()
1797 except IOError as err:
1797 except IOError as err:
1798 if err.errno != errno.ENOENT:
1798 if err.errno != errno.ENOENT:
1799 raise
1799 raise
1800 return
1800 return
1801 if not st:
1801 if not st:
1802 return
1802 return
1803
1803
1804 parse_dirstate = util.nogc(self._rustmap.read)
1804 parse_dirstate = util.nogc(self._rustmap.read)
1805 parents = parse_dirstate(st)
1805 parents = parse_dirstate(st)
1806 if parents and not self._dirtyparents:
1806 if parents and not self._dirtyparents:
1807 self.setparents(*parents)
1807 self.setparents(*parents)
1808
1808
1809 self.__contains__ = self._rustmap.__contains__
1809 self.__contains__ = self._rustmap.__contains__
1810 self.__getitem__ = self._rustmap.__getitem__
1810 self.__getitem__ = self._rustmap.__getitem__
1811 self.get = self._rustmap.get
1811 self.get = self._rustmap.get
1812
1812
1813 def write(self, st, now):
1813 def write(self, st, now):
1814 parents = self.parents()
1814 parents = self.parents()
1815 st.write(self._rustmap.write(parents[0], parents[1], now))
1815 st.write(self._rustmap.write(parents[0], parents[1], now))
1816 st.close()
1816 st.close()
1817 self._dirtyparents = False
1817 self._dirtyparents = False
1818
1818
1819 @propertycache
1819 @propertycache
1820 def filefoldmap(self):
1820 def filefoldmap(self):
1821 """Returns a dictionary mapping normalized case paths to their
1821 """Returns a dictionary mapping normalized case paths to their
1822 non-normalized versions.
1822 non-normalized versions.
1823 """
1823 """
1824 return self._rustmap.filefoldmapasdict()
1824 return self._rustmap.filefoldmapasdict()
1825
1825
1826 def hastrackeddir(self, d):
1826 def hastrackeddir(self, d):
1827 self._dirs # Trigger Python's propertycache
1827 self._dirs # Trigger Python's propertycache
1828 return self._rustmap.hastrackeddir(d)
1828 return self._rustmap.hastrackeddir(d)
1829
1829
1830 def hasdir(self, d):
1830 def hasdir(self, d):
1831 self._dirs # Trigger Python's propertycache
1831 self._dirs # Trigger Python's propertycache
1832 return self._rustmap.hasdir(d)
1832 return self._rustmap.hasdir(d)
1833
1833
1834 @propertycache
1834 @propertycache
1835 def _dirs(self):
1835 def _dirs(self):
1836 return self._rustmap.getdirs()
1836 return self._rustmap.getdirs()
1837
1837
1838 @propertycache
1838 @propertycache
1839 def _alldirs(self):
1839 def _alldirs(self):
1840 return self._rustmap.getalldirs()
1840 return self._rustmap.getalldirs()
1841
1841
1842 @propertycache
1842 @propertycache
1843 def identity(self):
1843 def identity(self):
1844 self._rustmap
1844 self._rustmap
1845 return self.identity
1845 return self.identity
1846
1846
1847 @property
1847 @property
1848 def nonnormalset(self):
1848 def nonnormalset(self):
1849 nonnorm, otherparents = self._rustmap.nonnormalentries()
1849 nonnorm = self._rustmap.non_normal_entries()
1850 return nonnorm
1850 return nonnorm
1851
1851
1852 @propertycache
1852 @propertycache
1853 def otherparentset(self):
1853 def otherparentset(self):
1854 nonnorm, otherparents = self._rustmap.nonnormalentries()
1854 otherparents = self._rustmap.other_parent_entries()
1855 return otherparents
1855 return otherparents
1856
1856
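The two properties above are the Python-side consumers of the change this patch makes: the Rust-backed map now exposes non_normal_entries() and other_parent_entries() separately instead of a single nonnormalentries() call that computed both. A hedged sketch of the caller-visible behaviour, assuming a Rust-backed repo.dirstate:

    dmap = repo.dirstate._map          # rust-backed dirstatemap, already loaded
    nonnormal = dmap.nonnormalset      # -> self._rustmap.non_normal_entries()
    otherparent = dmap.otherparentset  # -> self._rustmap.other_parent_entries()
    for f in sorted(nonnormal):
        repo.ui.write(b'not normal or needs lookup: %s\n' % f)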
1857 @propertycache
1857 @propertycache
1858 def dirfoldmap(self):
1858 def dirfoldmap(self):
1859 f = {}
1859 f = {}
1860 normcase = util.normcase
1860 normcase = util.normcase
1861 for name in self._dirs:
1861 for name in self._dirs:
1862 f[normcase(name)] = name
1862 f[normcase(name)] = name
1863 return f
1863 return f
@@ -1,132 +1,133 @@
1 // dirstate.rs
1 // dirstate.rs
2 //
2 //
3 // Copyright 2019 Raphaël Gomès <rgomes@octobus.net>
3 // Copyright 2019 Raphaël Gomès <rgomes@octobus.net>
4 //
4 //
5 // This software may be used and distributed according to the terms of the
5 // This software may be used and distributed according to the terms of the
6 // GNU General Public License version 2 or any later version.
6 // GNU General Public License version 2 or any later version.
7
7
8 //! Bindings for the `hg::dirstate` module provided by the
8 //! Bindings for the `hg::dirstate` module provided by the
9 //! `hg-core` package.
9 //! `hg-core` package.
10 //!
10 //!
11 //! From Python, this will be seen as `mercurial.rustext.dirstate`
11 //! From Python, this will be seen as `mercurial.rustext.dirstate`
12 mod copymap;
12 mod copymap;
13 mod dirs_multiset;
13 mod dirs_multiset;
14 mod dirstate_map;
14 mod dirstate_map;
15 mod non_normal_entries;
15 mod status;
16 mod status;
16 use crate::dirstate::{
17 use crate::dirstate::{
17 dirs_multiset::Dirs, dirstate_map::DirstateMap, status::status_wrapper,
18 dirs_multiset::Dirs, dirstate_map::DirstateMap, status::status_wrapper,
18 };
19 };
19 use cpython::{
20 use cpython::{
20 exc, PyBytes, PyDict, PyErr, PyModule, PyObject, PyResult, PySequence,
21 exc, PyBytes, PyDict, PyErr, PyModule, PyObject, PyResult, PySequence,
21 Python,
22 Python,
22 };
23 };
23 use hg::{
24 use hg::{
24 utils::hg_path::HgPathBuf, DirstateEntry, DirstateParseError, EntryState,
25 utils::hg_path::HgPathBuf, DirstateEntry, DirstateParseError, EntryState,
25 StateMap,
26 StateMap,
26 };
27 };
27 use libc::{c_char, c_int};
28 use libc::{c_char, c_int};
28 use std::convert::TryFrom;
29 use std::convert::TryFrom;
29
30
30 // C code uses a custom `dirstate_tuple` type, checks in multiple instances
31 // C code uses a custom `dirstate_tuple` type, checks in multiple instances
31 // for this type, and raises a Python `Exception` if the check does not pass.
32 // for this type, and raises a Python `Exception` if the check does not pass.
32 // Because this type differs only in name from the regular Python tuple, it
33 // Because this type differs only in name from the regular Python tuple, it
33 // would be a good idea in the near future to remove it entirely to allow
34 // would be a good idea in the near future to remove it entirely to allow
34 // for a pure Python tuple of the same effective structure to be used,
35 // for a pure Python tuple of the same effective structure to be used,
35 // rendering this type and the capsule below useless.
36 // rendering this type and the capsule below useless.
36 py_capsule_fn!(
37 py_capsule_fn!(
37 from mercurial.cext.parsers import make_dirstate_tuple_CAPI
38 from mercurial.cext.parsers import make_dirstate_tuple_CAPI
38 as make_dirstate_tuple_capi
39 as make_dirstate_tuple_capi
39 signature (
40 signature (
40 state: c_char,
41 state: c_char,
41 mode: c_int,
42 mode: c_int,
42 size: c_int,
43 size: c_int,
43 mtime: c_int,
44 mtime: c_int,
44 ) -> *mut RawPyObject
45 ) -> *mut RawPyObject
45 );
46 );
46
47
47 pub fn make_dirstate_tuple(
48 pub fn make_dirstate_tuple(
48 py: Python,
49 py: Python,
49 entry: &DirstateEntry,
50 entry: &DirstateEntry,
50 ) -> PyResult<PyObject> {
51 ) -> PyResult<PyObject> {
51 // might be silly to retrieve capsule function in hot loop
52 // might be silly to retrieve capsule function in hot loop
52 let make = make_dirstate_tuple_capi::retrieve(py)?;
53 let make = make_dirstate_tuple_capi::retrieve(py)?;
53
54
54 let &DirstateEntry {
55 let &DirstateEntry {
55 state,
56 state,
56 mode,
57 mode,
57 size,
58 size,
58 mtime,
59 mtime,
59 } = entry;
60 } = entry;
60 // Explicitly go through u8 first, then cast to platform-specific `c_char`
61 // Explicitly go through u8 first, then cast to platform-specific `c_char`
61 // because Into<u8> has a specific implementation while `as c_char` would
62 // because Into<u8> has a specific implementation while `as c_char` would
62 // just do a naive enum cast.
63 // just do a naive enum cast.
63 let state_code: u8 = state.into();
64 let state_code: u8 = state.into();
64
65
65 let maybe_obj = unsafe {
66 let maybe_obj = unsafe {
66 let ptr = make(state_code as c_char, mode, size, mtime);
67 let ptr = make(state_code as c_char, mode, size, mtime);
67 PyObject::from_owned_ptr_opt(py, ptr)
68 PyObject::from_owned_ptr_opt(py, ptr)
68 };
69 };
69 maybe_obj.ok_or_else(|| PyErr::fetch(py))
70 maybe_obj.ok_or_else(|| PyErr::fetch(py))
70 }
71 }
71
72
72 pub fn extract_dirstate(py: Python, dmap: &PyDict) -> Result<StateMap, PyErr> {
73 pub fn extract_dirstate(py: Python, dmap: &PyDict) -> Result<StateMap, PyErr> {
73 dmap.items(py)
74 dmap.items(py)
74 .iter()
75 .iter()
75 .map(|(filename, stats)| {
76 .map(|(filename, stats)| {
76 let stats = stats.extract::<PySequence>(py)?;
77 let stats = stats.extract::<PySequence>(py)?;
77 let state = stats.get_item(py, 0)?.extract::<PyBytes>(py)?;
78 let state = stats.get_item(py, 0)?.extract::<PyBytes>(py)?;
78 let state = EntryState::try_from(state.data(py)[0]).map_err(
79 let state = EntryState::try_from(state.data(py)[0]).map_err(
79 |e: DirstateParseError| {
80 |e: DirstateParseError| {
80 PyErr::new::<exc::ValueError, _>(py, e.to_string())
81 PyErr::new::<exc::ValueError, _>(py, e.to_string())
81 },
82 },
82 )?;
83 )?;
83 let mode = stats.get_item(py, 1)?.extract(py)?;
84 let mode = stats.get_item(py, 1)?.extract(py)?;
84 let size = stats.get_item(py, 2)?.extract(py)?;
85 let size = stats.get_item(py, 2)?.extract(py)?;
85 let mtime = stats.get_item(py, 3)?.extract(py)?;
86 let mtime = stats.get_item(py, 3)?.extract(py)?;
86 let filename = filename.extract::<PyBytes>(py)?;
87 let filename = filename.extract::<PyBytes>(py)?;
87 let filename = filename.data(py);
88 let filename = filename.data(py);
88 Ok((
89 Ok((
89 HgPathBuf::from(filename.to_owned()),
90 HgPathBuf::from(filename.to_owned()),
90 DirstateEntry {
91 DirstateEntry {
91 state,
92 state,
92 mode,
93 mode,
93 size,
94 size,
94 mtime,
95 mtime,
95 },
96 },
96 ))
97 ))
97 })
98 })
98 .collect()
99 .collect()
99 }
100 }
100
101
101 /// Create the module, with `__package__` given from parent
102 /// Create the module, with `__package__` given from parent
102 pub fn init_module(py: Python, package: &str) -> PyResult<PyModule> {
103 pub fn init_module(py: Python, package: &str) -> PyResult<PyModule> {
103 let dotted_name = &format!("{}.dirstate", package);
104 let dotted_name = &format!("{}.dirstate", package);
104 let m = PyModule::new(py, dotted_name)?;
105 let m = PyModule::new(py, dotted_name)?;
105
106
106 m.add(py, "__package__", package)?;
107 m.add(py, "__package__", package)?;
107 m.add(py, "__doc__", "Dirstate - Rust implementation")?;
108 m.add(py, "__doc__", "Dirstate - Rust implementation")?;
108
109
109 m.add_class::<Dirs>(py)?;
110 m.add_class::<Dirs>(py)?;
110 m.add_class::<DirstateMap>(py)?;
111 m.add_class::<DirstateMap>(py)?;
111 m.add(
112 m.add(
112 py,
113 py,
113 "status",
114 "status",
114 py_fn!(
115 py_fn!(
115 py,
116 py,
116 status_wrapper(
117 status_wrapper(
117 dmap: DirstateMap,
118 dmap: DirstateMap,
118 root_dir: PyObject,
119 root_dir: PyObject,
119 matcher: PyObject,
120 matcher: PyObject,
120 list_clean: bool,
121 list_clean: bool,
121 last_normal_time: i64,
122 last_normal_time: i64,
122 check_exec: bool
123 check_exec: bool
123 )
124 )
124 ),
125 ),
125 )?;
126 )?;
126
127
127 let sys = PyModule::import(py, "sys")?;
128 let sys = PyModule::import(py, "sys")?;
128 let sys_modules: PyDict = sys.get(py, "modules")?.extract(py)?;
129 let sys_modules: PyDict = sys.get(py, "modules")?.extract(py)?;
129 sys_modules.set_item(py, dotted_name, &m)?;
130 sys_modules.set_item(py, dotted_name, &m)?;
130
131
131 Ok(m)
132 Ok(m)
132 }
133 }
@@ -1,522 +1,577 b''
1 // dirstate_map.rs
1 // dirstate_map.rs
2 //
2 //
3 // Copyright 2019 Raphaël Gomès <rgomes@octobus.net>
3 // Copyright 2019 Raphaël Gomès <rgomes@octobus.net>
4 //
4 //
5 // This software may be used and distributed according to the terms of the
5 // This software may be used and distributed according to the terms of the
6 // GNU General Public License version 2 or any later version.
6 // GNU General Public License version 2 or any later version.
7
7
8 //! Bindings for the `hg::dirstate::dirstate_map` file provided by the
8 //! Bindings for the `hg::dirstate::dirstate_map` file provided by the
9 //! `hg-core` package.
9 //! `hg-core` package.
10
10
11 use std::cell::{Ref, RefCell};
11 use std::cell::{Ref, RefCell};
12 use std::convert::TryInto;
12 use std::convert::TryInto;
13 use std::time::Duration;
13 use std::time::Duration;
14
14
15 use cpython::{
15 use cpython::{
16 exc, ObjectProtocol, PyBool, PyBytes, PyClone, PyDict, PyErr, PyObject,
16 exc, ObjectProtocol, PyBool, PyBytes, PyClone, PyDict, PyErr, PyList,
17 PyResult, PyTuple, Python, PythonObject, ToPyObject,
17 PyObject, PyResult, PyString, PyTuple, Python, PythonObject, ToPyObject,
18 };
18 };
19
19
20 use crate::{
20 use crate::{
21 dirstate::copymap::{CopyMap, CopyMapItemsIterator, CopyMapKeysIterator},
21 dirstate::copymap::{CopyMap, CopyMapItemsIterator, CopyMapKeysIterator},
22 dirstate::non_normal_entries::NonNormalEntries,
22 dirstate::{dirs_multiset::Dirs, make_dirstate_tuple},
23 dirstate::{dirs_multiset::Dirs, make_dirstate_tuple},
23 ref_sharing::{PyLeaked, PySharedRefCell},
24 ref_sharing::{PyLeaked, PySharedRefCell},
24 };
25 };
25 use hg::{
26 use hg::{
26 utils::hg_path::{HgPath, HgPathBuf},
27 utils::hg_path::{HgPath, HgPathBuf},
27 DirsMultiset, DirstateEntry, DirstateMap as RustDirstateMap,
28 DirsMultiset, DirstateEntry, DirstateMap as RustDirstateMap,
28 DirstateMapError, DirstateParents, DirstateParseError, EntryState,
29 DirstateMapError, DirstateParents, DirstateParseError, EntryState,
29 StateMapIter, PARENT_SIZE,
30 StateMapIter, PARENT_SIZE,
30 };
31 };
31
32
32 // TODO
33 // TODO
33 // This object needs to share references to multiple members of its Rust
34 // This object needs to share references to multiple members of its Rust
34 // inner struct, namely `copy_map`, `dirs` and `all_dirs`.
35 // inner struct, namely `copy_map`, `dirs` and `all_dirs`.
35 // Right now `CopyMap` is done, but it needs to have an explicit reference
36 // Right now `CopyMap` is done, but it needs to have an explicit reference
36 // to `RustDirstateMap` which itself needs to have an encapsulation for
37 // to `RustDirstateMap` which itself needs to have an encapsulation for
37 // every method in `CopyMap` (copymapcopy, etc.).
38 // every method in `CopyMap` (copymapcopy, etc.).
38 // This is ugly and hard to maintain.
39 // This is ugly and hard to maintain.
39 // The same logic applies to `dirs` and `all_dirs`, however the `Dirs`
40 // The same logic applies to `dirs` and `all_dirs`, however the `Dirs`
40 // `py_class!` is already implemented and does not mention
41 // `py_class!` is already implemented and does not mention
41 // `RustDirstateMap`, rightfully so.
42 // `RustDirstateMap`, rightfully so.
42 // All attributes also have to have a separate refcount data attribute for
43 // All attributes also have to have a separate refcount data attribute for
43 // leaks, with all methods that go along for reference sharing.
44 // leaks, with all methods that go along for reference sharing.
44 py_class!(pub class DirstateMap |py| {
45 py_class!(pub class DirstateMap |py| {
45 data inner: PySharedRefCell<RustDirstateMap>;
46 data inner: PySharedRefCell<RustDirstateMap>;
46
47
47 def __new__(_cls, _root: PyObject) -> PyResult<Self> {
48 def __new__(_cls, _root: PyObject) -> PyResult<Self> {
48 let inner = RustDirstateMap::default();
49 let inner = RustDirstateMap::default();
49 Self::create_instance(
50 Self::create_instance(
50 py,
51 py,
51 PySharedRefCell::new(inner),
52 PySharedRefCell::new(inner),
52 )
53 )
53 }
54 }
54
55
55 def clear(&self) -> PyResult<PyObject> {
56 def clear(&self) -> PyResult<PyObject> {
56 self.inner_shared(py).borrow_mut()?.clear();
57 self.inner_shared(py).borrow_mut()?.clear();
57 Ok(py.None())
58 Ok(py.None())
58 }
59 }
59
60
60 def get(
61 def get(
61 &self,
62 &self,
62 key: PyObject,
63 key: PyObject,
63 default: Option<PyObject> = None
64 default: Option<PyObject> = None
64 ) -> PyResult<Option<PyObject>> {
65 ) -> PyResult<Option<PyObject>> {
65 let key = key.extract::<PyBytes>(py)?;
66 let key = key.extract::<PyBytes>(py)?;
66 match self.inner_shared(py).borrow().get(HgPath::new(key.data(py))) {
67 match self.inner_shared(py).borrow().get(HgPath::new(key.data(py))) {
67 Some(entry) => {
68 Some(entry) => {
68 Ok(Some(make_dirstate_tuple(py, entry)?))
69 Ok(Some(make_dirstate_tuple(py, entry)?))
69 },
70 },
70 None => Ok(default)
71 None => Ok(default)
71 }
72 }
72 }
73 }
73
74
74 def addfile(
75 def addfile(
75 &self,
76 &self,
76 f: PyObject,
77 f: PyObject,
77 oldstate: PyObject,
78 oldstate: PyObject,
78 state: PyObject,
79 state: PyObject,
79 mode: PyObject,
80 mode: PyObject,
80 size: PyObject,
81 size: PyObject,
81 mtime: PyObject
82 mtime: PyObject
82 ) -> PyResult<PyObject> {
83 ) -> PyResult<PyObject> {
83 self.inner_shared(py).borrow_mut()?.add_file(
84 self.inner_shared(py).borrow_mut()?.add_file(
84 HgPath::new(f.extract::<PyBytes>(py)?.data(py)),
85 HgPath::new(f.extract::<PyBytes>(py)?.data(py)),
85 oldstate.extract::<PyBytes>(py)?.data(py)[0]
86 oldstate.extract::<PyBytes>(py)?.data(py)[0]
86 .try_into()
87 .try_into()
87 .map_err(|e: DirstateParseError| {
88 .map_err(|e: DirstateParseError| {
88 PyErr::new::<exc::ValueError, _>(py, e.to_string())
89 PyErr::new::<exc::ValueError, _>(py, e.to_string())
89 })?,
90 })?,
90 DirstateEntry {
91 DirstateEntry {
91 state: state.extract::<PyBytes>(py)?.data(py)[0]
92 state: state.extract::<PyBytes>(py)?.data(py)[0]
92 .try_into()
93 .try_into()
93 .map_err(|e: DirstateParseError| {
94 .map_err(|e: DirstateParseError| {
94 PyErr::new::<exc::ValueError, _>(py, e.to_string())
95 PyErr::new::<exc::ValueError, _>(py, e.to_string())
95 })?,
96 })?,
96 mode: mode.extract(py)?,
97 mode: mode.extract(py)?,
97 size: size.extract(py)?,
98 size: size.extract(py)?,
98 mtime: mtime.extract(py)?,
99 mtime: mtime.extract(py)?,
99 },
100 },
100 ).and(Ok(py.None())).or_else(|e: DirstateMapError| {
101 ).and(Ok(py.None())).or_else(|e: DirstateMapError| {
101 Err(PyErr::new::<exc::ValueError, _>(py, e.to_string()))
102 Err(PyErr::new::<exc::ValueError, _>(py, e.to_string()))
102 })
103 })
103 }
104 }
104
105
105 def removefile(
106 def removefile(
106 &self,
107 &self,
107 f: PyObject,
108 f: PyObject,
108 oldstate: PyObject,
109 oldstate: PyObject,
109 size: PyObject
110 size: PyObject
110 ) -> PyResult<PyObject> {
111 ) -> PyResult<PyObject> {
111 self.inner_shared(py).borrow_mut()?
112 self.inner_shared(py).borrow_mut()?
112 .remove_file(
113 .remove_file(
113 HgPath::new(f.extract::<PyBytes>(py)?.data(py)),
114 HgPath::new(f.extract::<PyBytes>(py)?.data(py)),
114 oldstate.extract::<PyBytes>(py)?.data(py)[0]
115 oldstate.extract::<PyBytes>(py)?.data(py)[0]
115 .try_into()
116 .try_into()
116 .map_err(|e: DirstateParseError| {
117 .map_err(|e: DirstateParseError| {
117 PyErr::new::<exc::ValueError, _>(py, e.to_string())
118 PyErr::new::<exc::ValueError, _>(py, e.to_string())
118 })?,
119 })?,
119 size.extract(py)?,
120 size.extract(py)?,
120 )
121 )
121 .or_else(|_| {
122 .or_else(|_| {
122 Err(PyErr::new::<exc::OSError, _>(
123 Err(PyErr::new::<exc::OSError, _>(
123 py,
124 py,
124 "Dirstate error".to_string(),
125 "Dirstate error".to_string(),
125 ))
126 ))
126 })?;
127 })?;
127 Ok(py.None())
128 Ok(py.None())
128 }
129 }
129
130
130 def dropfile(
131 def dropfile(
131 &self,
132 &self,
132 f: PyObject,
133 f: PyObject,
133 oldstate: PyObject
134 oldstate: PyObject
134 ) -> PyResult<PyBool> {
135 ) -> PyResult<PyBool> {
135 self.inner_shared(py).borrow_mut()?
136 self.inner_shared(py).borrow_mut()?
136 .drop_file(
137 .drop_file(
137 HgPath::new(f.extract::<PyBytes>(py)?.data(py)),
138 HgPath::new(f.extract::<PyBytes>(py)?.data(py)),
138 oldstate.extract::<PyBytes>(py)?.data(py)[0]
139 oldstate.extract::<PyBytes>(py)?.data(py)[0]
139 .try_into()
140 .try_into()
140 .map_err(|e: DirstateParseError| {
141 .map_err(|e: DirstateParseError| {
141 PyErr::new::<exc::ValueError, _>(py, e.to_string())
142 PyErr::new::<exc::ValueError, _>(py, e.to_string())
142 })?,
143 })?,
143 )
144 )
144 .and_then(|b| Ok(b.to_py_object(py)))
145 .and_then(|b| Ok(b.to_py_object(py)))
145 .or_else(|_| {
146 .or_else(|_| {
146 Err(PyErr::new::<exc::OSError, _>(
147 Err(PyErr::new::<exc::OSError, _>(
147 py,
148 py,
148 "Dirstate error".to_string(),
149 "Dirstate error".to_string(),
149 ))
150 ))
150 })
151 })
151 }
152 }
152
153
153 def clearambiguoustimes(
154 def clearambiguoustimes(
154 &self,
155 &self,
155 files: PyObject,
156 files: PyObject,
156 now: PyObject
157 now: PyObject
157 ) -> PyResult<PyObject> {
158 ) -> PyResult<PyObject> {
158 let files: PyResult<Vec<HgPathBuf>> = files
159 let files: PyResult<Vec<HgPathBuf>> = files
159 .iter(py)?
160 .iter(py)?
160 .map(|filename| {
161 .map(|filename| {
161 Ok(HgPathBuf::from_bytes(
162 Ok(HgPathBuf::from_bytes(
162 filename?.extract::<PyBytes>(py)?.data(py),
163 filename?.extract::<PyBytes>(py)?.data(py),
163 ))
164 ))
164 })
165 })
165 .collect();
166 .collect();
166 self.inner_shared(py).borrow_mut()?
167 self.inner_shared(py).borrow_mut()?
167 .clear_ambiguous_times(files?, now.extract(py)?);
168 .clear_ambiguous_times(files?, now.extract(py)?);
168 Ok(py.None())
169 Ok(py.None())
169 }
170 }
170
171
171 // TODO share the reference
172 def other_parent_entries(&self) -> PyResult<PyObject> {
172 def nonnormalentries(&self) -> PyResult<PyObject> {
173 let mut inner_shared = self.inner_shared(py).borrow_mut()?;
173 let (non_normal, other_parent) =
174 let (_, other_parent) =
174 self.inner_shared(py).borrow().non_normal_other_parent_entries();
175 inner_shared.get_non_normal_other_parent_entries();
175
176
176 let locals = PyDict::new(py);
177 let locals = PyDict::new(py);
177 locals.set_item(
178 locals.set_item(
178 py,
179 py,
179 "non_normal",
180 non_normal
181 .iter()
182 .map(|v| PyBytes::new(py, v.as_ref()))
183 .collect::<Vec<PyBytes>>()
184 .to_py_object(py),
185 )?;
186 locals.set_item(
187 py,
188 "other_parent",
180 "other_parent",
189 other_parent
181 other_parent.as_ref()
182 .unwrap()
190 .iter()
183 .iter()
191 .map(|v| PyBytes::new(py, v.as_ref()))
184 .map(|v| PyBytes::new(py, v.as_ref()))
192 .collect::<Vec<PyBytes>>()
185 .collect::<Vec<PyBytes>>()
193 .to_py_object(py),
186 .to_py_object(py),
194 )?;
187 )?;
195
188
196 py.eval("set(non_normal), set(other_parent)", None, Some(&locals))
189 py.eval("set(other_parent)", None, Some(&locals))
190 }
191
192 def non_normal_entries(&self) -> PyResult<NonNormalEntries> {
193 NonNormalEntries::from_inner(py, self.clone_ref(py))
194 }
195
196 def non_normal_entries_contains(&self, key: PyObject) -> PyResult<bool> {
197 let key = key.extract::<PyBytes>(py)?;
198 Ok(self
199 .inner_shared(py)
200 .borrow_mut()?
201 .get_non_normal_other_parent_entries().0
202 .as_ref()
203 .unwrap()
204 .contains(HgPath::new(key.data(py))))
205 }
206
207 def non_normal_entries_display(&self) -> PyResult<PyString> {
208 Ok(
209 PyString::new(
210 py,
211 &format!(
212 "NonNormalEntries: {:?}",
213 self
214 .inner_shared(py)
215 .borrow_mut()?
216 .get_non_normal_other_parent_entries().0
217 .as_ref()
218 .unwrap().iter().map(|o| o))
219 )
220 )
221 }
222
223 def non_normal_entries_remove(&self, key: PyObject) -> PyResult<PyObject> {
224 let key = key.extract::<PyBytes>(py)?;
225 self
226 .inner_shared(py)
227 .borrow_mut()?
228 .non_normal_entries_remove(HgPath::new(key.data(py)));
229 Ok(py.None())
230 }
231
232 def non_normal_entries_union(&self, other: PyObject) -> PyResult<PyList> {
233 let other: PyResult<_> = other.iter(py)?
234 .map(|f| {
235 Ok(HgPathBuf::from_bytes(
236 f?.extract::<PyBytes>(py)?.data(py),
237 ))
238 })
239 .collect();
240
241 let res = self
242 .inner_shared(py)
243 .borrow_mut()?
244 .non_normal_entries_union(other?);
245
246 let ret = PyList::new(py, &[]);
247 for (i, filename) in res.iter().enumerate() {
248 let as_pystring = PyBytes::new(py, filename.as_bytes());
249 ret.insert_item(py, i, as_pystring.into_object());
250 }
251 Ok(ret)
197 }
252 }
198
253
199 def hastrackeddir(&self, d: PyObject) -> PyResult<PyBool> {
254 def hastrackeddir(&self, d: PyObject) -> PyResult<PyBool> {
200 let d = d.extract::<PyBytes>(py)?;
255 let d = d.extract::<PyBytes>(py)?;
201 Ok(self.inner_shared(py).borrow_mut()?
256 Ok(self.inner_shared(py).borrow_mut()?
202 .has_tracked_dir(HgPath::new(d.data(py)))
257 .has_tracked_dir(HgPath::new(d.data(py)))
203 .map_err(|e| {
258 .map_err(|e| {
204 PyErr::new::<exc::ValueError, _>(py, e.to_string())
259 PyErr::new::<exc::ValueError, _>(py, e.to_string())
205 })?
260 })?
206 .to_py_object(py))
261 .to_py_object(py))
207 }
262 }
208
263
209 def hasdir(&self, d: PyObject) -> PyResult<PyBool> {
264 def hasdir(&self, d: PyObject) -> PyResult<PyBool> {
210 let d = d.extract::<PyBytes>(py)?;
265 let d = d.extract::<PyBytes>(py)?;
211 Ok(self.inner_shared(py).borrow_mut()?
266 Ok(self.inner_shared(py).borrow_mut()?
212 .has_dir(HgPath::new(d.data(py)))
267 .has_dir(HgPath::new(d.data(py)))
213 .map_err(|e| {
268 .map_err(|e| {
214 PyErr::new::<exc::ValueError, _>(py, e.to_string())
269 PyErr::new::<exc::ValueError, _>(py, e.to_string())
215 })?
270 })?
216 .to_py_object(py))
271 .to_py_object(py))
217 }
272 }
218
273
219 def parents(&self, st: PyObject) -> PyResult<PyTuple> {
274 def parents(&self, st: PyObject) -> PyResult<PyTuple> {
220 self.inner_shared(py).borrow_mut()?
275 self.inner_shared(py).borrow_mut()?
221 .parents(st.extract::<PyBytes>(py)?.data(py))
276 .parents(st.extract::<PyBytes>(py)?.data(py))
222 .and_then(|d| {
277 .and_then(|d| {
223 Ok((PyBytes::new(py, &d.p1), PyBytes::new(py, &d.p2))
278 Ok((PyBytes::new(py, &d.p1), PyBytes::new(py, &d.p2))
224 .to_py_object(py))
279 .to_py_object(py))
225 })
280 })
226 .or_else(|_| {
281 .or_else(|_| {
227 Err(PyErr::new::<exc::OSError, _>(
282 Err(PyErr::new::<exc::OSError, _>(
228 py,
283 py,
229 "Dirstate error".to_string(),
284 "Dirstate error".to_string(),
230 ))
285 ))
231 })
286 })
232 }
287 }
233
288
234 def setparents(&self, p1: PyObject, p2: PyObject) -> PyResult<PyObject> {
289 def setparents(&self, p1: PyObject, p2: PyObject) -> PyResult<PyObject> {
235 let p1 = extract_node_id(py, &p1)?;
290 let p1 = extract_node_id(py, &p1)?;
236 let p2 = extract_node_id(py, &p2)?;
291 let p2 = extract_node_id(py, &p2)?;
237
292
238 self.inner_shared(py).borrow_mut()?
293 self.inner_shared(py).borrow_mut()?
239 .set_parents(&DirstateParents { p1, p2 });
294 .set_parents(&DirstateParents { p1, p2 });
240 Ok(py.None())
295 Ok(py.None())
241 }
296 }
242
297
243 def read(&self, st: PyObject) -> PyResult<Option<PyObject>> {
298 def read(&self, st: PyObject) -> PyResult<Option<PyObject>> {
244 match self.inner_shared(py).borrow_mut()?
299 match self.inner_shared(py).borrow_mut()?
245 .read(st.extract::<PyBytes>(py)?.data(py))
300 .read(st.extract::<PyBytes>(py)?.data(py))
246 {
301 {
247 Ok(Some(parents)) => Ok(Some(
302 Ok(Some(parents)) => Ok(Some(
248 (PyBytes::new(py, &parents.p1), PyBytes::new(py, &parents.p2))
303 (PyBytes::new(py, &parents.p1), PyBytes::new(py, &parents.p2))
249 .to_py_object(py)
304 .to_py_object(py)
250 .into_object(),
305 .into_object(),
251 )),
306 )),
252 Ok(None) => Ok(Some(py.None())),
307 Ok(None) => Ok(Some(py.None())),
253 Err(_) => Err(PyErr::new::<exc::OSError, _>(
308 Err(_) => Err(PyErr::new::<exc::OSError, _>(
254 py,
309 py,
255 "Dirstate error".to_string(),
310 "Dirstate error".to_string(),
256 )),
311 )),
257 }
312 }
258 }
313 }
259 def write(
314 def write(
260 &self,
315 &self,
261 p1: PyObject,
316 p1: PyObject,
262 p2: PyObject,
317 p2: PyObject,
263 now: PyObject
318 now: PyObject
264 ) -> PyResult<PyBytes> {
319 ) -> PyResult<PyBytes> {
265 let now = Duration::new(now.extract(py)?, 0);
320 let now = Duration::new(now.extract(py)?, 0);
266 let parents = DirstateParents {
321 let parents = DirstateParents {
267 p1: extract_node_id(py, &p1)?,
322 p1: extract_node_id(py, &p1)?,
268 p2: extract_node_id(py, &p2)?,
323 p2: extract_node_id(py, &p2)?,
269 };
324 };
270
325
271 match self.inner_shared(py).borrow_mut()?.pack(parents, now) {
326 match self.inner_shared(py).borrow_mut()?.pack(parents, now) {
272 Ok(packed) => Ok(PyBytes::new(py, &packed)),
327 Ok(packed) => Ok(PyBytes::new(py, &packed)),
273 Err(_) => Err(PyErr::new::<exc::OSError, _>(
328 Err(_) => Err(PyErr::new::<exc::OSError, _>(
274 py,
329 py,
275 "Dirstate error".to_string(),
330 "Dirstate error".to_string(),
276 )),
331 )),
277 }
332 }
278 }
333 }
279
334
280 def filefoldmapasdict(&self) -> PyResult<PyDict> {
335 def filefoldmapasdict(&self) -> PyResult<PyDict> {
281 let dict = PyDict::new(py);
336 let dict = PyDict::new(py);
282 for (key, value) in
337 for (key, value) in
283 self.inner_shared(py).borrow_mut()?.build_file_fold_map().iter()
338 self.inner_shared(py).borrow_mut()?.build_file_fold_map().iter()
284 {
339 {
285 dict.set_item(py, key.as_ref().to_vec(), value.as_ref().to_vec())?;
340 dict.set_item(py, key.as_ref().to_vec(), value.as_ref().to_vec())?;
286 }
341 }
287 Ok(dict)
342 Ok(dict)
288 }
343 }
289
344
290 def __len__(&self) -> PyResult<usize> {
345 def __len__(&self) -> PyResult<usize> {
291 Ok(self.inner_shared(py).borrow().len())
346 Ok(self.inner_shared(py).borrow().len())
292 }
347 }
293
348
294 def __contains__(&self, key: PyObject) -> PyResult<bool> {
349 def __contains__(&self, key: PyObject) -> PyResult<bool> {
295 let key = key.extract::<PyBytes>(py)?;
350 let key = key.extract::<PyBytes>(py)?;
296 Ok(self.inner_shared(py).borrow().contains_key(HgPath::new(key.data(py))))
351 Ok(self.inner_shared(py).borrow().contains_key(HgPath::new(key.data(py))))
297 }
352 }
298
353
299 def __getitem__(&self, key: PyObject) -> PyResult<PyObject> {
354 def __getitem__(&self, key: PyObject) -> PyResult<PyObject> {
300 let key = key.extract::<PyBytes>(py)?;
355 let key = key.extract::<PyBytes>(py)?;
301 let key = HgPath::new(key.data(py));
356 let key = HgPath::new(key.data(py));
302 match self.inner_shared(py).borrow().get(key) {
357 match self.inner_shared(py).borrow().get(key) {
303 Some(entry) => {
358 Some(entry) => {
304 Ok(make_dirstate_tuple(py, entry)?)
359 Ok(make_dirstate_tuple(py, entry)?)
305 },
360 },
306 None => Err(PyErr::new::<exc::KeyError, _>(
361 None => Err(PyErr::new::<exc::KeyError, _>(
307 py,
362 py,
308 String::from_utf8_lossy(key.as_bytes()),
363 String::from_utf8_lossy(key.as_bytes()),
309 )),
364 )),
310 }
365 }
311 }
366 }
312
367
313 def keys(&self) -> PyResult<DirstateMapKeysIterator> {
368 def keys(&self) -> PyResult<DirstateMapKeysIterator> {
314 let leaked_ref = self.inner_shared(py).leak_immutable();
369 let leaked_ref = self.inner_shared(py).leak_immutable();
315 DirstateMapKeysIterator::from_inner(
370 DirstateMapKeysIterator::from_inner(
316 py,
371 py,
317 unsafe { leaked_ref.map(py, |o| o.iter()) },
372 unsafe { leaked_ref.map(py, |o| o.iter()) },
318 )
373 )
319 }
374 }
320
375
321 def items(&self) -> PyResult<DirstateMapItemsIterator> {
376 def items(&self) -> PyResult<DirstateMapItemsIterator> {
322 let leaked_ref = self.inner_shared(py).leak_immutable();
377 let leaked_ref = self.inner_shared(py).leak_immutable();
323 DirstateMapItemsIterator::from_inner(
378 DirstateMapItemsIterator::from_inner(
324 py,
379 py,
325 unsafe { leaked_ref.map(py, |o| o.iter()) },
380 unsafe { leaked_ref.map(py, |o| o.iter()) },
326 )
381 )
327 }
382 }
328
383
329 def __iter__(&self) -> PyResult<DirstateMapKeysIterator> {
384 def __iter__(&self) -> PyResult<DirstateMapKeysIterator> {
330 let leaked_ref = self.inner_shared(py).leak_immutable();
385 let leaked_ref = self.inner_shared(py).leak_immutable();
331 DirstateMapKeysIterator::from_inner(
386 DirstateMapKeysIterator::from_inner(
332 py,
387 py,
333 unsafe { leaked_ref.map(py, |o| o.iter()) },
388 unsafe { leaked_ref.map(py, |o| o.iter()) },
334 )
389 )
335 }
390 }
336
391
337 def getdirs(&self) -> PyResult<Dirs> {
392 def getdirs(&self) -> PyResult<Dirs> {
338 // TODO don't copy, share the reference
393 // TODO don't copy, share the reference
339 self.inner_shared(py).borrow_mut()?.set_dirs()
394 self.inner_shared(py).borrow_mut()?.set_dirs()
340 .map_err(|e| {
395 .map_err(|e| {
341 PyErr::new::<exc::ValueError, _>(py, e.to_string())
396 PyErr::new::<exc::ValueError, _>(py, e.to_string())
342 })?;
397 })?;
343 Dirs::from_inner(
398 Dirs::from_inner(
344 py,
399 py,
345 DirsMultiset::from_dirstate(
400 DirsMultiset::from_dirstate(
346 &self.inner_shared(py).borrow(),
401 &self.inner_shared(py).borrow(),
347 Some(EntryState::Removed),
402 Some(EntryState::Removed),
348 )
403 )
349 .map_err(|e| {
404 .map_err(|e| {
350 PyErr::new::<exc::ValueError, _>(py, e.to_string())
405 PyErr::new::<exc::ValueError, _>(py, e.to_string())
351 })?,
406 })?,
352 )
407 )
353 }
408 }
354 def getalldirs(&self) -> PyResult<Dirs> {
409 def getalldirs(&self) -> PyResult<Dirs> {
355 // TODO don't copy, share the reference
410 // TODO don't copy, share the reference
356 self.inner_shared(py).borrow_mut()?.set_all_dirs()
411 self.inner_shared(py).borrow_mut()?.set_all_dirs()
357 .map_err(|e| {
412 .map_err(|e| {
358 PyErr::new::<exc::ValueError, _>(py, e.to_string())
413 PyErr::new::<exc::ValueError, _>(py, e.to_string())
359 })?;
414 })?;
360 Dirs::from_inner(
415 Dirs::from_inner(
361 py,
416 py,
362 DirsMultiset::from_dirstate(
417 DirsMultiset::from_dirstate(
363 &self.inner_shared(py).borrow(),
418 &self.inner_shared(py).borrow(),
364 None,
419 None,
365 ).map_err(|e| {
420 ).map_err(|e| {
366 PyErr::new::<exc::ValueError, _>(py, e.to_string())
421 PyErr::new::<exc::ValueError, _>(py, e.to_string())
367 })?,
422 })?,
368 )
423 )
369 }
424 }
370
425
371 // TODO all copymap* methods, see docstring above
426 // TODO all copymap* methods, see docstring above
372 def copymapcopy(&self) -> PyResult<PyDict> {
427 def copymapcopy(&self) -> PyResult<PyDict> {
373 let dict = PyDict::new(py);
428 let dict = PyDict::new(py);
374 for (key, value) in self.inner_shared(py).borrow().copy_map.iter() {
429 for (key, value) in self.inner_shared(py).borrow().copy_map.iter() {
375 dict.set_item(
430 dict.set_item(
376 py,
431 py,
377 PyBytes::new(py, key.as_ref()),
432 PyBytes::new(py, key.as_ref()),
378 PyBytes::new(py, value.as_ref()),
433 PyBytes::new(py, value.as_ref()),
379 )?;
434 )?;
380 }
435 }
381 Ok(dict)
436 Ok(dict)
382 }
437 }
383
438
384 def copymapgetitem(&self, key: PyObject) -> PyResult<PyBytes> {
439 def copymapgetitem(&self, key: PyObject) -> PyResult<PyBytes> {
385 let key = key.extract::<PyBytes>(py)?;
440 let key = key.extract::<PyBytes>(py)?;
386 match self.inner_shared(py).borrow().copy_map.get(HgPath::new(key.data(py))) {
441 match self.inner_shared(py).borrow().copy_map.get(HgPath::new(key.data(py))) {
387 Some(copy) => Ok(PyBytes::new(py, copy.as_ref())),
442 Some(copy) => Ok(PyBytes::new(py, copy.as_ref())),
388 None => Err(PyErr::new::<exc::KeyError, _>(
443 None => Err(PyErr::new::<exc::KeyError, _>(
389 py,
444 py,
390 String::from_utf8_lossy(key.data(py)),
445 String::from_utf8_lossy(key.data(py)),
391 )),
446 )),
392 }
447 }
393 }
448 }
394 def copymap(&self) -> PyResult<CopyMap> {
449 def copymap(&self) -> PyResult<CopyMap> {
395 CopyMap::from_inner(py, self.clone_ref(py))
450 CopyMap::from_inner(py, self.clone_ref(py))
396 }
451 }
397
452
398 def copymaplen(&self) -> PyResult<usize> {
453 def copymaplen(&self) -> PyResult<usize> {
399 Ok(self.inner_shared(py).borrow().copy_map.len())
454 Ok(self.inner_shared(py).borrow().copy_map.len())
400 }
455 }
401 def copymapcontains(&self, key: PyObject) -> PyResult<bool> {
456 def copymapcontains(&self, key: PyObject) -> PyResult<bool> {
402 let key = key.extract::<PyBytes>(py)?;
457 let key = key.extract::<PyBytes>(py)?;
403 Ok(self
458 Ok(self
404 .inner_shared(py)
459 .inner_shared(py)
405 .borrow()
460 .borrow()
406 .copy_map
461 .copy_map
407 .contains_key(HgPath::new(key.data(py))))
462 .contains_key(HgPath::new(key.data(py))))
408 }
463 }
409 def copymapget(
464 def copymapget(
410 &self,
465 &self,
411 key: PyObject,
466 key: PyObject,
412 default: Option<PyObject>
467 default: Option<PyObject>
413 ) -> PyResult<Option<PyObject>> {
468 ) -> PyResult<Option<PyObject>> {
414 let key = key.extract::<PyBytes>(py)?;
469 let key = key.extract::<PyBytes>(py)?;
415 match self
470 match self
416 .inner_shared(py)
471 .inner_shared(py)
417 .borrow()
472 .borrow()
418 .copy_map
473 .copy_map
419 .get(HgPath::new(key.data(py)))
474 .get(HgPath::new(key.data(py)))
420 {
475 {
421 Some(copy) => Ok(Some(
476 Some(copy) => Ok(Some(
422 PyBytes::new(py, copy.as_ref()).into_object(),
477 PyBytes::new(py, copy.as_ref()).into_object(),
423 )),
478 )),
424 None => Ok(default),
479 None => Ok(default),
425 }
480 }
426 }
481 }
427 def copymapsetitem(
482 def copymapsetitem(
428 &self,
483 &self,
429 key: PyObject,
484 key: PyObject,
430 value: PyObject
485 value: PyObject
431 ) -> PyResult<PyObject> {
486 ) -> PyResult<PyObject> {
432 let key = key.extract::<PyBytes>(py)?;
487 let key = key.extract::<PyBytes>(py)?;
433 let value = value.extract::<PyBytes>(py)?;
488 let value = value.extract::<PyBytes>(py)?;
434 self.inner_shared(py).borrow_mut()?.copy_map.insert(
489 self.inner_shared(py).borrow_mut()?.copy_map.insert(
435 HgPathBuf::from_bytes(key.data(py)),
490 HgPathBuf::from_bytes(key.data(py)),
436 HgPathBuf::from_bytes(value.data(py)),
491 HgPathBuf::from_bytes(value.data(py)),
437 );
492 );
438 Ok(py.None())
493 Ok(py.None())
439 }
494 }
440 def copymappop(
495 def copymappop(
441 &self,
496 &self,
442 key: PyObject,
497 key: PyObject,
443 default: Option<PyObject>
498 default: Option<PyObject>
444 ) -> PyResult<Option<PyObject>> {
499 ) -> PyResult<Option<PyObject>> {
445 let key = key.extract::<PyBytes>(py)?;
500 let key = key.extract::<PyBytes>(py)?;
446 match self
501 match self
447 .inner_shared(py)
502 .inner_shared(py)
448 .borrow_mut()?
503 .borrow_mut()?
449 .copy_map
504 .copy_map
450 .remove(HgPath::new(key.data(py)))
505 .remove(HgPath::new(key.data(py)))
451 {
506 {
452 Some(_) => Ok(None),
507 Some(_) => Ok(None),
453 None => Ok(default),
508 None => Ok(default),
454 }
509 }
455 }
510 }
456
511
457 def copymapiter(&self) -> PyResult<CopyMapKeysIterator> {
512 def copymapiter(&self) -> PyResult<CopyMapKeysIterator> {
458 let leaked_ref = self.inner_shared(py).leak_immutable();
513 let leaked_ref = self.inner_shared(py).leak_immutable();
459 CopyMapKeysIterator::from_inner(
514 CopyMapKeysIterator::from_inner(
460 py,
515 py,
461 unsafe { leaked_ref.map(py, |o| o.copy_map.iter()) },
516 unsafe { leaked_ref.map(py, |o| o.copy_map.iter()) },
462 )
517 )
463 }
518 }
464
519
465 def copymapitemsiter(&self) -> PyResult<CopyMapItemsIterator> {
520 def copymapitemsiter(&self) -> PyResult<CopyMapItemsIterator> {
466 let leaked_ref = self.inner_shared(py).leak_immutable();
521 let leaked_ref = self.inner_shared(py).leak_immutable();
467 CopyMapItemsIterator::from_inner(
522 CopyMapItemsIterator::from_inner(
468 py,
523 py,
469 unsafe { leaked_ref.map(py, |o| o.copy_map.iter()) },
524 unsafe { leaked_ref.map(py, |o| o.copy_map.iter()) },
470 )
525 )
471 }
526 }
472
527
473 });
528 });
474
529
475 impl DirstateMap {
530 impl DirstateMap {
476 pub fn get_inner<'a>(
531 pub fn get_inner<'a>(
477 &'a self,
532 &'a self,
478 py: Python<'a>,
533 py: Python<'a>,
479 ) -> Ref<'a, RustDirstateMap> {
534 ) -> Ref<'a, RustDirstateMap> {
480 self.inner_shared(py).borrow()
535 self.inner_shared(py).borrow()
481 }
536 }
482 fn translate_key(
537 fn translate_key(
483 py: Python,
538 py: Python,
484 res: (&HgPathBuf, &DirstateEntry),
539 res: (&HgPathBuf, &DirstateEntry),
485 ) -> PyResult<Option<PyBytes>> {
540 ) -> PyResult<Option<PyBytes>> {
486 Ok(Some(PyBytes::new(py, res.0.as_ref())))
541 Ok(Some(PyBytes::new(py, res.0.as_ref())))
487 }
542 }
488 fn translate_key_value(
543 fn translate_key_value(
489 py: Python,
544 py: Python,
490 res: (&HgPathBuf, &DirstateEntry),
545 res: (&HgPathBuf, &DirstateEntry),
491 ) -> PyResult<Option<(PyBytes, PyObject)>> {
546 ) -> PyResult<Option<(PyBytes, PyObject)>> {
492 let (f, entry) = res;
547 let (f, entry) = res;
493 Ok(Some((
548 Ok(Some((
494 PyBytes::new(py, f.as_ref()),
549 PyBytes::new(py, f.as_ref()),
495 make_dirstate_tuple(py, entry)?,
550 make_dirstate_tuple(py, entry)?,
496 )))
551 )))
497 }
552 }
498 }
553 }
499
554
500 py_shared_ref!(DirstateMap, RustDirstateMap, inner, inner_shared);
555 py_shared_ref!(DirstateMap, RustDirstateMap, inner, inner_shared);
501
556
502 py_shared_iterator!(
557 py_shared_iterator!(
503 DirstateMapKeysIterator,
558 DirstateMapKeysIterator,
504 PyLeaked<StateMapIter<'static>>,
559 PyLeaked<StateMapIter<'static>>,
505 DirstateMap::translate_key,
560 DirstateMap::translate_key,
506 Option<PyBytes>
561 Option<PyBytes>
507 );
562 );
508
563
509 py_shared_iterator!(
564 py_shared_iterator!(
510 DirstateMapItemsIterator,
565 DirstateMapItemsIterator,
511 PyLeaked<StateMapIter<'static>>,
566 PyLeaked<StateMapIter<'static>>,
512 DirstateMap::translate_key_value,
567 DirstateMap::translate_key_value,
513 Option<(PyBytes, PyObject)>
568 Option<(PyBytes, PyObject)>
514 );
569 );
515
570
516 fn extract_node_id(py: Python, obj: &PyObject) -> PyResult<[u8; PARENT_SIZE]> {
571 fn extract_node_id(py: Python, obj: &PyObject) -> PyResult<[u8; PARENT_SIZE]> {
517 let bytes = obj.extract::<PyBytes>(py)?;
572 let bytes = obj.extract::<PyBytes>(py)?;
518 match bytes.data(py).try_into() {
573 match bytes.data(py).try_into() {
519 Ok(s) => Ok(s),
574 Ok(s) => Ok(s),
520 Err(e) => Err(PyErr::new::<exc::ValueError, _>(py, e.to_string())),
575 Err(e) => Err(PyErr::new::<exc::ValueError, _>(py, e.to_string())),
521 }
576 }
522 }
577 }
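To make the Python-visible methods added to `DirstateMap` above concrete, here is a hedged usage sketch; `dmap` is assumed to be an already-populated `DirstateMap` instance from these bindings, and the byte-string path b'foo' is illustrative only.

# Hedged sketch of the new DirstateMap methods from the Python side.
# Only the method names come from the diff above; dmap and the paths
# are assumptions for illustration.
nonnormal = dmap.non_normal_entries()            # proxy object over the map
otherparent = dmap.other_parent_entries()        # evaluated to a Python set

found = dmap.non_normal_entries_contains(b'foo')    # membership test, bytes key
merged = dmap.non_normal_entries_union([b'foo'])    # returns a list of paths
dmap.non_normal_entries_remove(b'foo')              # drop one entry from the set
text = dmap.non_normal_entries_display()            # "NonNormalEntries: ..." repr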