##// END OF EJS Templates
dirstate: drop the deprecated `drop` method...
marmoute -
r48726:51cd60c0 default
parent child Browse files
Show More
@@ -1,72 +1,71
1 # dirstatenonnormalcheck.py - extension to check the consistency of the
1 # dirstatenonnormalcheck.py - extension to check the consistency of the
2 # dirstate's non-normal map
2 # dirstate's non-normal map
3 #
3 #
4 # For most operations on dirstate, this extensions checks that the nonnormalset
4 # For most operations on dirstate, this extensions checks that the nonnormalset
5 # contains the right entries.
5 # contains the right entries.
6 # It compares the nonnormal file to a nonnormalset built from the map of all
6 # It compares the nonnormal file to a nonnormalset built from the map of all
7 # the files in the dirstate to check that they contain the same files.
7 # the files in the dirstate to check that they contain the same files.
8
8
9 from __future__ import absolute_import
9 from __future__ import absolute_import
10
10
11 from mercurial import (
11 from mercurial import (
12 dirstate,
12 dirstate,
13 extensions,
13 extensions,
14 pycompat,
14 pycompat,
15 )
15 )
16
16
17
17
def nonnormalentries(dmap):
    """Compute nonnormal entries from dirstate's dmap"""
    # A file is "nonnormal" when its recorded state is not 'n' (normal)
    # or when its mtime is the -1 "unset" marker.
    return {
        fname
        for fname, entry in dmap.iteritems()
        if entry.state != b'n' or entry.mtime == -1
    }
25
25
26
26
def checkconsistency(ui, orig, dmap, _nonnormalset, label):
    """Compute nonnormalset from dmap, check that it matches _nonnormalset"""
    computed = nonnormalentries(dmap)
    if _nonnormalset == computed:
        # consistent: nothing to report
        return
    b_orig = pycompat.sysbytes(repr(orig))
    ui.develwarn(b"%s call to %s\n" % (label, b_orig), config=b'dirstate')
    ui.develwarn(b"inconsistency in nonnormalset\n", config=b'dirstate')
    b_set = pycompat.sysbytes(repr(_nonnormalset))
    ui.develwarn(b"[nonnormalset] %s\n" % b_set, config=b'dirstate')
    b_computed = pycompat.sysbytes(repr(computed))
    ui.develwarn(b"[map] %s\n" % b_computed, config=b'dirstate')
38
38
39
39
def _checkdirstate(orig, self, *args, **kwargs):
    """Check nonnormal set consistency before and after the call to orig"""

    def _check(label):
        # re-read self._map each time: the wrapped call may replace it
        checkconsistency(
            self._ui, orig, self._map, self._map.nonnormalset, label
        )

    _check(b"before")
    result = orig(self, *args, **kwargs)
    _check(b"after")
    return result
50
50
51
51
def extsetup(ui):
    """Wrap functions modifying dirstate to check nonnormalset consistency"""
    dirstatecl = dirstate.dirstate
    devel = ui.configbool(b'devel', b'all-warnings')
    paranoid = ui.configbool(b'experimental', b'nonnormalparanoidcheck')
    if devel:
        extensions.wrapfunction(dirstatecl, '_writedirstate', _checkdirstate)
    if paranoid:
        # We don't do all these checks when paranoid is disabled as it
        # would make the extension run very slowly on large repos
        for method in (
            'write',
            'set_tracked',
            'set_untracked',
            'set_possibly_dirty',
            'update_file_p1',
            'update_file',
        ):
            extensions.wrapfunction(dirstatecl, method, _checkdirstate)
@@ -1,1660 +1,1642
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .pycompat import delattr
17 from .pycompat import delattr
18
18
19 from hgdemandimport import tracing
19 from hgdemandimport import tracing
20
20
21 from . import (
21 from . import (
22 dirstatemap,
22 dirstatemap,
23 encoding,
23 encoding,
24 error,
24 error,
25 match as matchmod,
25 match as matchmod,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 sparse,
30 sparse,
31 util,
31 util,
32 )
32 )
33
33
34 from .interfaces import (
34 from .interfaces import (
35 dirstate as intdirstate,
35 dirstate as intdirstate,
36 util as interfaceutil,
36 util as interfaceutil,
37 )
37 )
38
38
# Implementation selection is delegated to the policy module (C vs pure
# Python for parsers, optional Rust for the dirstate).
parsers = policy.importmod('parsers')
rustmod = policy.importrust('dirstate')

# dirstate-v2 support requires the Rust extension to be available.
SUPPORTS_DIRSTATE_V2 = rustmod is not None

# Short local aliases for frequently used helpers.
propertycache = util.propertycache
filecache = scmutil.filecache
_rangemask = dirstatemap.rangemask

DirstateItem = parsers.DirstateItem
49
49
50
50
class repocache(filecache):
    """filecache for files in .hg/"""

    def join(self, obj, fname):
        # resolve fname through the dirstate's opener (rooted at .hg/)
        return obj._opener.join(fname)
56
56
57
57
class rootcache(filecache):
    """filecache for files in the repository root"""

    def join(self, obj, fname):
        # resolve fname relative to the working-directory root
        return obj._join(fname)
63
63
64
64
65 def _getfsnow(vfs):
65 def _getfsnow(vfs):
66 '''Get "now" timestamp on filesystem'''
66 '''Get "now" timestamp on filesystem'''
67 tmpfd, tmpname = vfs.mkstemp()
67 tmpfd, tmpname = vfs.mkstemp()
68 try:
68 try:
69 return os.fstat(tmpfd)[stat.ST_MTIME]
69 return os.fstat(tmpfd)[stat.ST_MTIME]
70 finally:
70 finally:
71 os.close(tmpfd)
71 os.close(tmpfd)
72 vfs.unlink(tmpname)
72 vfs.unlink(tmpname)
73
73
74
74
def requires_parents_change(func):
    """Decorate *func* so it may only run inside a parentchange context.

    Calling the decorated method while pendingparentchange() is False is a
    programming error and raises accordingly.
    """

    def wrap(self, *args, **kwargs):
        if self.pendingparentchange():
            return func(self, *args, **kwargs)
        msg = 'calling `%s` outside of a parentchange context' % func.__name__
        raise error.ProgrammingError(msg)

    return wrap
84
84
85
85
def requires_no_parents_change(func):
    """Decorate *func* so it may only run outside a parentchange context.

    Calling the decorated method while pendingparentchange() is True is a
    programming error and raises accordingly.
    """

    def wrap(self, *args, **kwargs):
        if not self.pendingparentchange():
            return func(self, *args, **kwargs)
        msg = 'calling `%s` inside of a parentchange context' % func.__name__
        raise error.ProgrammingError(msg)

    return wrap
95
95
96
96
97 @interfaceutil.implementer(intdirstate.idirstate)
97 @interfaceutil.implementer(intdirstate.idirstate)
98 class dirstate(object):
98 class dirstate(object):
    def __init__(
        self,
        opener,
        ui,
        root,
        validate,
        sparsematchfn,
        nodeconstants,
        use_dirstate_v2,
    ):
        """Create a new dirstate object.

        opener is an open()-like callable that can be used to open the
        dirstate file; root is the root of the directory tracked by
        the dirstate.
        """
        self._use_dirstate_v2 = use_dirstate_v2
        self._nodeconstants = nodeconstants
        self._opener = opener
        self._validate = validate
        self._root = root
        self._sparsematchfn = sparsematchfn
        # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
        # UNC path pointing to root share (issue4557)
        self._rootdir = pathutil.normasprefix(root)
        self._dirty = False
        self._lastnormaltime = 0
        self._ui = ui
        self._filecache = {}
        # count of currently open parentchange() contexts
        self._parentwriters = 0
        self._filename = b'dirstate'
        self._pendingfilename = b'%s.pending' % self._filename
        self._plchangecallbacks = {}
        self._origpl = None
        self._updatedfiles = set()
        self._mapcls = dirstatemap.dirstatemap
        # Access and cache cwd early, so we don't access it for the first time
        # after a working-copy update caused it to not exist (accessing it then
        # raises an exception).
        self._cwd
139
139
    def prefetch_parents(self):
        """make sure the parents are loaded

        Used to avoid a race condition.
        """
        self._pl

    @contextlib.contextmanager
    def parentchange(self):
        """Context manager for handling dirstate parents.

        If an exception occurs in the scope of the context manager,
        the incoherent dirstate won't be written when wlock is
        released.
        """
        self._parentwriters += 1
        yield
        # Typically we want the "undo" step of a context manager in a
        # finally block so it happens even when an exception
        # occurs. In this case, however, we only want to decrement
        # parentwriters if the code in the with statement exits
        # normally, so we don't have a try/finally here on purpose.
        self._parentwriters -= 1

    def pendingparentchange(self):
        """Returns true if the dirstate is in the middle of a set of changes
        that modify the dirstate parent.
        """
        return self._parentwriters > 0
169
169
    @propertycache
    def _map(self):
        """Return the dirstate contents (see documentation for dirstatemap)."""
        # The assignment below replaces this propertycache entry so the
        # map is only constructed once.
        self._map = self._mapcls(
            self._ui,
            self._opener,
            self._root,
            self._nodeconstants,
            self._use_dirstate_v2,
        )
        return self._map

    @property
    def _sparsematcher(self):
        """The matcher for the sparse checkout.

        The working directory may not include every file from a manifest. The
        matcher obtained by this property will match a path if it is to be
        included in the working directory.
        """
        # TODO there is potential to cache this property. For now, the matcher
        # is resolved on every access. (But the called function does use a
        # cache to keep the lookup fast.)
        return self._sparsematchfn()

    @repocache(b'branch')
    def _branch(self):
        # read the branch name from the 'branch' file; a missing file
        # means the default branch
        try:
            return self._opener.read(b"branch").strip() or b"default"
        except IOError as inst:
            if inst.errno != errno.ENOENT:
                raise
            return b"default"

    @property
    def _pl(self):
        # the two dirstate parents as recorded in the dirstate map
        return self._map.parents()
207
207
    def hasdir(self, d):
        # delegate directory membership to the dirstate map
        return self._map.hastrackeddir(d)

    @rootcache(b'.hgignore')
    def _ignore(self):
        """Matcher built from the configured ignore files."""
        files = self._ignorefiles()
        if not files:
            return matchmod.never()

        pats = [b'include:%s' % f for f in files]
        return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)

    @propertycache
    def _slash(self):
        # True when paths should be shown with '/' despite a different
        # native separator
        return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'

    @propertycache
    def _checklink(self):
        # whether symlinks are usable in the working directory
        return util.checklink(self._root)

    @propertycache
    def _checkexec(self):
        # whether the exec bit is usable in the working directory
        return bool(util.checkexec(self._root))

    @propertycache
    def _checkcase(self):
        # True on case-insensitive filesystems
        return not util.fscasesensitive(self._join(b'.hg'))

    def _join(self, f):
        # much faster than os.path.join()
        # it's safe because f is always a relative path
        return self._rootdir + f
240
240
    def flagfunc(self, buildfallback):
        """Return a function mapping a path to its b'l'/b'x'/b'' flag.

        When the filesystem cannot express symlinks or the exec bit, the
        missing piece of information is taken from the fallback returned
        by ``buildfallback()``.
        """
        if self._checklink and self._checkexec:
            # full filesystem support: read flags straight from lstat

            def f(x):
                try:
                    st = os.lstat(self._join(x))
                    if util.statislink(st):
                        return b'l'
                    if util.statisexec(st):
                        return b'x'
                except OSError:
                    pass
                return b''

            return f

        fallback = buildfallback()
        if self._checklink:
            # symlinks supported, exec bit not: use fallback for 'x'

            def f(x):
                if os.path.islink(self._join(x)):
                    return b'l'
                if b'x' in fallback(x):
                    return b'x'
                return b''

            return f
        if self._checkexec:
            # exec bit supported, symlinks not: use fallback for 'l'

            def f(x):
                if b'l' in fallback(x):
                    return b'l'
                if util.isexec(self._join(x)):
                    return b'x'
                return b''

            return f
        else:
            # neither supported: everything comes from the fallback
            return fallback
280
280
    @propertycache
    def _cwd(self):
        # internal config: ui.forcecwd
        forcecwd = self._ui.config(b'ui', b'forcecwd')
        if forcecwd:
            return forcecwd
        return encoding.getcwd()

    def getcwd(self):
        """Return the path from which a canonical path is calculated.

        This path should be used to resolve file patterns or to convert
        canonical paths back to file paths for display. It shouldn't be
        used to get real file paths. Use vfs functions instead.
        """
        cwd = self._cwd
        if cwd == self._root:
            return b''
        # self._root ends with a path separator if self._root is '/' or 'C:\'
        rootsep = self._root
        if not util.endswithsep(rootsep):
            rootsep += pycompat.ossep
        if cwd.startswith(rootsep):
            return cwd[len(rootsep) :]
        else:
            # we're outside the repo. return an absolute path.
            return cwd

    def pathto(self, f, cwd=None):
        # express repo path `f` relative to `cwd` for display purposes
        if cwd is None:
            cwd = self.getcwd()
        path = util.pathto(self._root, cwd, f)
        if self._slash:
            return util.pconvert(path)
        return path
316
316
    def __getitem__(self, key):
        """Return the current state of key (a filename) in the dirstate.

        States are:
        n  normal
        m  needs merging
        r  marked for removal
        a  marked for addition
        ?  not tracked

        XXX The "state" is a bit obscure to be in the "public" API. we should
        consider migrating all user of this to going through the dirstate entry
        instead.
        """
        entry = self._map.get(key)
        if entry is not None:
            return entry.state
        return b'?'

    def __contains__(self, key):
        # membership delegates to the dirstate map
        return key in self._map

    def __iter__(self):
        # iterate over tracked filenames in sorted order
        return iter(sorted(self._map))

    def items(self):
        return pycompat.iteritems(self._map)

    # Python 2 style alias kept for compatibility
    iteritems = items

    def directories(self):
        return self._map.directories()

    def parents(self):
        # both parents, passed through the configured validate function
        return [self._validate(p) for p in self._pl]

    def p1(self):
        return self._validate(self._pl[0])

    def p2(self):
        return self._validate(self._pl[1])

    @property
    def in_merge(self):
        """True if a merge is in progress"""
        return self._pl[1] != self._nodeconstants.nullid

    def branch(self):
        return encoding.tolocal(self._branch)
366
366
    def setparents(self, p1, p2=None):
        """Set dirstate parents to p1 and p2.

        When moving from two parents to one, "merged" entries a
        adjusted to normal and previous copy records discarded and
        returned by the call.

        See localrepo.setparents()
        """
        if p2 is None:
            p2 = self._nodeconstants.nullid
        if self._parentwriters == 0:
            raise ValueError(
                b"cannot set dirstate parent outside of "
                b"dirstate.parentchange context manager"
            )

        self._dirty = True
        oldp2 = self._pl[1]
        if self._origpl is None:
            self._origpl = self._pl
        self._map.setparents(p1, p2)
        copies = {}
        # Only clean up merge/otherparent state when moving from two
        # parents down to one.
        if (
            oldp2 != self._nodeconstants.nullid
            and p2 == self._nodeconstants.nullid
        ):
            candidatefiles = self._map.non_normal_or_other_parent_paths()

            for f in candidatefiles:
                s = self._map.get(f)
                if s is None:
                    continue

                # Discard "merged" markers when moving away from a merge state
                if s.merged:
                    source = self._map.copymap.get(f)
                    if source:
                        copies[f] = source
                    self._normallookup(f)
                # Also fix up otherparent markers
                elif s.from_p2:
                    source = self._map.copymap.get(f)
                    if source:
                        copies[f] = source
                    self._add(f)
        return copies
414
414
    def setbranch(self, branch):
        # update the cached value and persist it to the 'branch' file
        self.__class__._branch.set(self, encoding.fromlocal(branch))
        f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
        try:
            f.write(self._branch + b'\n')
            f.close()

            # make sure filecache has the correct stat info for _branch after
            # replacing the underlying file
            ce = self._filecache[b'_branch']
            if ce:
                ce.refresh()
        except:  # re-raises
            f.discard()
            raise

    def invalidate(self):
        """Causes the next access to reread the dirstate.

        This is different from localrepo.invalidatedirstate() because it always
        rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
        check whether the dirstate has changed before rereading it."""

        # drop the cached properties so they are recomputed on next access
        for a in ("_map", "_branch", "_ignore"):
            if a in self.__dict__:
                delattr(self, a)
        self._lastnormaltime = 0
        self._dirty = False
        self._updatedfiles.clear()
        self._parentwriters = 0
        self._origpl = None
446
446
    def copy(self, source, dest):
        """Mark dest as a copy of source. Unmark dest if source is None."""
        if source == dest:
            return
        self._dirty = True
        if source is not None:
            self._map.copymap[dest] = source
            self._updatedfiles.add(source)
            self._updatedfiles.add(dest)
        elif self._map.copymap.pop(dest, None):
            # an existing copy record was removed; note dest as updated
            self._updatedfiles.add(dest)

    def copied(self, file):
        # return the copy source recorded for `file`, or None
        return self._map.copymap.get(file, None)

    def copies(self):
        return self._map.copymap
464
464
465 @requires_no_parents_change
465 @requires_no_parents_change
466 def set_tracked(self, filename):
466 def set_tracked(self, filename):
467 """a "public" method for generic code to mark a file as tracked
467 """a "public" method for generic code to mark a file as tracked
468
468
469 This function is to be called outside of "update/merge" case. For
469 This function is to be called outside of "update/merge" case. For
470 example by a command like `hg add X`.
470 example by a command like `hg add X`.
471
471
472 return True the file was previously untracked, False otherwise.
472 return True the file was previously untracked, False otherwise.
473 """
473 """
474 entry = self._map.get(filename)
474 entry = self._map.get(filename)
475 if entry is None:
475 if entry is None:
476 self._add(filename)
476 self._add(filename)
477 return True
477 return True
478 elif not entry.tracked:
478 elif not entry.tracked:
479 self._normallookup(filename)
479 self._normallookup(filename)
480 return True
480 return True
481 # XXX This is probably overkill for more case, but we need this to
481 # XXX This is probably overkill for more case, but we need this to
482 # fully replace the `normallookup` call with `set_tracked` one.
482 # fully replace the `normallookup` call with `set_tracked` one.
483 # Consider smoothing this in the future.
483 # Consider smoothing this in the future.
484 self.set_possibly_dirty(filename)
484 self.set_possibly_dirty(filename)
485 return False
485 return False
486
486
487 @requires_no_parents_change
487 @requires_no_parents_change
def set_untracked(self, filename):
    """a "public" method for generic code to mark a file as untracked

    This function is to be called outside of "update/merge" case. For
    example by a command like `hg remove X`.

    return True the file was previously tracked, False otherwise.
    """
    entry = self._map.get(filename)
    if entry is None:
        # nothing to do: the file was not tracked to begin with
        return False
    if entry.added:
        # an added-but-never-committed file can simply be dropped
        self._drop(filename)
        return True
    # otherwise record the removal in the map and mark ourselves dirty
    self._dirty = True
    self._updatedfiles.add(filename)
    self._map.set_untracked(filename)
    return True
507
507
508 @requires_no_parents_change
508 @requires_no_parents_change
def set_clean(self, filename, parentfiledata=None):
    """record that the current state of the file on disk is known to be clean"""
    # track the change and flag the dirstate as needing a write-out
    self._updatedfiles.add(filename)
    self._dirty = True
    # delegate the actual bookkeeping to the internal "normal" marker
    self._normal(filename, parentfiledata=parentfiledata)
514
514
515 @requires_no_parents_change
515 @requires_no_parents_change
def set_possibly_dirty(self, filename):
    """record that the current state of the file on disk is unknown"""
    # track the change and flag the dirstate as needing a write-out
    self._updatedfiles.add(filename)
    self._dirty = True
    self._map.set_possibly_dirty(filename)
521
521
522 @requires_parents_change
522 @requires_parents_change
def update_file_p1(
    self,
    filename,
    p1_tracked,
):
    """Set a file as tracked in the parent (or not)

    This is to be called when adjust the dirstate to a new parent after an history
    rewriting operation.

    It should not be called during a merge (p2 != nullid) and only within
    a `with dirstate.parentchange():` context.
    """
    if self.in_merge:
        msg = b'update_file_reference should not be called when merging'
        raise error.ProgrammingError(msg)

    entry = self._map.get(filename)
    wc_tracked = False if entry is None else entry.tracked

    possibly_dirty = False
    if p1_tracked and wc_tracked:
        # the underlying reference might have changed, we will have to
        # check it.
        possibly_dirty = True
    elif not p1_tracked and wc_tracked:
        if entry is not None and entry.added:
            return  # avoid dropping copy information (maybe?)
    elif not p1_tracked and not wc_tracked:
        # the file is no longer relevant to anyone
        self._drop(filename)
    elif p1_tracked and not wc_tracked:
        pass
    else:
        assert False, 'unreachable'

    # this mean we are doing call for file we do not really care about the
    # data (eg: added or removed), however this should be a minor overhead
    # compared to the overall update process calling this.
    parentfiledata = self._get_filedata(filename) if wc_tracked else None

    self._updatedfiles.add(filename)
    self._map.reset_state(
        filename,
        wc_tracked,
        p1_tracked,
        possibly_dirty=possibly_dirty,
        parentfiledata=parentfiledata,
    )
    if (
        parentfiledata is not None
        and parentfiledata[2] > self._lastnormaltime
    ):
        # Remember the most recent modification timeslot for status(),
        # to make sure we won't miss future size-preserving file content
        # modifications that happen within the same timeslot.
        self._lastnormaltime = parentfiledata[2]
583
583
584 @requires_parents_change
584 @requires_parents_change
def update_file(
    self,
    filename,
    wc_tracked,
    p1_tracked,
    p2_tracked=False,
    merged=False,
    clean_p1=False,
    clean_p2=False,
    possibly_dirty=False,
    parentfiledata=None,
):
    """update the information about a file in the dirstate

    This is to be called when the direstates parent changes to keep track
    of what is the file situation in regards to the working copy and its parent.

    This function must be called within a `dirstate.parentchange` context.

    note: the API is at an early stage and we might need to adjust it
    depending of what information ends up being relevant and useful to
    other processing.
    """
    if merged and (clean_p1 or clean_p2):
        msg = b'`merged` argument incompatible with `clean_p1`/`clean_p2`'
        raise error.ProgrammingError(msg)

    # note: I do not think we need to double check name clash here since we
    # are in a update/merge case that should already have taken care of
    # this. The test agrees

    self._dirty = True
    self._updatedfiles.add(filename)

    need_parent_file_data = (
        wc_tracked
        and p1_tracked
        and not (possibly_dirty or clean_p2 or merged)
    )

    # this mean we are doing call for file we do not really care about the
    # data (eg: added or removed), however this should be a minor overhead
    # compared to the overall update process calling this.
    if need_parent_file_data:
        if parentfiledata is None:
            parentfiledata = self._get_filedata(filename)
        mtime = parentfiledata[2]
        if mtime > self._lastnormaltime:
            # Remember the most recent modification timeslot for
            # status(), to make sure we won't miss future
            # size-preserving file content modifications that happen
            # within the same timeslot.
            self._lastnormaltime = mtime

    self._map.reset_state(
        filename,
        wc_tracked,
        p1_tracked,
        p2_tracked=p2_tracked,
        merged=merged,
        clean_p1=clean_p1,
        clean_p2=clean_p2,
        possibly_dirty=possibly_dirty,
        parentfiledata=parentfiledata,
    )
    if (
        parentfiledata is not None
        and parentfiledata[2] > self._lastnormaltime
    ):
        # Remember the most recent modification timeslot for status(),
        # to make sure we won't miss future size-preserving file content
        # modifications that happen within the same timeslot.
        self._lastnormaltime = parentfiledata[2]
659
659
660 def _addpath(
660 def _addpath(
661 self,
661 self,
662 f,
662 f,
663 mode=0,
663 mode=0,
664 size=None,
664 size=None,
665 mtime=None,
665 mtime=None,
666 added=False,
666 added=False,
667 merged=False,
667 merged=False,
668 from_p2=False,
668 from_p2=False,
669 possibly_dirty=False,
669 possibly_dirty=False,
670 ):
670 ):
671 entry = self._map.get(f)
671 entry = self._map.get(f)
672 if added or entry is not None and entry.removed:
672 if added or entry is not None and entry.removed:
673 scmutil.checkfilename(f)
673 scmutil.checkfilename(f)
674 if self._map.hastrackeddir(f):
674 if self._map.hastrackeddir(f):
675 msg = _(b'directory %r already in dirstate')
675 msg = _(b'directory %r already in dirstate')
676 msg %= pycompat.bytestr(f)
676 msg %= pycompat.bytestr(f)
677 raise error.Abort(msg)
677 raise error.Abort(msg)
678 # shadows
678 # shadows
679 for d in pathutil.finddirs(f):
679 for d in pathutil.finddirs(f):
680 if self._map.hastrackeddir(d):
680 if self._map.hastrackeddir(d):
681 break
681 break
682 entry = self._map.get(d)
682 entry = self._map.get(d)
683 if entry is not None and not entry.removed:
683 if entry is not None and not entry.removed:
684 msg = _(b'file %r in dirstate clashes with %r')
684 msg = _(b'file %r in dirstate clashes with %r')
685 msg %= (pycompat.bytestr(d), pycompat.bytestr(f))
685 msg %= (pycompat.bytestr(d), pycompat.bytestr(f))
686 raise error.Abort(msg)
686 raise error.Abort(msg)
687 self._dirty = True
687 self._dirty = True
688 self._updatedfiles.add(f)
688 self._updatedfiles.add(f)
689 self._map.addfile(
689 self._map.addfile(
690 f,
690 f,
691 mode=mode,
691 mode=mode,
692 size=size,
692 size=size,
693 mtime=mtime,
693 mtime=mtime,
694 added=added,
694 added=added,
695 merged=merged,
695 merged=merged,
696 from_p2=from_p2,
696 from_p2=from_p2,
697 possibly_dirty=possibly_dirty,
697 possibly_dirty=possibly_dirty,
698 )
698 )
699
699
700 def _get_filedata(self, filename):
700 def _get_filedata(self, filename):
701 """returns"""
701 """returns"""
702 s = os.lstat(self._join(filename))
702 s = os.lstat(self._join(filename))
703 mode = s.st_mode
703 mode = s.st_mode
704 size = s.st_size
704 size = s.st_size
705 mtime = s[stat.ST_MTIME]
705 mtime = s[stat.ST_MTIME]
706 return (mode, size, mtime)
706 return (mode, size, mtime)
707
707
708 def _normal(self, f, parentfiledata=None):
708 def _normal(self, f, parentfiledata=None):
709 if parentfiledata:
709 if parentfiledata:
710 (mode, size, mtime) = parentfiledata
710 (mode, size, mtime) = parentfiledata
711 else:
711 else:
712 (mode, size, mtime) = self._get_filedata(f)
712 (mode, size, mtime) = self._get_filedata(f)
713 self._addpath(f, mode=mode, size=size, mtime=mtime)
713 self._addpath(f, mode=mode, size=size, mtime=mtime)
714 self._map.copymap.pop(f, None)
714 self._map.copymap.pop(f, None)
715 if f in self._map.nonnormalset:
715 if f in self._map.nonnormalset:
716 self._map.nonnormalset.remove(f)
716 self._map.nonnormalset.remove(f)
717 if mtime > self._lastnormaltime:
717 if mtime > self._lastnormaltime:
718 # Remember the most recent modification timeslot for status(),
718 # Remember the most recent modification timeslot for status(),
719 # to make sure we won't miss future size-preserving file content
719 # to make sure we won't miss future size-preserving file content
720 # modifications that happen within the same timeslot.
720 # modifications that happen within the same timeslot.
721 self._lastnormaltime = mtime
721 self._lastnormaltime = mtime
722
722
723 def _normallookup(self, f):
723 def _normallookup(self, f):
724 '''Mark a file normal, but possibly dirty.'''
724 '''Mark a file normal, but possibly dirty.'''
725 if self.in_merge:
725 if self.in_merge:
726 # if there is a merge going on and the file was either
726 # if there is a merge going on and the file was either
727 # "merged" or coming from other parent (-2) before
727 # "merged" or coming from other parent (-2) before
728 # being removed, restore that state.
728 # being removed, restore that state.
729 entry = self._map.get(f)
729 entry = self._map.get(f)
730 if entry is not None:
730 if entry is not None:
731 # XXX this should probably be dealt with a a lower level
731 # XXX this should probably be dealt with a a lower level
732 # (see `merged_removed` and `from_p2_removed`)
732 # (see `merged_removed` and `from_p2_removed`)
733 if entry.merged_removed or entry.from_p2_removed:
733 if entry.merged_removed or entry.from_p2_removed:
734 source = self._map.copymap.get(f)
734 source = self._map.copymap.get(f)
735 if entry.merged_removed:
735 if entry.merged_removed:
736 self._merge(f)
736 self._merge(f)
737 elif entry.from_p2_removed:
737 elif entry.from_p2_removed:
738 self._otherparent(f)
738 self._otherparent(f)
739 if source is not None:
739 if source is not None:
740 self.copy(source, f)
740 self.copy(source, f)
741 return
741 return
742 elif entry.merged or entry.from_p2:
742 elif entry.merged or entry.from_p2:
743 return
743 return
744 self._addpath(f, possibly_dirty=True)
744 self._addpath(f, possibly_dirty=True)
745 self._map.copymap.pop(f, None)
745 self._map.copymap.pop(f, None)
746
746
747 def _otherparent(self, f):
747 def _otherparent(self, f):
748 if not self.in_merge:
748 if not self.in_merge:
749 msg = _(b"setting %r to other parent only allowed in merges") % f
749 msg = _(b"setting %r to other parent only allowed in merges") % f
750 raise error.Abort(msg)
750 raise error.Abort(msg)
751 entry = self._map.get(f)
751 entry = self._map.get(f)
752 if entry is not None and entry.tracked:
752 if entry is not None and entry.tracked:
753 # merge-like
753 # merge-like
754 self._addpath(f, merged=True)
754 self._addpath(f, merged=True)
755 else:
755 else:
756 # add-like
756 # add-like
757 self._addpath(f, from_p2=True)
757 self._addpath(f, from_p2=True)
758 self._map.copymap.pop(f, None)
758 self._map.copymap.pop(f, None)
759
759
760 def _add(self, filename):
760 def _add(self, filename):
761 """internal function to mark a file as added"""
761 """internal function to mark a file as added"""
762 self._addpath(filename, added=True)
762 self._addpath(filename, added=True)
763 self._map.copymap.pop(filename, None)
763 self._map.copymap.pop(filename, None)
764
764
765 def _merge(self, f):
765 def _merge(self, f):
766 if not self.in_merge:
766 if not self.in_merge:
767 return self._normallookup(f)
767 return self._normallookup(f)
768 return self._otherparent(f)
768 return self._otherparent(f)
769
769
def drop(self, f):
    '''Drop a file from the dirstate'''
    # deprecated entry point: warn with the message matching the context,
    # then fall through to the internal implementation
    if self.pendingparentchange():
        msg = (
            b"do not use `drop` inside of update/merge context."
            b" Use `update_file`"
        )
    else:
        msg = (
            b"do not use `drop` outside of update/merge context."
            b" Use `set_untracked`"
        )
    util.nouideprecwarn(msg, b'6.0', stacklevel=2)
    self._drop(f)
787
788 def _drop(self, filename):
770 def _drop(self, filename):
789 """internal function to drop a file from the dirstate"""
771 """internal function to drop a file from the dirstate"""
790 if self._map.dropfile(filename):
772 if self._map.dropfile(filename):
791 self._dirty = True
773 self._dirty = True
792 self._updatedfiles.add(filename)
774 self._updatedfiles.add(filename)
793 self._map.copymap.pop(filename, None)
775 self._map.copymap.pop(filename, None)
794
776
795 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
777 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
796 if exists is None:
778 if exists is None:
797 exists = os.path.lexists(os.path.join(self._root, path))
779 exists = os.path.lexists(os.path.join(self._root, path))
798 if not exists:
780 if not exists:
799 # Maybe a path component exists
781 # Maybe a path component exists
800 if not ignoremissing and b'/' in path:
782 if not ignoremissing and b'/' in path:
801 d, f = path.rsplit(b'/', 1)
783 d, f = path.rsplit(b'/', 1)
802 d = self._normalize(d, False, ignoremissing, None)
784 d = self._normalize(d, False, ignoremissing, None)
803 folded = d + b"/" + f
785 folded = d + b"/" + f
804 else:
786 else:
805 # No path components, preserve original case
787 # No path components, preserve original case
806 folded = path
788 folded = path
807 else:
789 else:
808 # recursively normalize leading directory components
790 # recursively normalize leading directory components
809 # against dirstate
791 # against dirstate
810 if b'/' in normed:
792 if b'/' in normed:
811 d, f = normed.rsplit(b'/', 1)
793 d, f = normed.rsplit(b'/', 1)
812 d = self._normalize(d, False, ignoremissing, True)
794 d = self._normalize(d, False, ignoremissing, True)
813 r = self._root + b"/" + d
795 r = self._root + b"/" + d
814 folded = d + b"/" + util.fspath(f, r)
796 folded = d + b"/" + util.fspath(f, r)
815 else:
797 else:
816 folded = util.fspath(normed, self._root)
798 folded = util.fspath(normed, self._root)
817 storemap[normed] = folded
799 storemap[normed] = folded
818
800
819 return folded
801 return folded
820
802
def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
    """Case-normalize *path* against known files, probing disk when unknown."""
    normed = util.normcase(path)
    cached = self._map.filefoldmap.get(normed, None)
    if cached is not None:
        return cached
    if isknown:
        # the caller walked the disk, so the given case is authoritative
        return path
    return self._discoverpath(
        path, normed, ignoremissing, exists, self._map.filefoldmap
    )
832
814
def _normalize(self, path, isknown, ignoremissing=False, exists=None):
    """Case-normalize *path* against known files and directories."""
    normed = util.normcase(path)
    folded = self._map.filefoldmap.get(normed, None)
    if folded is None:
        folded = self._map.dirfoldmap.get(normed, None)
    if folded is not None:
        return folded
    if isknown:
        return path
    # store discovered result in dirfoldmap so that future
    # normalizefile calls don't start matching directories
    return self._discoverpath(
        path, normed, ignoremissing, exists, self._map.dirfoldmap
    )
848
830
def normalize(self, path, isknown=False, ignoremissing=False):
    """
    normalize the case of a pathname when on a casefolding filesystem

    isknown specifies whether the filename came from walking the
    disk, to avoid extra filesystem access.

    If ignoremissing is True, missing path are returned
    unchanged. Otherwise, we try harder to normalize possibly
    existing path components.

    The normalized case is determined based on the following precedence:

    - version of name already stored in the dirstate
    - version of name stored on disk
    - version provided via command arguments
    """
    if not self._checkcase:
        # case-sensitive filesystem: nothing to normalize
        return path
    return self._normalize(path, isknown, ignoremissing)
870
852
def clear(self):
    """Reset the dirstate to an empty (but dirty) in-memory state."""
    self._map.clear()
    self._updatedfiles.clear()
    self._lastnormaltime = 0
    self._dirty = True
876
858
def rebuild(self, parent, allfiles, changedfiles=None):
    """Reset the dirstate to describe *parent*, limited to *changedfiles* if given."""
    if changedfiles is None:
        # Rebuild entire dirstate
        to_lookup = allfiles
        to_drop = []
        # clear() resets _lastnormaltime; preserve it across the reset
        last = self._lastnormaltime
        self.clear()
        self._lastnormaltime = last
    elif len(changedfiles) < 10:
        # Avoid turning allfiles into a set, which can be expensive if it's
        # large.
        to_lookup = [f for f in changedfiles if f in allfiles]
        to_drop = [f for f in changedfiles if f not in allfiles]
    else:
        changedset = set(changedfiles)
        to_lookup = changedset & set(allfiles)
        to_drop = changedset - to_lookup

    if self._origpl is None:
        self._origpl = self._pl
    self._map.setparents(parent, self._nodeconstants.nullid)

    for f in to_lookup:
        self._normallookup(f)
    for f in to_drop:
        self._drop(f)

    self._dirty = True
910
892
def identity(self):
    """Return identity of dirstate itself to detect changing in storage

    If identity of previous dirstate is equal to this, writing
    changes based on the former dirstate out can keep consistency.
    """
    # the map tracks the storage identity; simply expose it
    ident = self._map.identity
    return ident
918
900
def write(self, tr):
    """Write the dirstate out, delaying through transaction *tr* when given."""
    if not self._dirty:
        return

    if tr:
        # 'dirstate.write()' is not only for writing in-memory
        # changes out, but also for dropping ambiguous timestamp.
        # delayed writing re-raise "ambiguous timestamp issue".
        # See also the wiki page below for detail:
        # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan

        # emulate dropping timestamp in 'parsers.pack_dirstate'
        now = _getfsnow(self._opener)
        self._map.clearambiguoustimes(self._updatedfiles, now)

        # emulate that all 'dirstate.normal' results are written out
        self._lastnormaltime = 0
        self._updatedfiles.clear()

        # delay writing in-memory changes out
        tr.addfilegenerator(
            b'dirstate',
            (self._filename,),
            lambda f: self._writedirstate(tr, f),
            location=b'plain',
        )
        return

    # no transaction: write synchronously
    st = self._opener(self._filename, b"w", atomictemp=True, checkambig=True)
    self._writedirstate(tr, st)
950
932
def addparentchangecallback(self, category, callback):
    """add a callback to be called when the wd parents are changed

    Callback will be called with the following arguments:
    dirstate, (oldp1, oldp2), (newp1, newp2)

    Category is a unique identifier to allow overwriting an old callback
    with a newer callback.
    """
    # a later registration under the same category replaces the earlier one
    self._plchangecallbacks[category] = callback
961
943
def _writedirstate(self, tr, st):
    """Serialize the dirstate to *st*, honoring the debug delaywrite setting."""
    # notify callbacks about parents change
    if self._origpl is not None and self._origpl != self._pl:
        callbacks = sorted(pycompat.iteritems(self._plchangecallbacks))
        for _category, callback in callbacks:
            callback(self, self._origpl, self._pl)
        self._origpl = None

    # use the modification time of the newly created temporary file as the
    # filesystem's notion of 'now'
    now = util.fstat(st)[stat.ST_MTIME] & _rangemask

    # enough 'delaywrite' prevents 'pack_dirstate' from dropping
    # timestamp of each entries in dirstate, because of 'now > mtime'
    delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
    if delaywrite > 0:
        # do we have any files to delay for?
        for f, e in pycompat.iteritems(self._map):
            if not e.need_delay(now):
                continue
            import time  # to avoid useless import

            # rather than sleep n seconds, sleep until the next
            # multiple of n seconds
            clock = time.time()
            start = int(clock) - (int(clock) % delaywrite)
            end = start + delaywrite
            time.sleep(end - clock)
            now = end  # trust our estimate that the end is near now
            break

    self._map.write(tr, st, now)
    self._lastnormaltime = 0
    self._dirty = False
995
977
996 def _dirignore(self, f):
978 def _dirignore(self, f):
997 if self._ignore(f):
979 if self._ignore(f):
998 return True
980 return True
999 for p in pathutil.finddirs(f):
981 for p in pathutil.finddirs(f):
1000 if self._ignore(p):
982 if self._ignore(p):
1001 return True
983 return True
1002 return False
984 return False
1003
985
1004 def _ignorefiles(self):
986 def _ignorefiles(self):
1005 files = []
987 files = []
1006 if os.path.exists(self._join(b'.hgignore')):
988 if os.path.exists(self._join(b'.hgignore')):
1007 files.append(self._join(b'.hgignore'))
989 files.append(self._join(b'.hgignore'))
1008 for name, path in self._ui.configitems(b"ui"):
990 for name, path in self._ui.configitems(b"ui"):
1009 if name == b'ignore' or name.startswith(b'ignore.'):
991 if name == b'ignore' or name.startswith(b'ignore.'):
1010 # we need to use os.path.join here rather than self._join
992 # we need to use os.path.join here rather than self._join
1011 # because path is arbitrary and user-specified
993 # because path is arbitrary and user-specified
1012 files.append(os.path.join(self._rootdir, util.expandpath(path)))
994 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1013 return files
995 return files
1014
996
1015 def _ignorefileandline(self, f):
997 def _ignorefileandline(self, f):
1016 files = collections.deque(self._ignorefiles())
998 files = collections.deque(self._ignorefiles())
1017 visited = set()
999 visited = set()
1018 while files:
1000 while files:
1019 i = files.popleft()
1001 i = files.popleft()
1020 patterns = matchmod.readpatternfile(
1002 patterns = matchmod.readpatternfile(
1021 i, self._ui.warn, sourceinfo=True
1003 i, self._ui.warn, sourceinfo=True
1022 )
1004 )
1023 for pattern, lineno, line in patterns:
1005 for pattern, lineno, line in patterns:
1024 kind, p = matchmod._patsplit(pattern, b'glob')
1006 kind, p = matchmod._patsplit(pattern, b'glob')
1025 if kind == b"subinclude":
1007 if kind == b"subinclude":
1026 if p not in visited:
1008 if p not in visited:
1027 files.append(p)
1009 files.append(p)
1028 continue
1010 continue
1029 m = matchmod.match(
1011 m = matchmod.match(
1030 self._root, b'', [], [pattern], warn=self._ui.warn
1012 self._root, b'', [], [pattern], warn=self._ui.warn
1031 )
1013 )
1032 if m(f):
1014 if m(f):
1033 return (i, lineno, line)
1015 return (i, lineno, line)
1034 visited.add(i)
1016 visited.add(i)
1035 return (None, -1, b"")
1017 return (None, -1, b"")
1036
1018
1037 def _walkexplicit(self, match, subrepos):
1019 def _walkexplicit(self, match, subrepos):
1038 """Get stat data about the files explicitly specified by match.
1020 """Get stat data about the files explicitly specified by match.
1039
1021
1040 Return a triple (results, dirsfound, dirsnotfound).
1022 Return a triple (results, dirsfound, dirsnotfound).
1041 - results is a mapping from filename to stat result. It also contains
1023 - results is a mapping from filename to stat result. It also contains
1042 listings mapping subrepos and .hg to None.
1024 listings mapping subrepos and .hg to None.
1043 - dirsfound is a list of files found to be directories.
1025 - dirsfound is a list of files found to be directories.
1044 - dirsnotfound is a list of files that the dirstate thinks are
1026 - dirsnotfound is a list of files that the dirstate thinks are
1045 directories and that were not found."""
1027 directories and that were not found."""
1046
1028
1047 def badtype(mode):
1029 def badtype(mode):
1048 kind = _(b'unknown')
1030 kind = _(b'unknown')
1049 if stat.S_ISCHR(mode):
1031 if stat.S_ISCHR(mode):
1050 kind = _(b'character device')
1032 kind = _(b'character device')
1051 elif stat.S_ISBLK(mode):
1033 elif stat.S_ISBLK(mode):
1052 kind = _(b'block device')
1034 kind = _(b'block device')
1053 elif stat.S_ISFIFO(mode):
1035 elif stat.S_ISFIFO(mode):
1054 kind = _(b'fifo')
1036 kind = _(b'fifo')
1055 elif stat.S_ISSOCK(mode):
1037 elif stat.S_ISSOCK(mode):
1056 kind = _(b'socket')
1038 kind = _(b'socket')
1057 elif stat.S_ISDIR(mode):
1039 elif stat.S_ISDIR(mode):
1058 kind = _(b'directory')
1040 kind = _(b'directory')
1059 return _(b'unsupported file type (type is %s)') % kind
1041 return _(b'unsupported file type (type is %s)') % kind
1060
1042
1061 badfn = match.bad
1043 badfn = match.bad
1062 dmap = self._map
1044 dmap = self._map
1063 lstat = os.lstat
1045 lstat = os.lstat
1064 getkind = stat.S_IFMT
1046 getkind = stat.S_IFMT
1065 dirkind = stat.S_IFDIR
1047 dirkind = stat.S_IFDIR
1066 regkind = stat.S_IFREG
1048 regkind = stat.S_IFREG
1067 lnkkind = stat.S_IFLNK
1049 lnkkind = stat.S_IFLNK
1068 join = self._join
1050 join = self._join
1069 dirsfound = []
1051 dirsfound = []
1070 foundadd = dirsfound.append
1052 foundadd = dirsfound.append
1071 dirsnotfound = []
1053 dirsnotfound = []
1072 notfoundadd = dirsnotfound.append
1054 notfoundadd = dirsnotfound.append
1073
1055
1074 if not match.isexact() and self._checkcase:
1056 if not match.isexact() and self._checkcase:
1075 normalize = self._normalize
1057 normalize = self._normalize
1076 else:
1058 else:
1077 normalize = None
1059 normalize = None
1078
1060
1079 files = sorted(match.files())
1061 files = sorted(match.files())
1080 subrepos.sort()
1062 subrepos.sort()
1081 i, j = 0, 0
1063 i, j = 0, 0
1082 while i < len(files) and j < len(subrepos):
1064 while i < len(files) and j < len(subrepos):
1083 subpath = subrepos[j] + b"/"
1065 subpath = subrepos[j] + b"/"
1084 if files[i] < subpath:
1066 if files[i] < subpath:
1085 i += 1
1067 i += 1
1086 continue
1068 continue
1087 while i < len(files) and files[i].startswith(subpath):
1069 while i < len(files) and files[i].startswith(subpath):
1088 del files[i]
1070 del files[i]
1089 j += 1
1071 j += 1
1090
1072
1091 if not files or b'' in files:
1073 if not files or b'' in files:
1092 files = [b'']
1074 files = [b'']
1093 # constructing the foldmap is expensive, so don't do it for the
1075 # constructing the foldmap is expensive, so don't do it for the
1094 # common case where files is ['']
1076 # common case where files is ['']
1095 normalize = None
1077 normalize = None
1096 results = dict.fromkeys(subrepos)
1078 results = dict.fromkeys(subrepos)
1097 results[b'.hg'] = None
1079 results[b'.hg'] = None
1098
1080
1099 for ff in files:
1081 for ff in files:
1100 if normalize:
1082 if normalize:
1101 nf = normalize(ff, False, True)
1083 nf = normalize(ff, False, True)
1102 else:
1084 else:
1103 nf = ff
1085 nf = ff
1104 if nf in results:
1086 if nf in results:
1105 continue
1087 continue
1106
1088
1107 try:
1089 try:
1108 st = lstat(join(nf))
1090 st = lstat(join(nf))
1109 kind = getkind(st.st_mode)
1091 kind = getkind(st.st_mode)
1110 if kind == dirkind:
1092 if kind == dirkind:
1111 if nf in dmap:
1093 if nf in dmap:
1112 # file replaced by dir on disk but still in dirstate
1094 # file replaced by dir on disk but still in dirstate
1113 results[nf] = None
1095 results[nf] = None
1114 foundadd((nf, ff))
1096 foundadd((nf, ff))
1115 elif kind == regkind or kind == lnkkind:
1097 elif kind == regkind or kind == lnkkind:
1116 results[nf] = st
1098 results[nf] = st
1117 else:
1099 else:
1118 badfn(ff, badtype(kind))
1100 badfn(ff, badtype(kind))
1119 if nf in dmap:
1101 if nf in dmap:
1120 results[nf] = None
1102 results[nf] = None
1121 except OSError as inst: # nf not found on disk - it is dirstate only
1103 except OSError as inst: # nf not found on disk - it is dirstate only
1122 if nf in dmap: # does it exactly match a missing file?
1104 if nf in dmap: # does it exactly match a missing file?
1123 results[nf] = None
1105 results[nf] = None
1124 else: # does it match a missing directory?
1106 else: # does it match a missing directory?
1125 if self._map.hasdir(nf):
1107 if self._map.hasdir(nf):
1126 notfoundadd(nf)
1108 notfoundadd(nf)
1127 else:
1109 else:
1128 badfn(ff, encoding.strtolocal(inst.strerror))
1110 badfn(ff, encoding.strtolocal(inst.strerror))
1129
1111
1130 # match.files() may contain explicitly-specified paths that shouldn't
1112 # match.files() may contain explicitly-specified paths that shouldn't
1131 # be taken; drop them from the list of files found. dirsfound/notfound
1113 # be taken; drop them from the list of files found. dirsfound/notfound
1132 # aren't filtered here because they will be tested later.
1114 # aren't filtered here because they will be tested later.
1133 if match.anypats():
1115 if match.anypats():
1134 for f in list(results):
1116 for f in list(results):
1135 if f == b'.hg' or f in subrepos:
1117 if f == b'.hg' or f in subrepos:
1136 # keep sentinel to disable further out-of-repo walks
1118 # keep sentinel to disable further out-of-repo walks
1137 continue
1119 continue
1138 if not match(f):
1120 if not match(f):
1139 del results[f]
1121 del results[f]
1140
1122
1141 # Case insensitive filesystems cannot rely on lstat() failing to detect
1123 # Case insensitive filesystems cannot rely on lstat() failing to detect
1142 # a case-only rename. Prune the stat object for any file that does not
1124 # a case-only rename. Prune the stat object for any file that does not
1143 # match the case in the filesystem, if there are multiple files that
1125 # match the case in the filesystem, if there are multiple files that
1144 # normalize to the same path.
1126 # normalize to the same path.
1145 if match.isexact() and self._checkcase:
1127 if match.isexact() and self._checkcase:
1146 normed = {}
1128 normed = {}
1147
1129
1148 for f, st in pycompat.iteritems(results):
1130 for f, st in pycompat.iteritems(results):
1149 if st is None:
1131 if st is None:
1150 continue
1132 continue
1151
1133
1152 nc = util.normcase(f)
1134 nc = util.normcase(f)
1153 paths = normed.get(nc)
1135 paths = normed.get(nc)
1154
1136
1155 if paths is None:
1137 if paths is None:
1156 paths = set()
1138 paths = set()
1157 normed[nc] = paths
1139 normed[nc] = paths
1158
1140
1159 paths.add(f)
1141 paths.add(f)
1160
1142
1161 for norm, paths in pycompat.iteritems(normed):
1143 for norm, paths in pycompat.iteritems(normed):
1162 if len(paths) > 1:
1144 if len(paths) > 1:
1163 for path in paths:
1145 for path in paths:
1164 folded = self._discoverpath(
1146 folded = self._discoverpath(
1165 path, norm, True, None, self._map.dirfoldmap
1147 path, norm, True, None, self._map.dirfoldmap
1166 )
1148 )
1167 if path != folded:
1149 if path != folded:
1168 results[path] = None
1150 results[path] = None
1169
1151
1170 return results, dirsfound, dirsnotfound
1152 return results, dirsfound, dirsnotfound
1171
1153
1172 def walk(self, match, subrepos, unknown, ignored, full=True):
1154 def walk(self, match, subrepos, unknown, ignored, full=True):
1173 """
1155 """
1174 Walk recursively through the directory tree, finding all files
1156 Walk recursively through the directory tree, finding all files
1175 matched by match.
1157 matched by match.
1176
1158
1177 If full is False, maybe skip some known-clean files.
1159 If full is False, maybe skip some known-clean files.
1178
1160
1179 Return a dict mapping filename to stat-like object (either
1161 Return a dict mapping filename to stat-like object (either
1180 mercurial.osutil.stat instance or return value of os.stat()).
1162 mercurial.osutil.stat instance or return value of os.stat()).
1181
1163
1182 """
1164 """
1183 # full is a flag that extensions that hook into walk can use -- this
1165 # full is a flag that extensions that hook into walk can use -- this
1184 # implementation doesn't use it at all. This satisfies the contract
1166 # implementation doesn't use it at all. This satisfies the contract
1185 # because we only guarantee a "maybe".
1167 # because we only guarantee a "maybe".
1186
1168
1187 if ignored:
1169 if ignored:
1188 ignore = util.never
1170 ignore = util.never
1189 dirignore = util.never
1171 dirignore = util.never
1190 elif unknown:
1172 elif unknown:
1191 ignore = self._ignore
1173 ignore = self._ignore
1192 dirignore = self._dirignore
1174 dirignore = self._dirignore
1193 else:
1175 else:
1194 # if not unknown and not ignored, drop dir recursion and step 2
1176 # if not unknown and not ignored, drop dir recursion and step 2
1195 ignore = util.always
1177 ignore = util.always
1196 dirignore = util.always
1178 dirignore = util.always
1197
1179
1198 matchfn = match.matchfn
1180 matchfn = match.matchfn
1199 matchalways = match.always()
1181 matchalways = match.always()
1200 matchtdir = match.traversedir
1182 matchtdir = match.traversedir
1201 dmap = self._map
1183 dmap = self._map
1202 listdir = util.listdir
1184 listdir = util.listdir
1203 lstat = os.lstat
1185 lstat = os.lstat
1204 dirkind = stat.S_IFDIR
1186 dirkind = stat.S_IFDIR
1205 regkind = stat.S_IFREG
1187 regkind = stat.S_IFREG
1206 lnkkind = stat.S_IFLNK
1188 lnkkind = stat.S_IFLNK
1207 join = self._join
1189 join = self._join
1208
1190
1209 exact = skipstep3 = False
1191 exact = skipstep3 = False
1210 if match.isexact(): # match.exact
1192 if match.isexact(): # match.exact
1211 exact = True
1193 exact = True
1212 dirignore = util.always # skip step 2
1194 dirignore = util.always # skip step 2
1213 elif match.prefix(): # match.match, no patterns
1195 elif match.prefix(): # match.match, no patterns
1214 skipstep3 = True
1196 skipstep3 = True
1215
1197
1216 if not exact and self._checkcase:
1198 if not exact and self._checkcase:
1217 normalize = self._normalize
1199 normalize = self._normalize
1218 normalizefile = self._normalizefile
1200 normalizefile = self._normalizefile
1219 skipstep3 = False
1201 skipstep3 = False
1220 else:
1202 else:
1221 normalize = self._normalize
1203 normalize = self._normalize
1222 normalizefile = None
1204 normalizefile = None
1223
1205
1224 # step 1: find all explicit files
1206 # step 1: find all explicit files
1225 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1207 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1226 if matchtdir:
1208 if matchtdir:
1227 for d in work:
1209 for d in work:
1228 matchtdir(d[0])
1210 matchtdir(d[0])
1229 for d in dirsnotfound:
1211 for d in dirsnotfound:
1230 matchtdir(d)
1212 matchtdir(d)
1231
1213
1232 skipstep3 = skipstep3 and not (work or dirsnotfound)
1214 skipstep3 = skipstep3 and not (work or dirsnotfound)
1233 work = [d for d in work if not dirignore(d[0])]
1215 work = [d for d in work if not dirignore(d[0])]
1234
1216
1235 # step 2: visit subdirectories
1217 # step 2: visit subdirectories
1236 def traverse(work, alreadynormed):
1218 def traverse(work, alreadynormed):
1237 wadd = work.append
1219 wadd = work.append
1238 while work:
1220 while work:
1239 tracing.counter('dirstate.walk work', len(work))
1221 tracing.counter('dirstate.walk work', len(work))
1240 nd = work.pop()
1222 nd = work.pop()
1241 visitentries = match.visitchildrenset(nd)
1223 visitentries = match.visitchildrenset(nd)
1242 if not visitentries:
1224 if not visitentries:
1243 continue
1225 continue
1244 if visitentries == b'this' or visitentries == b'all':
1226 if visitentries == b'this' or visitentries == b'all':
1245 visitentries = None
1227 visitentries = None
1246 skip = None
1228 skip = None
1247 if nd != b'':
1229 if nd != b'':
1248 skip = b'.hg'
1230 skip = b'.hg'
1249 try:
1231 try:
1250 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1232 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1251 entries = listdir(join(nd), stat=True, skip=skip)
1233 entries = listdir(join(nd), stat=True, skip=skip)
1252 except OSError as inst:
1234 except OSError as inst:
1253 if inst.errno in (errno.EACCES, errno.ENOENT):
1235 if inst.errno in (errno.EACCES, errno.ENOENT):
1254 match.bad(
1236 match.bad(
1255 self.pathto(nd), encoding.strtolocal(inst.strerror)
1237 self.pathto(nd), encoding.strtolocal(inst.strerror)
1256 )
1238 )
1257 continue
1239 continue
1258 raise
1240 raise
1259 for f, kind, st in entries:
1241 for f, kind, st in entries:
1260 # Some matchers may return files in the visitentries set,
1242 # Some matchers may return files in the visitentries set,
1261 # instead of 'this', if the matcher explicitly mentions them
1243 # instead of 'this', if the matcher explicitly mentions them
1262 # and is not an exactmatcher. This is acceptable; we do not
1244 # and is not an exactmatcher. This is acceptable; we do not
1263 # make any hard assumptions about file-or-directory below
1245 # make any hard assumptions about file-or-directory below
1264 # based on the presence of `f` in visitentries. If
1246 # based on the presence of `f` in visitentries. If
1265 # visitchildrenset returned a set, we can always skip the
1247 # visitchildrenset returned a set, we can always skip the
1266 # entries *not* in the set it provided regardless of whether
1248 # entries *not* in the set it provided regardless of whether
1267 # they're actually a file or a directory.
1249 # they're actually a file or a directory.
1268 if visitentries and f not in visitentries:
1250 if visitentries and f not in visitentries:
1269 continue
1251 continue
1270 if normalizefile:
1252 if normalizefile:
1271 # even though f might be a directory, we're only
1253 # even though f might be a directory, we're only
1272 # interested in comparing it to files currently in the
1254 # interested in comparing it to files currently in the
1273 # dmap -- therefore normalizefile is enough
1255 # dmap -- therefore normalizefile is enough
1274 nf = normalizefile(
1256 nf = normalizefile(
1275 nd and (nd + b"/" + f) or f, True, True
1257 nd and (nd + b"/" + f) or f, True, True
1276 )
1258 )
1277 else:
1259 else:
1278 nf = nd and (nd + b"/" + f) or f
1260 nf = nd and (nd + b"/" + f) or f
1279 if nf not in results:
1261 if nf not in results:
1280 if kind == dirkind:
1262 if kind == dirkind:
1281 if not ignore(nf):
1263 if not ignore(nf):
1282 if matchtdir:
1264 if matchtdir:
1283 matchtdir(nf)
1265 matchtdir(nf)
1284 wadd(nf)
1266 wadd(nf)
1285 if nf in dmap and (matchalways or matchfn(nf)):
1267 if nf in dmap and (matchalways or matchfn(nf)):
1286 results[nf] = None
1268 results[nf] = None
1287 elif kind == regkind or kind == lnkkind:
1269 elif kind == regkind or kind == lnkkind:
1288 if nf in dmap:
1270 if nf in dmap:
1289 if matchalways or matchfn(nf):
1271 if matchalways or matchfn(nf):
1290 results[nf] = st
1272 results[nf] = st
1291 elif (matchalways or matchfn(nf)) and not ignore(
1273 elif (matchalways or matchfn(nf)) and not ignore(
1292 nf
1274 nf
1293 ):
1275 ):
1294 # unknown file -- normalize if necessary
1276 # unknown file -- normalize if necessary
1295 if not alreadynormed:
1277 if not alreadynormed:
1296 nf = normalize(nf, False, True)
1278 nf = normalize(nf, False, True)
1297 results[nf] = st
1279 results[nf] = st
1298 elif nf in dmap and (matchalways or matchfn(nf)):
1280 elif nf in dmap and (matchalways or matchfn(nf)):
1299 results[nf] = None
1281 results[nf] = None
1300
1282
1301 for nd, d in work:
1283 for nd, d in work:
1302 # alreadynormed means that processwork doesn't have to do any
1284 # alreadynormed means that processwork doesn't have to do any
1303 # expensive directory normalization
1285 # expensive directory normalization
1304 alreadynormed = not normalize or nd == d
1286 alreadynormed = not normalize or nd == d
1305 traverse([d], alreadynormed)
1287 traverse([d], alreadynormed)
1306
1288
1307 for s in subrepos:
1289 for s in subrepos:
1308 del results[s]
1290 del results[s]
1309 del results[b'.hg']
1291 del results[b'.hg']
1310
1292
1311 # step 3: visit remaining files from dmap
1293 # step 3: visit remaining files from dmap
1312 if not skipstep3 and not exact:
1294 if not skipstep3 and not exact:
1313 # If a dmap file is not in results yet, it was either
1295 # If a dmap file is not in results yet, it was either
1314 # a) not matching matchfn b) ignored, c) missing, or d) under a
1296 # a) not matching matchfn b) ignored, c) missing, or d) under a
1315 # symlink directory.
1297 # symlink directory.
1316 if not results and matchalways:
1298 if not results and matchalways:
1317 visit = [f for f in dmap]
1299 visit = [f for f in dmap]
1318 else:
1300 else:
1319 visit = [f for f in dmap if f not in results and matchfn(f)]
1301 visit = [f for f in dmap if f not in results and matchfn(f)]
1320 visit.sort()
1302 visit.sort()
1321
1303
1322 if unknown:
1304 if unknown:
1323 # unknown == True means we walked all dirs under the roots
1305 # unknown == True means we walked all dirs under the roots
1324 # that wasn't ignored, and everything that matched was stat'ed
1306 # that wasn't ignored, and everything that matched was stat'ed
1325 # and is already in results.
1307 # and is already in results.
1326 # The rest must thus be ignored or under a symlink.
1308 # The rest must thus be ignored or under a symlink.
1327 audit_path = pathutil.pathauditor(self._root, cached=True)
1309 audit_path = pathutil.pathauditor(self._root, cached=True)
1328
1310
1329 for nf in iter(visit):
1311 for nf in iter(visit):
1330 # If a stat for the same file was already added with a
1312 # If a stat for the same file was already added with a
1331 # different case, don't add one for this, since that would
1313 # different case, don't add one for this, since that would
1332 # make it appear as if the file exists under both names
1314 # make it appear as if the file exists under both names
1333 # on disk.
1315 # on disk.
1334 if (
1316 if (
1335 normalizefile
1317 normalizefile
1336 and normalizefile(nf, True, True) in results
1318 and normalizefile(nf, True, True) in results
1337 ):
1319 ):
1338 results[nf] = None
1320 results[nf] = None
1339 # Report ignored items in the dmap as long as they are not
1321 # Report ignored items in the dmap as long as they are not
1340 # under a symlink directory.
1322 # under a symlink directory.
1341 elif audit_path.check(nf):
1323 elif audit_path.check(nf):
1342 try:
1324 try:
1343 results[nf] = lstat(join(nf))
1325 results[nf] = lstat(join(nf))
1344 # file was just ignored, no links, and exists
1326 # file was just ignored, no links, and exists
1345 except OSError:
1327 except OSError:
1346 # file doesn't exist
1328 # file doesn't exist
1347 results[nf] = None
1329 results[nf] = None
1348 else:
1330 else:
1349 # It's either missing or under a symlink directory
1331 # It's either missing or under a symlink directory
1350 # which we in this case report as missing
1332 # which we in this case report as missing
1351 results[nf] = None
1333 results[nf] = None
1352 else:
1334 else:
1353 # We may not have walked the full directory tree above,
1335 # We may not have walked the full directory tree above,
1354 # so stat and check everything we missed.
1336 # so stat and check everything we missed.
1355 iv = iter(visit)
1337 iv = iter(visit)
1356 for st in util.statfiles([join(i) for i in visit]):
1338 for st in util.statfiles([join(i) for i in visit]):
1357 results[next(iv)] = st
1339 results[next(iv)] = st
1358 return results
1340 return results
1359
1341
1360 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1342 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1361 # Force Rayon (Rust parallelism library) to respect the number of
1343 # Force Rayon (Rust parallelism library) to respect the number of
1362 # workers. This is a temporary workaround until Rust code knows
1344 # workers. This is a temporary workaround until Rust code knows
1363 # how to read the config file.
1345 # how to read the config file.
1364 numcpus = self._ui.configint(b"worker", b"numcpus")
1346 numcpus = self._ui.configint(b"worker", b"numcpus")
1365 if numcpus is not None:
1347 if numcpus is not None:
1366 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1348 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1367
1349
1368 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1350 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1369 if not workers_enabled:
1351 if not workers_enabled:
1370 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1352 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1371
1353
1372 (
1354 (
1373 lookup,
1355 lookup,
1374 modified,
1356 modified,
1375 added,
1357 added,
1376 removed,
1358 removed,
1377 deleted,
1359 deleted,
1378 clean,
1360 clean,
1379 ignored,
1361 ignored,
1380 unknown,
1362 unknown,
1381 warnings,
1363 warnings,
1382 bad,
1364 bad,
1383 traversed,
1365 traversed,
1384 dirty,
1366 dirty,
1385 ) = rustmod.status(
1367 ) = rustmod.status(
1386 self._map._rustmap,
1368 self._map._rustmap,
1387 matcher,
1369 matcher,
1388 self._rootdir,
1370 self._rootdir,
1389 self._ignorefiles(),
1371 self._ignorefiles(),
1390 self._checkexec,
1372 self._checkexec,
1391 self._lastnormaltime,
1373 self._lastnormaltime,
1392 bool(list_clean),
1374 bool(list_clean),
1393 bool(list_ignored),
1375 bool(list_ignored),
1394 bool(list_unknown),
1376 bool(list_unknown),
1395 bool(matcher.traversedir),
1377 bool(matcher.traversedir),
1396 )
1378 )
1397
1379
1398 self._dirty |= dirty
1380 self._dirty |= dirty
1399
1381
1400 if matcher.traversedir:
1382 if matcher.traversedir:
1401 for dir in traversed:
1383 for dir in traversed:
1402 matcher.traversedir(dir)
1384 matcher.traversedir(dir)
1403
1385
1404 if self._ui.warn:
1386 if self._ui.warn:
1405 for item in warnings:
1387 for item in warnings:
1406 if isinstance(item, tuple):
1388 if isinstance(item, tuple):
1407 file_path, syntax = item
1389 file_path, syntax = item
1408 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1390 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1409 file_path,
1391 file_path,
1410 syntax,
1392 syntax,
1411 )
1393 )
1412 self._ui.warn(msg)
1394 self._ui.warn(msg)
1413 else:
1395 else:
1414 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1396 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1415 self._ui.warn(
1397 self._ui.warn(
1416 msg
1398 msg
1417 % (
1399 % (
1418 pathutil.canonpath(
1400 pathutil.canonpath(
1419 self._rootdir, self._rootdir, item
1401 self._rootdir, self._rootdir, item
1420 ),
1402 ),
1421 b"No such file or directory",
1403 b"No such file or directory",
1422 )
1404 )
1423 )
1405 )
1424
1406
1425 for (fn, message) in bad:
1407 for (fn, message) in bad:
1426 matcher.bad(fn, encoding.strtolocal(message))
1408 matcher.bad(fn, encoding.strtolocal(message))
1427
1409
1428 status = scmutil.status(
1410 status = scmutil.status(
1429 modified=modified,
1411 modified=modified,
1430 added=added,
1412 added=added,
1431 removed=removed,
1413 removed=removed,
1432 deleted=deleted,
1414 deleted=deleted,
1433 unknown=unknown,
1415 unknown=unknown,
1434 ignored=ignored,
1416 ignored=ignored,
1435 clean=clean,
1417 clean=clean,
1436 )
1418 )
1437 return (lookup, status)
1419 return (lookup, status)
1438
1420
1439 def status(self, match, subrepos, ignored, clean, unknown):
1421 def status(self, match, subrepos, ignored, clean, unknown):
1440 """Determine the status of the working copy relative to the
1422 """Determine the status of the working copy relative to the
1441 dirstate and return a pair of (unsure, status), where status is of type
1423 dirstate and return a pair of (unsure, status), where status is of type
1442 scmutil.status and:
1424 scmutil.status and:
1443
1425
1444 unsure:
1426 unsure:
1445 files that might have been modified since the dirstate was
1427 files that might have been modified since the dirstate was
1446 written, but need to be read to be sure (size is the same
1428 written, but need to be read to be sure (size is the same
1447 but mtime differs)
1429 but mtime differs)
1448 status.modified:
1430 status.modified:
1449 files that have definitely been modified since the dirstate
1431 files that have definitely been modified since the dirstate
1450 was written (different size or mode)
1432 was written (different size or mode)
1451 status.clean:
1433 status.clean:
1452 files that have definitely not been modified since the
1434 files that have definitely not been modified since the
1453 dirstate was written
1435 dirstate was written
1454 """
1436 """
1455 listignored, listclean, listunknown = ignored, clean, unknown
1437 listignored, listclean, listunknown = ignored, clean, unknown
1456 lookup, modified, added, unknown, ignored = [], [], [], [], []
1438 lookup, modified, added, unknown, ignored = [], [], [], [], []
1457 removed, deleted, clean = [], [], []
1439 removed, deleted, clean = [], [], []
1458
1440
1459 dmap = self._map
1441 dmap = self._map
1460 dmap.preload()
1442 dmap.preload()
1461
1443
1462 use_rust = True
1444 use_rust = True
1463
1445
1464 allowed_matchers = (
1446 allowed_matchers = (
1465 matchmod.alwaysmatcher,
1447 matchmod.alwaysmatcher,
1466 matchmod.exactmatcher,
1448 matchmod.exactmatcher,
1467 matchmod.includematcher,
1449 matchmod.includematcher,
1468 )
1450 )
1469
1451
1470 if rustmod is None:
1452 if rustmod is None:
1471 use_rust = False
1453 use_rust = False
1472 elif self._checkcase:
1454 elif self._checkcase:
1473 # Case-insensitive filesystems are not handled yet
1455 # Case-insensitive filesystems are not handled yet
1474 use_rust = False
1456 use_rust = False
1475 elif subrepos:
1457 elif subrepos:
1476 use_rust = False
1458 use_rust = False
1477 elif sparse.enabled:
1459 elif sparse.enabled:
1478 use_rust = False
1460 use_rust = False
1479 elif not isinstance(match, allowed_matchers):
1461 elif not isinstance(match, allowed_matchers):
1480 # Some matchers have yet to be implemented
1462 # Some matchers have yet to be implemented
1481 use_rust = False
1463 use_rust = False
1482
1464
1483 if use_rust:
1465 if use_rust:
1484 try:
1466 try:
1485 return self._rust_status(
1467 return self._rust_status(
1486 match, listclean, listignored, listunknown
1468 match, listclean, listignored, listunknown
1487 )
1469 )
1488 except rustmod.FallbackError:
1470 except rustmod.FallbackError:
1489 pass
1471 pass
1490
1472
1491 def noop(f):
1473 def noop(f):
1492 pass
1474 pass
1493
1475
1494 dcontains = dmap.__contains__
1476 dcontains = dmap.__contains__
1495 dget = dmap.__getitem__
1477 dget = dmap.__getitem__
1496 ladd = lookup.append # aka "unsure"
1478 ladd = lookup.append # aka "unsure"
1497 madd = modified.append
1479 madd = modified.append
1498 aadd = added.append
1480 aadd = added.append
1499 uadd = unknown.append if listunknown else noop
1481 uadd = unknown.append if listunknown else noop
1500 iadd = ignored.append if listignored else noop
1482 iadd = ignored.append if listignored else noop
1501 radd = removed.append
1483 radd = removed.append
1502 dadd = deleted.append
1484 dadd = deleted.append
1503 cadd = clean.append if listclean else noop
1485 cadd = clean.append if listclean else noop
1504 mexact = match.exact
1486 mexact = match.exact
1505 dirignore = self._dirignore
1487 dirignore = self._dirignore
1506 checkexec = self._checkexec
1488 checkexec = self._checkexec
1507 copymap = self._map.copymap
1489 copymap = self._map.copymap
1508 lastnormaltime = self._lastnormaltime
1490 lastnormaltime = self._lastnormaltime
1509
1491
1510 # We need to do full walks when either
1492 # We need to do full walks when either
1511 # - we're listing all clean files, or
1493 # - we're listing all clean files, or
1512 # - match.traversedir does something, because match.traversedir should
1494 # - match.traversedir does something, because match.traversedir should
1513 # be called for every dir in the working dir
1495 # be called for every dir in the working dir
1514 full = listclean or match.traversedir is not None
1496 full = listclean or match.traversedir is not None
1515 for fn, st in pycompat.iteritems(
1497 for fn, st in pycompat.iteritems(
1516 self.walk(match, subrepos, listunknown, listignored, full=full)
1498 self.walk(match, subrepos, listunknown, listignored, full=full)
1517 ):
1499 ):
1518 if not dcontains(fn):
1500 if not dcontains(fn):
1519 if (listignored or mexact(fn)) and dirignore(fn):
1501 if (listignored or mexact(fn)) and dirignore(fn):
1520 if listignored:
1502 if listignored:
1521 iadd(fn)
1503 iadd(fn)
1522 else:
1504 else:
1523 uadd(fn)
1505 uadd(fn)
1524 continue
1506 continue
1525
1507
1526 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1508 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1527 # written like that for performance reasons. dmap[fn] is not a
1509 # written like that for performance reasons. dmap[fn] is not a
1528 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1510 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1529 # opcode has fast paths when the value to be unpacked is a tuple or
1511 # opcode has fast paths when the value to be unpacked is a tuple or
1530 # a list, but falls back to creating a full-fledged iterator in
1512 # a list, but falls back to creating a full-fledged iterator in
1531 # general. That is much slower than simply accessing and storing the
1513 # general. That is much slower than simply accessing and storing the
1532 # tuple members one by one.
1514 # tuple members one by one.
1533 t = dget(fn)
1515 t = dget(fn)
1534 mode = t.mode
1516 mode = t.mode
1535 size = t.size
1517 size = t.size
1536 time = t.mtime
1518 time = t.mtime
1537
1519
1538 if not st and t.tracked:
1520 if not st and t.tracked:
1539 dadd(fn)
1521 dadd(fn)
1540 elif t.merged:
1522 elif t.merged:
1541 madd(fn)
1523 madd(fn)
1542 elif t.added:
1524 elif t.added:
1543 aadd(fn)
1525 aadd(fn)
1544 elif t.removed:
1526 elif t.removed:
1545 radd(fn)
1527 radd(fn)
1546 elif t.tracked:
1528 elif t.tracked:
1547 if (
1529 if (
1548 size >= 0
1530 size >= 0
1549 and (
1531 and (
1550 (size != st.st_size and size != st.st_size & _rangemask)
1532 (size != st.st_size and size != st.st_size & _rangemask)
1551 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1533 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1552 )
1534 )
1553 or t.from_p2
1535 or t.from_p2
1554 or fn in copymap
1536 or fn in copymap
1555 ):
1537 ):
1556 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1538 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1557 # issue6456: Size returned may be longer due to
1539 # issue6456: Size returned may be longer due to
1558 # encryption on EXT-4 fscrypt, undecided.
1540 # encryption on EXT-4 fscrypt, undecided.
1559 ladd(fn)
1541 ladd(fn)
1560 else:
1542 else:
1561 madd(fn)
1543 madd(fn)
1562 elif (
1544 elif (
1563 time != st[stat.ST_MTIME]
1545 time != st[stat.ST_MTIME]
1564 and time != st[stat.ST_MTIME] & _rangemask
1546 and time != st[stat.ST_MTIME] & _rangemask
1565 ):
1547 ):
1566 ladd(fn)
1548 ladd(fn)
1567 elif st[stat.ST_MTIME] == lastnormaltime:
1549 elif st[stat.ST_MTIME] == lastnormaltime:
1568 # fn may have just been marked as normal and it may have
1550 # fn may have just been marked as normal and it may have
1569 # changed in the same second without changing its size.
1551 # changed in the same second without changing its size.
1570 # This can happen if we quickly do multiple commits.
1552 # This can happen if we quickly do multiple commits.
1571 # Force lookup, so we don't miss such a racy file change.
1553 # Force lookup, so we don't miss such a racy file change.
1572 ladd(fn)
1554 ladd(fn)
1573 elif listclean:
1555 elif listclean:
1574 cadd(fn)
1556 cadd(fn)
1575 status = scmutil.status(
1557 status = scmutil.status(
1576 modified, added, removed, deleted, unknown, ignored, clean
1558 modified, added, removed, deleted, unknown, ignored, clean
1577 )
1559 )
1578 return (lookup, status)
1560 return (lookup, status)
1579
1561
1580 def matches(self, match):
1562 def matches(self, match):
1581 """
1563 """
1582 return files in the dirstate (in whatever state) filtered by match
1564 return files in the dirstate (in whatever state) filtered by match
1583 """
1565 """
1584 dmap = self._map
1566 dmap = self._map
1585 if rustmod is not None:
1567 if rustmod is not None:
1586 dmap = self._map._rustmap
1568 dmap = self._map._rustmap
1587
1569
1588 if match.always():
1570 if match.always():
1589 return dmap.keys()
1571 return dmap.keys()
1590 files = match.files()
1572 files = match.files()
1591 if match.isexact():
1573 if match.isexact():
1592 # fast path -- filter the other way around, since typically files is
1574 # fast path -- filter the other way around, since typically files is
1593 # much smaller than dmap
1575 # much smaller than dmap
1594 return [f for f in files if f in dmap]
1576 return [f for f in files if f in dmap]
1595 if match.prefix() and all(fn in dmap for fn in files):
1577 if match.prefix() and all(fn in dmap for fn in files):
1596 # fast path -- all the values are known to be files, so just return
1578 # fast path -- all the values are known to be files, so just return
1597 # that
1579 # that
1598 return list(files)
1580 return list(files)
1599 return [f for f in dmap if match(f)]
1581 return [f for f in dmap if match(f)]
1600
1582
1601 def _actualfilename(self, tr):
1583 def _actualfilename(self, tr):
1602 if tr:
1584 if tr:
1603 return self._pendingfilename
1585 return self._pendingfilename
1604 else:
1586 else:
1605 return self._filename
1587 return self._filename
1606
1588
1607 def savebackup(self, tr, backupname):
1589 def savebackup(self, tr, backupname):
1608 '''Save current dirstate into backup file'''
1590 '''Save current dirstate into backup file'''
1609 filename = self._actualfilename(tr)
1591 filename = self._actualfilename(tr)
1610 assert backupname != filename
1592 assert backupname != filename
1611
1593
1612 # use '_writedirstate' instead of 'write' to write changes certainly,
1594 # use '_writedirstate' instead of 'write' to write changes certainly,
1613 # because the latter omits writing out if transaction is running.
1595 # because the latter omits writing out if transaction is running.
1614 # output file will be used to create backup of dirstate at this point.
1596 # output file will be used to create backup of dirstate at this point.
1615 if self._dirty or not self._opener.exists(filename):
1597 if self._dirty or not self._opener.exists(filename):
1616 self._writedirstate(
1598 self._writedirstate(
1617 tr,
1599 tr,
1618 self._opener(filename, b"w", atomictemp=True, checkambig=True),
1600 self._opener(filename, b"w", atomictemp=True, checkambig=True),
1619 )
1601 )
1620
1602
1621 if tr:
1603 if tr:
1622 # ensure that subsequent tr.writepending returns True for
1604 # ensure that subsequent tr.writepending returns True for
1623 # changes written out above, even if dirstate is never
1605 # changes written out above, even if dirstate is never
1624 # changed after this
1606 # changed after this
1625 tr.addfilegenerator(
1607 tr.addfilegenerator(
1626 b'dirstate',
1608 b'dirstate',
1627 (self._filename,),
1609 (self._filename,),
1628 lambda f: self._writedirstate(tr, f),
1610 lambda f: self._writedirstate(tr, f),
1629 location=b'plain',
1611 location=b'plain',
1630 )
1612 )
1631
1613
1632 # ensure that pending file written above is unlinked at
1614 # ensure that pending file written above is unlinked at
1633 # failure, even if tr.writepending isn't invoked until the
1615 # failure, even if tr.writepending isn't invoked until the
1634 # end of this transaction
1616 # end of this transaction
1635 tr.registertmp(filename, location=b'plain')
1617 tr.registertmp(filename, location=b'plain')
1636
1618
1637 self._opener.tryunlink(backupname)
1619 self._opener.tryunlink(backupname)
1638 # hardlink backup is okay because _writedirstate is always called
1620 # hardlink backup is okay because _writedirstate is always called
1639 # with an "atomictemp=True" file.
1621 # with an "atomictemp=True" file.
1640 util.copyfile(
1622 util.copyfile(
1641 self._opener.join(filename),
1623 self._opener.join(filename),
1642 self._opener.join(backupname),
1624 self._opener.join(backupname),
1643 hardlink=True,
1625 hardlink=True,
1644 )
1626 )
1645
1627
1646 def restorebackup(self, tr, backupname):
1628 def restorebackup(self, tr, backupname):
1647 '''Restore dirstate by backup file'''
1629 '''Restore dirstate by backup file'''
1648 # this "invalidate()" prevents "wlock.release()" from writing
1630 # this "invalidate()" prevents "wlock.release()" from writing
1649 # changes of dirstate out after restoring from backup file
1631 # changes of dirstate out after restoring from backup file
1650 self.invalidate()
1632 self.invalidate()
1651 filename = self._actualfilename(tr)
1633 filename = self._actualfilename(tr)
1652 o = self._opener
1634 o = self._opener
1653 if util.samefile(o.join(backupname), o.join(filename)):
1635 if util.samefile(o.join(backupname), o.join(filename)):
1654 o.unlink(backupname)
1636 o.unlink(backupname)
1655 else:
1637 else:
1656 o.rename(backupname, filename, checkambig=True)
1638 o.rename(backupname, filename, checkambig=True)
1657
1639
1658 def clearbackup(self, tr, backupname):
1640 def clearbackup(self, tr, backupname):
1659 '''Clear backup file'''
1641 '''Clear backup file'''
1660 self._opener.unlink(backupname)
1642 self._opener.unlink(backupname)
@@ -1,223 +1,220
1 from __future__ import absolute_import, print_function
1 from __future__ import absolute_import, print_function
2
2
3 import contextlib
3 import contextlib
4
4
5 from . import util as interfaceutil
5 from . import util as interfaceutil
6
6
7
7
8 class idirstate(interfaceutil.Interface):
8 class idirstate(interfaceutil.Interface):
9 def __init__(
9 def __init__(
10 opener,
10 opener,
11 ui,
11 ui,
12 root,
12 root,
13 validate,
13 validate,
14 sparsematchfn,
14 sparsematchfn,
15 nodeconstants,
15 nodeconstants,
16 use_dirstate_v2,
16 use_dirstate_v2,
17 ):
17 ):
18 """Create a new dirstate object.
18 """Create a new dirstate object.
19
19
20 opener is an open()-like callable that can be used to open the
20 opener is an open()-like callable that can be used to open the
21 dirstate file; root is the root of the directory tracked by
21 dirstate file; root is the root of the directory tracked by
22 the dirstate.
22 the dirstate.
23 """
23 """
24
24
25 # TODO: all these private methods and attributes should be made
25 # TODO: all these private methods and attributes should be made
26 # public or removed from the interface.
26 # public or removed from the interface.
27 _ignore = interfaceutil.Attribute("""Matcher for ignored files.""")
27 _ignore = interfaceutil.Attribute("""Matcher for ignored files.""")
28
28
29 def _ignorefiles():
29 def _ignorefiles():
30 """Return a list of files containing patterns to ignore."""
30 """Return a list of files containing patterns to ignore."""
31
31
32 def _ignorefileandline(f):
32 def _ignorefileandline(f):
33 """Given a file `f`, return the ignore file and line that ignores it."""
33 """Given a file `f`, return the ignore file and line that ignores it."""
34
34
35 _checklink = interfaceutil.Attribute("""Callable for checking symlinks.""")
35 _checklink = interfaceutil.Attribute("""Callable for checking symlinks.""")
36 _checkexec = interfaceutil.Attribute("""Callable for checking exec bits.""")
36 _checkexec = interfaceutil.Attribute("""Callable for checking exec bits.""")
37
37
38 @contextlib.contextmanager
38 @contextlib.contextmanager
39 def parentchange():
39 def parentchange():
40 """Context manager for handling dirstate parents.
40 """Context manager for handling dirstate parents.
41
41
42 If an exception occurs in the scope of the context manager,
42 If an exception occurs in the scope of the context manager,
43 the incoherent dirstate won't be written when wlock is
43 the incoherent dirstate won't be written when wlock is
44 released.
44 released.
45 """
45 """
46
46
47 def pendingparentchange():
47 def pendingparentchange():
48 """Returns true if the dirstate is in the middle of a set of changes
48 """Returns true if the dirstate is in the middle of a set of changes
49 that modify the dirstate parent.
49 that modify the dirstate parent.
50 """
50 """
51
51
52 def hasdir(d):
52 def hasdir(d):
53 pass
53 pass
54
54
55 def flagfunc(buildfallback):
55 def flagfunc(buildfallback):
56 pass
56 pass
57
57
58 def getcwd():
58 def getcwd():
59 """Return the path from which a canonical path is calculated.
59 """Return the path from which a canonical path is calculated.
60
60
61 This path should be used to resolve file patterns or to convert
61 This path should be used to resolve file patterns or to convert
62 canonical paths back to file paths for display. It shouldn't be
62 canonical paths back to file paths for display. It shouldn't be
63 used to get real file paths. Use vfs functions instead.
63 used to get real file paths. Use vfs functions instead.
64 """
64 """
65
65
66 def pathto(f, cwd=None):
66 def pathto(f, cwd=None):
67 pass
67 pass
68
68
69 def __getitem__(key):
69 def __getitem__(key):
70 """Return the current state of key (a filename) in the dirstate.
70 """Return the current state of key (a filename) in the dirstate.
71
71
72 States are:
72 States are:
73 n normal
73 n normal
74 m needs merging
74 m needs merging
75 r marked for removal
75 r marked for removal
76 a marked for addition
76 a marked for addition
77 ? not tracked
77 ? not tracked
78 """
78 """
79
79
80 def __contains__(key):
80 def __contains__(key):
81 """Check if bytestring `key` is known to the dirstate."""
81 """Check if bytestring `key` is known to the dirstate."""
82
82
83 def __iter__():
83 def __iter__():
84 """Iterate the dirstate's contained filenames as bytestrings."""
84 """Iterate the dirstate's contained filenames as bytestrings."""
85
85
86 def items():
86 def items():
87 """Iterate the dirstate's entries as (filename, DirstateItem.
87 """Iterate the dirstate's entries as (filename, DirstateItem.
88
88
89 As usual, filename is a bytestring.
89 As usual, filename is a bytestring.
90 """
90 """
91
91
92 iteritems = items
92 iteritems = items
93
93
94 def parents():
94 def parents():
95 pass
95 pass
96
96
97 def p1():
97 def p1():
98 pass
98 pass
99
99
100 def p2():
100 def p2():
101 pass
101 pass
102
102
103 def branch():
103 def branch():
104 pass
104 pass
105
105
106 def setparents(p1, p2=None):
106 def setparents(p1, p2=None):
107 """Set dirstate parents to p1 and p2.
107 """Set dirstate parents to p1 and p2.
108
108
109 When moving from two parents to one, 'm' merged entries a
109 When moving from two parents to one, 'm' merged entries a
110 adjusted to normal and previous copy records discarded and
110 adjusted to normal and previous copy records discarded and
111 returned by the call.
111 returned by the call.
112
112
113 See localrepo.setparents()
113 See localrepo.setparents()
114 """
114 """
115
115
116 def setbranch(branch):
116 def setbranch(branch):
117 pass
117 pass
118
118
119 def invalidate():
119 def invalidate():
120 """Causes the next access to reread the dirstate.
120 """Causes the next access to reread the dirstate.
121
121
122 This is different from localrepo.invalidatedirstate() because it always
122 This is different from localrepo.invalidatedirstate() because it always
123 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
123 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
124 check whether the dirstate has changed before rereading it."""
124 check whether the dirstate has changed before rereading it."""
125
125
126 def copy(source, dest):
126 def copy(source, dest):
127 """Mark dest as a copy of source. Unmark dest if source is None."""
127 """Mark dest as a copy of source. Unmark dest if source is None."""
128
128
129 def copied(file):
129 def copied(file):
130 pass
130 pass
131
131
132 def copies():
132 def copies():
133 pass
133 pass
134
134
135 def drop(f):
136 '''Drop a file from the dirstate'''
137
138 def normalize(path, isknown=False, ignoremissing=False):
135 def normalize(path, isknown=False, ignoremissing=False):
139 """
136 """
140 normalize the case of a pathname when on a casefolding filesystem
137 normalize the case of a pathname when on a casefolding filesystem
141
138
142 isknown specifies whether the filename came from walking the
139 isknown specifies whether the filename came from walking the
143 disk, to avoid extra filesystem access.
140 disk, to avoid extra filesystem access.
144
141
145 If ignoremissing is True, missing path are returned
142 If ignoremissing is True, missing path are returned
146 unchanged. Otherwise, we try harder to normalize possibly
143 unchanged. Otherwise, we try harder to normalize possibly
147 existing path components.
144 existing path components.
148
145
149 The normalized case is determined based on the following precedence:
146 The normalized case is determined based on the following precedence:
150
147
151 - version of name already stored in the dirstate
148 - version of name already stored in the dirstate
152 - version of name stored on disk
149 - version of name stored on disk
153 - version provided via command arguments
150 - version provided via command arguments
154 """
151 """
155
152
156 def clear():
153 def clear():
157 pass
154 pass
158
155
159 def rebuild(parent, allfiles, changedfiles=None):
156 def rebuild(parent, allfiles, changedfiles=None):
160 pass
157 pass
161
158
162 def identity():
159 def identity():
163 """Return identity of dirstate it to detect changing in storage
160 """Return identity of dirstate it to detect changing in storage
164
161
165 If identity of previous dirstate is equal to this, writing
162 If identity of previous dirstate is equal to this, writing
166 changes based on the former dirstate out can keep consistency.
163 changes based on the former dirstate out can keep consistency.
167 """
164 """
168
165
169 def write(tr):
166 def write(tr):
170 pass
167 pass
171
168
172 def addparentchangecallback(category, callback):
169 def addparentchangecallback(category, callback):
173 """add a callback to be called when the wd parents are changed
170 """add a callback to be called when the wd parents are changed
174
171
175 Callback will be called with the following arguments:
172 Callback will be called with the following arguments:
176 dirstate, (oldp1, oldp2), (newp1, newp2)
173 dirstate, (oldp1, oldp2), (newp1, newp2)
177
174
178 Category is a unique identifier to allow overwriting an old callback
175 Category is a unique identifier to allow overwriting an old callback
179 with a newer callback.
176 with a newer callback.
180 """
177 """
181
178
182 def walk(match, subrepos, unknown, ignored, full=True):
179 def walk(match, subrepos, unknown, ignored, full=True):
183 """
180 """
184 Walk recursively through the directory tree, finding all files
181 Walk recursively through the directory tree, finding all files
185 matched by match.
182 matched by match.
186
183
187 If full is False, maybe skip some known-clean files.
184 If full is False, maybe skip some known-clean files.
188
185
189 Return a dict mapping filename to stat-like object (either
186 Return a dict mapping filename to stat-like object (either
190 mercurial.osutil.stat instance or return value of os.stat()).
187 mercurial.osutil.stat instance or return value of os.stat()).
191
188
192 """
189 """
193
190
194 def status(match, subrepos, ignored, clean, unknown):
191 def status(match, subrepos, ignored, clean, unknown):
195 """Determine the status of the working copy relative to the
192 """Determine the status of the working copy relative to the
196 dirstate and return a pair of (unsure, status), where status is of type
193 dirstate and return a pair of (unsure, status), where status is of type
197 scmutil.status and:
194 scmutil.status and:
198
195
199 unsure:
196 unsure:
200 files that might have been modified since the dirstate was
197 files that might have been modified since the dirstate was
201 written, but need to be read to be sure (size is the same
198 written, but need to be read to be sure (size is the same
202 but mtime differs)
199 but mtime differs)
203 status.modified:
200 status.modified:
204 files that have definitely been modified since the dirstate
201 files that have definitely been modified since the dirstate
205 was written (different size or mode)
202 was written (different size or mode)
206 status.clean:
203 status.clean:
207 files that have definitely not been modified since the
204 files that have definitely not been modified since the
208 dirstate was written
205 dirstate was written
209 """
206 """
210
207
211 def matches(match):
208 def matches(match):
212 """
209 """
213 return files in the dirstate (in whatever state) filtered by match
210 return files in the dirstate (in whatever state) filtered by match
214 """
211 """
215
212
216 def savebackup(tr, backupname):
213 def savebackup(tr, backupname):
217 '''Save current dirstate into backup file'''
214 '''Save current dirstate into backup file'''
218
215
219 def restorebackup(tr, backupname):
216 def restorebackup(tr, backupname):
220 '''Restore dirstate by backup file'''
217 '''Restore dirstate by backup file'''
221
218
222 def clearbackup(tr, backupname):
219 def clearbackup(tr, backupname):
223 '''Clear backup file'''
220 '''Clear backup file'''
General Comments 0
You need to be logged in to leave comments. Login now