##// END OF EJS Templates
dirstate: add a comment about the semantic of `dirstate.clear`...
marmoute -
r51009:c175b485 default
parent child Browse files
Show More
@@ -1,1670 +1,1673 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import collections
9 import collections
10 import contextlib
10 import contextlib
11 import os
11 import os
12 import stat
12 import stat
13 import uuid
13 import uuid
14
14
15 from .i18n import _
15 from .i18n import _
16 from .pycompat import delattr
16 from .pycompat import delattr
17
17
18 from hgdemandimport import tracing
18 from hgdemandimport import tracing
19
19
20 from . import (
20 from . import (
21 dirstatemap,
21 dirstatemap,
22 encoding,
22 encoding,
23 error,
23 error,
24 match as matchmod,
24 match as matchmod,
25 node,
25 node,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 util,
30 util,
31 )
31 )
32
32
33 from .dirstateutils import (
33 from .dirstateutils import (
34 timestamp,
34 timestamp,
35 )
35 )
36
36
37 from .interfaces import (
37 from .interfaces import (
38 dirstate as intdirstate,
38 dirstate as intdirstate,
39 util as interfaceutil,
39 util as interfaceutil,
40 )
40 )
41
41
42 parsers = policy.importmod('parsers')
42 parsers = policy.importmod('parsers')
43 rustmod = policy.importrust('dirstate')
43 rustmod = policy.importrust('dirstate')
44
44
45 HAS_FAST_DIRSTATE_V2 = rustmod is not None
45 HAS_FAST_DIRSTATE_V2 = rustmod is not None
46
46
47 propertycache = util.propertycache
47 propertycache = util.propertycache
48 filecache = scmutil.filecache
48 filecache = scmutil.filecache
49 _rangemask = dirstatemap.rangemask
49 _rangemask = dirstatemap.rangemask
50
50
51 DirstateItem = dirstatemap.DirstateItem
51 DirstateItem = dirstatemap.DirstateItem
52
52
53
53
class repocache(filecache):
    """A ``filecache`` specialization for files stored under ``.hg/``."""

    def join(self, obj, fname):
        # Resolve fname through the repository's .hg opener.
        return obj._opener.join(fname)
59
59
60
60
class rootcache(filecache):
    """A ``filecache`` specialization for files in the repository root."""

    def join(self, obj, fname):
        # Resolve fname relative to the working-directory root.
        return obj._join(fname)
66
66
67
67
def requires_changing_parents(func):
    """Decorator enforcing that ``func`` runs inside a ``changing_parents``
    context and that the dirstate has not been invalidated.

    Raises error.ProgrammingError otherwise.
    """

    def wrap(self, *args, **kwargs):
        if not self.is_changing_parents:
            msg = 'calling `%s` outside of a changing_parents context'
            msg %= func.__name__
            raise error.ProgrammingError(msg)
        if self._invalidated_context:
            msg = 'calling `%s` after the dirstate was invalidated'
            # bug fix: the function name was never interpolated, so the
            # raised message used to contain a literal `%s`
            msg %= func.__name__
            raise error.ProgrammingError(msg)
        return func(self, *args, **kwargs)

    return wrap
80
80
81
81
def requires_changing_files(func):
    """Decorator enforcing that ``func`` runs inside a ``changing_files``
    context; raises error.ProgrammingError otherwise."""

    def wrap(self, *args, **kwargs):
        if self.is_changing_files:
            return func(self, *args, **kwargs)
        msg = 'calling `%s` outside of a `changing_files`' % func.__name__
        raise error.ProgrammingError(msg)

    return wrap
91
91
92
92
def requires_changing_any(func):
    """Decorator enforcing that ``func`` runs inside some dirstate-changing
    context (parents or files) that has not been invalidated.

    Raises error.ProgrammingError otherwise.
    """

    def wrap(self, *args, **kwargs):
        if not self.is_changing_any:
            msg = 'calling `%s` outside of a changing context'
            msg %= func.__name__
            raise error.ProgrammingError(msg)
        if self._invalidated_context:
            msg = 'calling `%s` after the dirstate was invalidated'
            # bug fix: the function name was never interpolated, so the
            # raised message used to contain a literal `%s`
            msg %= func.__name__
            raise error.ProgrammingError(msg)
        return func(self, *args, **kwargs)

    return wrap
105
105
106
106
def requires_not_changing_parents(func):
    """Decorator enforcing that ``func`` is NOT called from inside a
    ``changing_parents`` context; raises error.ProgrammingError if it is."""

    def wrap(self, *args, **kwargs):
        if not self.is_changing_parents:
            return func(self, *args, **kwargs)
        msg = 'calling `%s` inside of a changing_parents context'
        msg %= func.__name__
        raise error.ProgrammingError(msg)

    return wrap
116
116
117
117
118 CHANGE_TYPE_PARENTS = "parents"
118 CHANGE_TYPE_PARENTS = "parents"
119 CHANGE_TYPE_FILES = "files"
119 CHANGE_TYPE_FILES = "files"
120
120
121
121
122 @interfaceutil.implementer(intdirstate.idirstate)
122 @interfaceutil.implementer(intdirstate.idirstate)
123 class dirstate:
123 class dirstate:
    def __init__(
        self,
        opener,
        ui,
        root,
        validate,
        sparsematchfn,
        nodeconstants,
        use_dirstate_v2,
        use_tracked_hint=False,
    ):
        """Create a new dirstate object.

        opener is an open()-like callable that can be used to open the
        dirstate file; root is the root of the directory tracked by
        the dirstate.

        validate is a callable applied to parent nodes before returning
        them; sparsematchfn is either None (sparse disabled) or a callable
        building the sparse matcher; nodeconstants supplies nullid etc.;
        use_dirstate_v2/use_tracked_hint select on-disk format features.
        """
        self._use_dirstate_v2 = use_dirstate_v2
        self._use_tracked_hint = use_tracked_hint
        self._nodeconstants = nodeconstants
        self._opener = opener
        self._validate = validate
        self._root = root
        # Either build a sparse-matcher or None if sparse is disabled
        self._sparsematchfn = sparsematchfn
        # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
        # UNC path pointing to root share (issue4557)
        self._rootdir = pathutil.normasprefix(root)
        # True is any internal state may be different
        self._dirty = False
        # True if the set of tracked file may be different
        self._dirty_tracked_set = False
        self._ui = ui
        # cache-entry registry used by the filecache descriptors
        self._filecache = {}
        # nesting level of `changing_parents` context
        self._changing_level = 0
        # the change currently underway
        self._change_type = None
        # True if the current dirstate changing operations have been
        # invalidated (used to make sure all nested contexts have been exited)
        self._invalidated_context = False
        self._filename = b'dirstate'
        self._filename_th = b'dirstate-tracked-hint'
        self._pendingfilename = b'%s.pending' % self._filename
        self._plchangecallbacks = {}
        self._origpl = None
        self._mapcls = dirstatemap.dirstatemap
        # Access and cache cwd early, so we don't access it for the first time
        # after a working-copy update caused it to not exist (accessing it then
        # raises an exception).
        self._cwd
175
175
176 def prefetch_parents(self):
176 def prefetch_parents(self):
177 """make sure the parents are loaded
177 """make sure the parents are loaded
178
178
179 Used to avoid a race condition.
179 Used to avoid a race condition.
180 """
180 """
181 self._pl
181 self._pl
182
182
    @contextlib.contextmanager
    def _changing(self, repo, change_type):
        """Context manager wrapping one dirstate-changing operation.

        Requires the wlock to be held. Nesting is allowed only for the same
        `change_type`. On exception the dirstate is invalidated; when the
        outermost context exits normally, the dirstate is written out.
        """
        if repo.currentwlock() is None:
            msg = b"trying to change the dirstate without holding the wlock"
            raise error.ProgrammingError(msg)
        if self._invalidated_context:
            msg = "trying to use an invalidated dirstate before it has reset"
            raise error.ProgrammingError(msg)

        # remember whether a transaction was active on entry so we can
        # detect a mismatch on exit (see the check at the bottom)
        has_tr = repo.currenttransaction() is not None

        # different type of change are mutually exclusive
        if self._change_type is None:
            assert self._changing_level == 0
            self._change_type = change_type
        elif self._change_type != change_type:
            msg = (
                'trying to open "%s" dirstate-changing context while a "%s" is'
                ' already open'
            )
            msg %= (change_type, self._change_type)
            raise error.ProgrammingError(msg)
        self._changing_level += 1
        try:
            yield
        except: # re-raises
            self.invalidate()
            raise
        finally:
            tr = repo.currenttransaction()
            if self._changing_level > 0:
                if self._invalidated_context:
                    # make sure we invalidate anything an upper context might
                    # have changed.
                    self.invalidate()
                self._changing_level -= 1
                # The invalidation is complete once we exit the final context
                # manager
                if self._changing_level <= 0:
                    self._change_type = None
                    assert self._changing_level == 0
                    if self._invalidated_context:
                        self._invalidated_context = False
                    else:
                        # When an exception occured, `_invalidated_context`
                        # would have been set to True by the `invalidate`
                        # call earlier.
                        #
                        # We don't have more straightforward code, because the
                        # Exception catching (and the associated `invalidate`
                        # calling) might have been called by a nested context
                        # instead of the top level one.
                        self.write(tr)
            if has_tr != (tr is not None):
                if has_tr:
                    m = "transaction vanished while changing dirstate"
                else:
                    m = "transaction appeared while changing dirstate"
                raise error.ProgrammingError(m)
242
242
243 @contextlib.contextmanager
243 @contextlib.contextmanager
244 def changing_parents(self, repo):
244 def changing_parents(self, repo):
245 with self._changing(repo, CHANGE_TYPE_PARENTS) as c:
245 with self._changing(repo, CHANGE_TYPE_PARENTS) as c:
246 yield c
246 yield c
247
247
248 @contextlib.contextmanager
248 @contextlib.contextmanager
249 def changing_files(self, repo):
249 def changing_files(self, repo):
250 with self._changing(repo, CHANGE_TYPE_FILES) as c:
250 with self._changing(repo, CHANGE_TYPE_FILES) as c:
251 yield c
251 yield c
252
252
253 # here to help migration to the new code
253 # here to help migration to the new code
254 def parentchange(self):
254 def parentchange(self):
255 msg = (
255 msg = (
256 "Mercurial 6.4 and later requires call to "
256 "Mercurial 6.4 and later requires call to "
257 "`dirstate.changing_parents(repo)`"
257 "`dirstate.changing_parents(repo)`"
258 )
258 )
259 raise error.ProgrammingError(msg)
259 raise error.ProgrammingError(msg)
260
260
261 @property
261 @property
262 def is_changing_any(self):
262 def is_changing_any(self):
263 """Returns true if the dirstate is in the middle of a set of changes.
263 """Returns true if the dirstate is in the middle of a set of changes.
264
264
265 This returns True for any kind of change.
265 This returns True for any kind of change.
266 """
266 """
267 return self._changing_level > 0
267 return self._changing_level > 0
268
268
269 def pendingparentchange(self):
269 def pendingparentchange(self):
270 return self.is_changing_parent()
270 return self.is_changing_parent()
271
271
272 def is_changing_parent(self):
272 def is_changing_parent(self):
273 """Returns true if the dirstate is in the middle of a set of changes
273 """Returns true if the dirstate is in the middle of a set of changes
274 that modify the dirstate parent.
274 that modify the dirstate parent.
275 """
275 """
276 self._ui.deprecwarn(b"dirstate.is_changing_parents", b"6.5")
276 self._ui.deprecwarn(b"dirstate.is_changing_parents", b"6.5")
277 return self.is_changing_parents
277 return self.is_changing_parents
278
278
279 @property
279 @property
280 def is_changing_parents(self):
280 def is_changing_parents(self):
281 """Returns true if the dirstate is in the middle of a set of changes
281 """Returns true if the dirstate is in the middle of a set of changes
282 that modify the dirstate parent.
282 that modify the dirstate parent.
283 """
283 """
284 if self._changing_level <= 0:
284 if self._changing_level <= 0:
285 return False
285 return False
286 return self._change_type == CHANGE_TYPE_PARENTS
286 return self._change_type == CHANGE_TYPE_PARENTS
287
287
288 @property
288 @property
289 def is_changing_files(self):
289 def is_changing_files(self):
290 """Returns true if the dirstate is in the middle of a set of changes
290 """Returns true if the dirstate is in the middle of a set of changes
291 that modify the files tracked or their sources.
291 that modify the files tracked or their sources.
292 """
292 """
293 if self._changing_level <= 0:
293 if self._changing_level <= 0:
294 return False
294 return False
295 return self._change_type == CHANGE_TYPE_FILES
295 return self._change_type == CHANGE_TYPE_FILES
296
296
    @propertycache
    def _map(self):
        """Return the dirstate contents (see documentation for dirstatemap)."""
        # Assigning to self._map replaces this propertycache slot with the
        # built map, so later accesses are plain attribute lookups.
        self._map = self._mapcls(
            self._ui,
            self._opener,
            self._root,
            self._nodeconstants,
            self._use_dirstate_v2,
        )
        return self._map
308
308
309 @property
309 @property
310 def _sparsematcher(self):
310 def _sparsematcher(self):
311 """The matcher for the sparse checkout.
311 """The matcher for the sparse checkout.
312
312
313 The working directory may not include every file from a manifest. The
313 The working directory may not include every file from a manifest. The
314 matcher obtained by this property will match a path if it is to be
314 matcher obtained by this property will match a path if it is to be
315 included in the working directory.
315 included in the working directory.
316
316
317 When sparse if disabled, return None.
317 When sparse if disabled, return None.
318 """
318 """
319 if self._sparsematchfn is None:
319 if self._sparsematchfn is None:
320 return None
320 return None
321 # TODO there is potential to cache this property. For now, the matcher
321 # TODO there is potential to cache this property. For now, the matcher
322 # is resolved on every access. (But the called function does use a
322 # is resolved on every access. (But the called function does use a
323 # cache to keep the lookup fast.)
323 # cache to keep the lookup fast.)
324 return self._sparsematchfn()
324 return self._sparsematchfn()
325
325
326 @repocache(b'branch')
326 @repocache(b'branch')
327 def _branch(self):
327 def _branch(self):
328 try:
328 try:
329 return self._opener.read(b"branch").strip() or b"default"
329 return self._opener.read(b"branch").strip() or b"default"
330 except FileNotFoundError:
330 except FileNotFoundError:
331 return b"default"
331 return b"default"
332
332
333 @property
333 @property
334 def _pl(self):
334 def _pl(self):
335 return self._map.parents()
335 return self._map.parents()
336
336
337 def hasdir(self, d):
337 def hasdir(self, d):
338 return self._map.hastrackeddir(d)
338 return self._map.hastrackeddir(d)
339
339
340 @rootcache(b'.hgignore')
340 @rootcache(b'.hgignore')
341 def _ignore(self):
341 def _ignore(self):
342 files = self._ignorefiles()
342 files = self._ignorefiles()
343 if not files:
343 if not files:
344 return matchmod.never()
344 return matchmod.never()
345
345
346 pats = [b'include:%s' % f for f in files]
346 pats = [b'include:%s' % f for f in files]
347 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
347 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
348
348
349 @propertycache
349 @propertycache
350 def _slash(self):
350 def _slash(self):
351 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
351 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
352
352
353 @propertycache
353 @propertycache
354 def _checklink(self):
354 def _checklink(self):
355 return util.checklink(self._root)
355 return util.checklink(self._root)
356
356
357 @propertycache
357 @propertycache
358 def _checkexec(self):
358 def _checkexec(self):
359 return bool(util.checkexec(self._root))
359 return bool(util.checkexec(self._root))
360
360
361 @propertycache
361 @propertycache
362 def _checkcase(self):
362 def _checkcase(self):
363 return not util.fscasesensitive(self._join(b'.hg'))
363 return not util.fscasesensitive(self._join(b'.hg'))
364
364
365 def _join(self, f):
365 def _join(self, f):
366 # much faster than os.path.join()
366 # much faster than os.path.join()
367 # it's safe because f is always a relative path
367 # it's safe because f is always a relative path
368 return self._rootdir + f
368 return self._rootdir + f
369
369
    def flagfunc(self, buildfallback):
        """build a callable that returns flags associated with a filename

        The information is extracted from three possible layers:
        1. the file system if it supports the information
        2. the "fallback" information stored in the dirstate if any
        3. a more expensive mechanism inferring the flags from the parents.
        """

        # small hack to cache the result of buildfallback()
        fallback_func = []

        def get_flags(x):
            entry = None
            fallback_value = None
            try:
                st = os.lstat(self._join(x))
            except OSError:
                # missing/unstatable file: no flags
                return b''

            if self._checklink:
                if util.statislink(st):
                    return b'l'
            else:
                # filesystem cannot express symlinks: fall back to the
                # dirstate bit, then to the (expensive) parent-based lookup
                entry = self.get_entry(x)
                if entry.has_fallback_symlink:
                    if entry.fallback_symlink:
                        return b'l'
                else:
                    if not fallback_func:
                        fallback_func.append(buildfallback())
                    fallback_value = fallback_func[0](x)
                    if b'l' in fallback_value:
                        return b'l'

            if self._checkexec:
                if util.statisexec(st):
                    return b'x'
            else:
                # same layered lookup for the executable bit; reuse any
                # entry/fallback_value already computed above
                if entry is None:
                    entry = self.get_entry(x)
                if entry.has_fallback_exec:
                    if entry.fallback_exec:
                        return b'x'
                else:
                    if fallback_value is None:
                        if not fallback_func:
                            fallback_func.append(buildfallback())
                        fallback_value = fallback_func[0](x)
                    if b'x' in fallback_value:
                        return b'x'
            return b''

        return get_flags
424
424
425 @propertycache
425 @propertycache
426 def _cwd(self):
426 def _cwd(self):
427 # internal config: ui.forcecwd
427 # internal config: ui.forcecwd
428 forcecwd = self._ui.config(b'ui', b'forcecwd')
428 forcecwd = self._ui.config(b'ui', b'forcecwd')
429 if forcecwd:
429 if forcecwd:
430 return forcecwd
430 return forcecwd
431 return encoding.getcwd()
431 return encoding.getcwd()
432
432
433 def getcwd(self):
433 def getcwd(self):
434 """Return the path from which a canonical path is calculated.
434 """Return the path from which a canonical path is calculated.
435
435
436 This path should be used to resolve file patterns or to convert
436 This path should be used to resolve file patterns or to convert
437 canonical paths back to file paths for display. It shouldn't be
437 canonical paths back to file paths for display. It shouldn't be
438 used to get real file paths. Use vfs functions instead.
438 used to get real file paths. Use vfs functions instead.
439 """
439 """
440 cwd = self._cwd
440 cwd = self._cwd
441 if cwd == self._root:
441 if cwd == self._root:
442 return b''
442 return b''
443 # self._root ends with a path separator if self._root is '/' or 'C:\'
443 # self._root ends with a path separator if self._root is '/' or 'C:\'
444 rootsep = self._root
444 rootsep = self._root
445 if not util.endswithsep(rootsep):
445 if not util.endswithsep(rootsep):
446 rootsep += pycompat.ossep
446 rootsep += pycompat.ossep
447 if cwd.startswith(rootsep):
447 if cwd.startswith(rootsep):
448 return cwd[len(rootsep) :]
448 return cwd[len(rootsep) :]
449 else:
449 else:
450 # we're outside the repo. return an absolute path.
450 # we're outside the repo. return an absolute path.
451 return cwd
451 return cwd
452
452
453 def pathto(self, f, cwd=None):
453 def pathto(self, f, cwd=None):
454 if cwd is None:
454 if cwd is None:
455 cwd = self.getcwd()
455 cwd = self.getcwd()
456 path = util.pathto(self._root, cwd, f)
456 path = util.pathto(self._root, cwd, f)
457 if self._slash:
457 if self._slash:
458 return util.pconvert(path)
458 return util.pconvert(path)
459 return path
459 return path
460
460
461 def get_entry(self, path):
461 def get_entry(self, path):
462 """return a DirstateItem for the associated path"""
462 """return a DirstateItem for the associated path"""
463 entry = self._map.get(path)
463 entry = self._map.get(path)
464 if entry is None:
464 if entry is None:
465 return DirstateItem()
465 return DirstateItem()
466 return entry
466 return entry
467
467
468 def __contains__(self, key):
468 def __contains__(self, key):
469 return key in self._map
469 return key in self._map
470
470
471 def __iter__(self):
471 def __iter__(self):
472 return iter(sorted(self._map))
472 return iter(sorted(self._map))
473
473
474 def items(self):
474 def items(self):
475 return self._map.items()
475 return self._map.items()
476
476
477 iteritems = items
477 iteritems = items
478
478
479 def parents(self):
479 def parents(self):
480 return [self._validate(p) for p in self._pl]
480 return [self._validate(p) for p in self._pl]
481
481
482 def p1(self):
482 def p1(self):
483 return self._validate(self._pl[0])
483 return self._validate(self._pl[0])
484
484
485 def p2(self):
485 def p2(self):
486 return self._validate(self._pl[1])
486 return self._validate(self._pl[1])
487
487
488 @property
488 @property
489 def in_merge(self):
489 def in_merge(self):
490 """True if a merge is in progress"""
490 """True if a merge is in progress"""
491 return self._pl[1] != self._nodeconstants.nullid
491 return self._pl[1] != self._nodeconstants.nullid
492
492
493 def branch(self):
493 def branch(self):
494 return encoding.tolocal(self._branch)
494 return encoding.tolocal(self._branch)
495
495
    @requires_changing_parents
    def setparents(self, p1, p2=None):
        """Set dirstate parents to p1 and p2.

        When moving from two parents to one, "merged" entries a
        adjusted to normal and previous copy records discarded and
        returned by the call.

        See localrepo.setparents()
        """
        if p2 is None:
            p2 = self._nodeconstants.nullid
        # belt-and-suspenders: the decorator already checks the context,
        # this additionally rejects a zero nesting level outright
        if self._changing_level == 0:
            raise ValueError(
                b"cannot set dirstate parent outside of "
                b"dirstate.changing_parents context manager"
            )

        self._dirty = True
        oldp2 = self._pl[1]
        # remember the pre-change parents so invalidate() can detect drift
        if self._origpl is None:
            self._origpl = self._pl
        nullid = self._nodeconstants.nullid
        # True if we need to fold p2 related state back to a linear case
        fold_p2 = oldp2 != nullid and p2 == nullid
        return self._map.setparents(p1, p2, fold_p2=fold_p2)
522
522
    def setbranch(self, branch):
        """Set the current branch, persisting it to `.hg/branch`.

        On any write failure the atomictemp file is discarded so the
        on-disk branch file is left untouched.
        """
        self.__class__._branch.set(self, encoding.fromlocal(branch))
        f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
        try:
            f.write(self._branch + b'\n')
            f.close()

            # make sure filecache has the correct stat info for _branch after
            # replacing the underlying file
            ce = self._filecache[b'_branch']
            if ce:
                ce.refresh()
        except: # re-raises
            f.discard()
            raise
538
538
    def invalidate(self):
        """Causes the next access to reread the dirstate.

        This is different from localrepo.invalidatedirstate() because it always
        rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
        check whether the dirstate has changed before rereading it."""

        # drop the cached properties so they are rebuilt on next access
        for a in ("_map", "_branch", "_ignore"):
            if a in self.__dict__:
                delattr(self, a)
        self._dirty = False
        self._dirty_tracked_set = False
        # if a changing context is open, flag it as invalidated so nested
        # contexts refuse further use until everything has been exited
        self._invalidated_context = self._changing_level > 0
        self._origpl = None
553
553
@requires_changing_any
def copy(self, source, dest):
    """Mark dest as a copy of source. Unmark dest if source is None."""
    if source == dest:
        # self-copy carries no information
        return
    self._dirty = True
    if source is None:
        self._map.copymap.pop(dest, None)
    else:
        self._check_sparse(source)
        self._map.copymap[dest] = source
565
565
def copied(self, file):
    """Return the recorded copy source of ``file``, or None when absent."""
    return self._map.copymap.get(file, None)
568
568
def copies(self):
    """Expose the whole destination -> source copy mapping."""
    return self._map.copymap
571
571
@requires_changing_files
def set_tracked(self, filename, reset_copy=False):
    """a "public" method for generic code to mark a file as tracked

    This function is to be called outside of "update/merge" case. For
    example by a command like `hg add X`.

    if reset_copy is set, any existing copy information will be dropped.

    return True the file was previously untracked, False otherwise.
    """
    self._dirty = True
    current = self._map.get(filename)
    if current is None or not current.tracked:
        # the file is becoming tracked: validate its name does not clash
        self._check_new_tracked_filename(filename)
    pre_tracked = self._map.set_tracked(filename)
    if reset_copy:
        self._map.copymap.pop(filename, None)
    if pre_tracked:
        self._dirty_tracked_set = True
    return pre_tracked
593
593
@requires_changing_files
def set_untracked(self, filename):
    """a "public" method for generic code to mark a file as untracked

    This function is to be called outside of "update/merge" case. For
    example by a command like `hg remove X`.

    return True the file was previously tracked, False otherwise.
    """
    was_tracked = self._map.set_untracked(filename)
    if was_tracked:
        # only a real tracked -> untracked transition dirties the state
        self._dirty = True
        self._dirty_tracked_set = True
    return was_tracked
608
608
@requires_not_changing_parents
def set_clean(self, filename, parentfiledata):
    """record that the current state of the file on disk is known to be clean"""
    self._dirty = True
    if not self._map[filename].tracked:
        self._check_new_tracked_filename(filename)
    mode, size, mtime = parentfiledata
    self._map.set_clean(filename, mode, size, mtime)
617
617
@requires_not_changing_parents
def set_possibly_dirty(self, filename):
    """record that the current state of the file on disk is unknown"""
    self._dirty = True
    self._map.set_possibly_dirty(filename)
623
623
@requires_changing_parents
def update_file_p1(
    self,
    filename,
    p1_tracked,
):
    """Set a file as tracked in the parent (or not)

    This is to be called when adjusting the dirstate to a new parent
    after a history rewriting operation.

    It should not be called during a merge (p2 != nullid) and only within
    a `with dirstate.changing_parents(repo):` context.
    """
    if self.in_merge:
        msg = b'update_file_reference should not be called when merging'
        raise error.ProgrammingError(msg)
    entry = self._map.get(filename)
    wc_tracked = entry is not None and entry.tracked
    if not (p1_tracked or wc_tracked):
        # the file is no longer relevant to anyone
        if self._map.get(filename) is not None:
            self._map.reset_state(filename)
            self._dirty = True
    elif (not p1_tracked) and wc_tracked:
        if entry is not None and entry.added:
            return  # avoid dropping copy information (maybe?)

    self._map.reset_state(
        filename,
        wc_tracked,
        p1_tracked,
        # the underlying reference might have changed, we will have to
        # check it.
        has_meaningful_mtime=False,
    )
663
663
@requires_changing_parents
def update_file(
    self,
    filename,
    wc_tracked,
    p1_tracked,
    p2_info=False,
    possibly_dirty=False,
    parentfiledata=None,
):
    """update the information about a file in the dirstate

    This is to be called when the dirstate's parent changes to keep track
    of the file situation with regard to the working copy and its parent.

    This function must be called within a `dirstate.changing_parents` context.

    note: the API is at an early stage and we might need to adjust it
    depending on what information ends up being relevant and useful to
    other processing.
    """
    # delegate to the shared implementation; this wrapper only enforces
    # the changing-parents context via its decorator
    self._update_file(
        filename=filename,
        wc_tracked=wc_tracked,
        p1_tracked=p1_tracked,
        p2_info=p2_info,
        possibly_dirty=possibly_dirty,
        parentfiledata=parentfiledata,
    )
693
693
# XXX since this makes the dirstate dirty, we should enforce that it is done
# within an appropriate change-context that scopes the change and ensures it
# eventually gets written on disk (or rolled back)
def hacky_extension_update_file(self, *args, **kwargs):
    """NEVER USE THIS, YOU DO NOT NEED IT

    This function is a variant of "update_file" to be called by a small set
    of extensions, it also adjust the internal state of file, but can be
    called outside an `changing_parents` context.

    A very small number of extension meddle with the working copy content
    in a way that requires to adjust the dirstate accordingly. At the time
    this command is written they are :
    - keyword,
    - largefile,
    PLEASE DO NOT GROW THIS LIST ANY FURTHER.

    This function could probably be replaced by more semantic one (like
    "adjust expected size" or "always revalidate file content", etc)
    however at the time where this is writen, this is too much of a detour
    to be considered.
    """
    self._update_file(*args, **kwargs)
720
720
721 def _update_file(
721 def _update_file(
722 self,
722 self,
723 filename,
723 filename,
724 wc_tracked,
724 wc_tracked,
725 p1_tracked,
725 p1_tracked,
726 p2_info=False,
726 p2_info=False,
727 possibly_dirty=False,
727 possibly_dirty=False,
728 parentfiledata=None,
728 parentfiledata=None,
729 ):
729 ):
730
730
731 # note: I do not think we need to double check name clash here since we
731 # note: I do not think we need to double check name clash here since we
732 # are in a update/merge case that should already have taken care of
732 # are in a update/merge case that should already have taken care of
733 # this. The test agrees
733 # this. The test agrees
734
734
735 self._dirty = True
735 self._dirty = True
736 old_entry = self._map.get(filename)
736 old_entry = self._map.get(filename)
737 if old_entry is None:
737 if old_entry is None:
738 prev_tracked = False
738 prev_tracked = False
739 else:
739 else:
740 prev_tracked = old_entry.tracked
740 prev_tracked = old_entry.tracked
741 if prev_tracked != wc_tracked:
741 if prev_tracked != wc_tracked:
742 self._dirty_tracked_set = True
742 self._dirty_tracked_set = True
743
743
744 self._map.reset_state(
744 self._map.reset_state(
745 filename,
745 filename,
746 wc_tracked,
746 wc_tracked,
747 p1_tracked,
747 p1_tracked,
748 p2_info=p2_info,
748 p2_info=p2_info,
749 has_meaningful_mtime=not possibly_dirty,
749 has_meaningful_mtime=not possibly_dirty,
750 parentfiledata=parentfiledata,
750 parentfiledata=parentfiledata,
751 )
751 )
752
752
def _check_new_tracked_filename(self, filename):
    """Abort when ``filename`` cannot legally become tracked.

    Rejects invalid names, names shadowed by a tracked directory, and
    names whose leading directories clash with tracked files.
    """
    scmutil.checkfilename(filename)
    if self._map.hastrackeddir(filename):
        msg = _(b'directory %r already in dirstate')
        msg %= pycompat.bytestr(filename)
        raise error.Abort(msg)
    # shadows
    for d in pathutil.finddirs(filename):
        if self._map.hastrackeddir(d):
            break
        entry = self._map.get(d)
        if entry is not None and not entry.removed:
            msg = _(b'file %r in dirstate clashes with %r')
            msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
            raise error.Abort(msg)
    self._check_sparse(filename)
769
769
770 def _check_sparse(self, filename):
770 def _check_sparse(self, filename):
771 """Check that a filename is inside the sparse profile"""
771 """Check that a filename is inside the sparse profile"""
772 sparsematch = self._sparsematcher
772 sparsematch = self._sparsematcher
773 if sparsematch is not None and not sparsematch.always():
773 if sparsematch is not None and not sparsematch.always():
774 if not sparsematch(filename):
774 if not sparsematch(filename):
775 msg = _(b"cannot add '%s' - it is outside the sparse checkout")
775 msg = _(b"cannot add '%s' - it is outside the sparse checkout")
776 hint = _(
776 hint = _(
777 b'include file with `hg debugsparse --include <pattern>` or use '
777 b'include file with `hg debugsparse --include <pattern>` or use '
778 b'`hg add -s <file>` to include file directory while adding'
778 b'`hg add -s <file>` to include file directory while adding'
779 )
779 )
780 raise error.Abort(msg % filename, hint=hint)
780 raise error.Abort(msg % filename, hint=hint)
781
781
782 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
782 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
783 if exists is None:
783 if exists is None:
784 exists = os.path.lexists(os.path.join(self._root, path))
784 exists = os.path.lexists(os.path.join(self._root, path))
785 if not exists:
785 if not exists:
786 # Maybe a path component exists
786 # Maybe a path component exists
787 if not ignoremissing and b'/' in path:
787 if not ignoremissing and b'/' in path:
788 d, f = path.rsplit(b'/', 1)
788 d, f = path.rsplit(b'/', 1)
789 d = self._normalize(d, False, ignoremissing, None)
789 d = self._normalize(d, False, ignoremissing, None)
790 folded = d + b"/" + f
790 folded = d + b"/" + f
791 else:
791 else:
792 # No path components, preserve original case
792 # No path components, preserve original case
793 folded = path
793 folded = path
794 else:
794 else:
795 # recursively normalize leading directory components
795 # recursively normalize leading directory components
796 # against dirstate
796 # against dirstate
797 if b'/' in normed:
797 if b'/' in normed:
798 d, f = normed.rsplit(b'/', 1)
798 d, f = normed.rsplit(b'/', 1)
799 d = self._normalize(d, False, ignoremissing, True)
799 d = self._normalize(d, False, ignoremissing, True)
800 r = self._root + b"/" + d
800 r = self._root + b"/" + d
801 folded = d + b"/" + util.fspath(f, r)
801 folded = d + b"/" + util.fspath(f, r)
802 else:
802 else:
803 folded = util.fspath(normed, self._root)
803 folded = util.fspath(normed, self._root)
804 storemap[normed] = folded
804 storemap[normed] = folded
805
805
806 return folded
806 return folded
807
807
def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
    """Case-normalize a file path using the file fold map only."""
    normed = util.normcase(path)
    folded = self._map.filefoldmap.get(normed, None)
    if folded is None:
        # not cached: trust a known name as-is, otherwise probe the disk
        if isknown:
            folded = path
        else:
            folded = self._discoverpath(
                path, normed, ignoremissing, exists, self._map.filefoldmap
            )
    return folded
819
819
def _normalize(self, path, isknown, ignoremissing=False, exists=None):
    """Case-normalize a path, consulting both file and directory fold maps."""
    normed = util.normcase(path)
    folded = self._map.filefoldmap.get(normed, None)
    if folded is None:
        folded = self._map.dirfoldmap.get(normed, None)
    if folded is None:
        if isknown:
            folded = path
        else:
            # store discovered result in dirfoldmap so that future
            # normalizefile calls don't start matching directories
            folded = self._discoverpath(
                path, normed, ignoremissing, exists, self._map.dirfoldmap
            )
    return folded
835
835
def normalize(self, path, isknown=False, ignoremissing=False):
    """
    normalize the case of a pathname when on a casefolding filesystem

    isknown specifies whether the filename came from walking the
    disk, to avoid extra filesystem access.

    If ignoremissing is True, missing path are returned
    unchanged. Otherwise, we try harder to normalize possibly
    existing path components.

    The normalized case is determined based on the following precedence:

    - version of name already stored in the dirstate
    - version of name stored on disk
    - version provided via command arguments
    """
    if not self._checkcase:
        # case-sensitive filesystem: nothing to do
        return path
    return self._normalize(path, isknown, ignoremissing)
857
857
# XXX since this makes the dirstate dirty, we should enforce that it is done
# within an appropriate change-context that scopes the change and ensures it
# eventually gets written on disk (or rolled back)
# XXX this method is barely used, as a result:
# - its semantic is unclear
# - do we really need it ?
def clear(self):
    """Drop every entry from the dirstate map and mark the state dirty."""
    self._map.clear()
    self._dirty = True
864
867
# XXX since this makes the dirstate dirty, we should enforce that it is done
# within an appropriate change-context that scopes the change and ensures it
# eventually gets written on disk (or rolled back)
def rebuild(self, parent, allfiles, changedfiles=None):
    """Reset the dirstate to describe ``parent``.

    When ``changedfiles`` is None the whole dirstate is rebuilt from
    ``allfiles``; otherwise only the changed entries are refreshed or
    dropped.
    """
    matcher = self._sparsematcher
    if matcher is not None and not matcher.always():
        # should not add non-matching files
        allfiles = [f for f in allfiles if matcher(f)]
        if changedfiles:
            changedfiles = [f for f in changedfiles if matcher(f)]

        if changedfiles is not None:
            # these files will be deleted from the dirstate when they are
            # not found to be in allfiles
            dirstatefilestoremove = {f for f in self if not matcher(f)}
            changedfiles = dirstatefilestoremove.union(changedfiles)

    if changedfiles is None:
        # Rebuild entire dirstate
        to_lookup = allfiles
        to_drop = []
        self.clear()
    elif len(changedfiles) < 10:
        # Avoid turning allfiles into a set, which can be expensive if it's
        # large.
        to_lookup = []
        to_drop = []
        for fname in changedfiles:
            if fname in allfiles:
                to_lookup.append(fname)
            else:
                to_drop.append(fname)
    else:
        changedfilesset = set(changedfiles)
        to_lookup = changedfilesset & set(allfiles)
        to_drop = changedfilesset - to_lookup

    if self._origpl is None:
        # remember the pre-rebuild parents for the change callbacks
        self._origpl = self._pl
    self._map.setparents(parent, self._nodeconstants.nullid)

    for fname in to_lookup:
        if self.in_merge:
            self.set_tracked(fname)
        else:
            self._map.reset_state(
                fname,
                wc_tracked=True,
                p1_tracked=True,
            )
    for fname in to_drop:
        self._map.reset_state(fname)

    self._dirty = True
919
922
def identity(self):
    """Return identity of dirstate itself to detect changing in storage

    If identity of previous dirstate is equal to this, writing
    changes based on the former dirstate out can keep consistency.
    """
    return self._map.identity
927
930
def write(self, tr):
    """Write pending dirstate changes out.

    With a transaction ``tr`` the writes are registered as file
    generators (delayed until the transaction is finalized) and the
    in-memory state is invalidated on abort.  Without a transaction the
    dirstate (and, when needed, the tracked-hint file) is written
    immediately.  No-op when nothing is dirty.
    """
    if not self._dirty:
        return

    # the tracked-hint key file is only rewritten when the tracked set
    # actually changed (and the feature is enabled)
    write_key = self._use_tracked_hint and self._dirty_tracked_set
    if tr:
        # fix: the original nested a redundant `if tr is not None:` here;
        # the enclosing `if tr:` already excludes None
        # make sure we invalidate the current change on abort
        tr.addabort(
            b'dirstate-invalidate',
            lambda tr: self.invalidate(),
        )
        # delay writing in-memory changes out
        tr.addfilegenerator(
            b'dirstate-1-main',
            (self._filename,),
            lambda f: self._writedirstate(tr, f),
            location=b'plain',
            post_finalize=True,
        )
        if write_key:
            tr.addfilegenerator(
                b'dirstate-2-key-post',
                (self._filename_th,),
                lambda f: self._write_tracked_hint(tr, f),
                location=b'plain',
                post_finalize=True,
            )
        return

    file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
    with file(self._filename) as f:
        self._writedirstate(tr, f)
    if write_key:
        # we update the key-file after writing to make sure reader have a
        # key that match the newly written content
        with file(self._filename_th) as f:
            self._write_tracked_hint(tr, f)
969
def delete_tracked_hint(self):
    """remove the tracked_hint file

    To be used by format downgrades operation"""
    self._opener.unlink(self._filename_th)
    self._use_tracked_hint = False
973
976
def addparentchangecallback(self, category, callback):
    """add a callback to be called when the wd parents are changed

    Callback will be called with the following arguments:
        dirstate, (oldp1, oldp2), (newp1, newp2)

    Category is a unique identifier to allow overwriting an old callback
    with a newer callback.
    """
    # later registrations under the same category replace earlier ones
    self._plchangecallbacks[category] = callback
984
987
985 def _writedirstate(self, tr, st):
988 def _writedirstate(self, tr, st):
986 # notify callbacks about parents change
989 # notify callbacks about parents change
987 if self._origpl is not None and self._origpl != self._pl:
990 if self._origpl is not None and self._origpl != self._pl:
988 for c, callback in sorted(self._plchangecallbacks.items()):
991 for c, callback in sorted(self._plchangecallbacks.items()):
989 callback(self, self._origpl, self._pl)
992 callback(self, self._origpl, self._pl)
990 self._origpl = None
993 self._origpl = None
991 self._map.write(tr, st)
994 self._map.write(tr, st)
992 self._dirty = False
995 self._dirty = False
993 self._dirty_tracked_set = False
996 self._dirty_tracked_set = False
994
997
def _write_tracked_hint(self, tr, f):
    """Write a fresh random key into the tracked-hint file ``f``."""
    hint_key = node.hex(uuid.uuid4().bytes)
    f.write(b"1\n%s\n" % hint_key)  # 1 is the format version
998
1001
999 def _dirignore(self, f):
1002 def _dirignore(self, f):
1000 if self._ignore(f):
1003 if self._ignore(f):
1001 return True
1004 return True
1002 for p in pathutil.finddirs(f):
1005 for p in pathutil.finddirs(f):
1003 if self._ignore(p):
1006 if self._ignore(p):
1004 return True
1007 return True
1005 return False
1008 return False
1006
1009
1007 def _ignorefiles(self):
1010 def _ignorefiles(self):
1008 files = []
1011 files = []
1009 if os.path.exists(self._join(b'.hgignore')):
1012 if os.path.exists(self._join(b'.hgignore')):
1010 files.append(self._join(b'.hgignore'))
1013 files.append(self._join(b'.hgignore'))
1011 for name, path in self._ui.configitems(b"ui"):
1014 for name, path in self._ui.configitems(b"ui"):
1012 if name == b'ignore' or name.startswith(b'ignore.'):
1015 if name == b'ignore' or name.startswith(b'ignore.'):
1013 # we need to use os.path.join here rather than self._join
1016 # we need to use os.path.join here rather than self._join
1014 # because path is arbitrary and user-specified
1017 # because path is arbitrary and user-specified
1015 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1018 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1016 return files
1019 return files
1017
1020
1018 def _ignorefileandline(self, f):
1021 def _ignorefileandline(self, f):
1019 files = collections.deque(self._ignorefiles())
1022 files = collections.deque(self._ignorefiles())
1020 visited = set()
1023 visited = set()
1021 while files:
1024 while files:
1022 i = files.popleft()
1025 i = files.popleft()
1023 patterns = matchmod.readpatternfile(
1026 patterns = matchmod.readpatternfile(
1024 i, self._ui.warn, sourceinfo=True
1027 i, self._ui.warn, sourceinfo=True
1025 )
1028 )
1026 for pattern, lineno, line in patterns:
1029 for pattern, lineno, line in patterns:
1027 kind, p = matchmod._patsplit(pattern, b'glob')
1030 kind, p = matchmod._patsplit(pattern, b'glob')
1028 if kind == b"subinclude":
1031 if kind == b"subinclude":
1029 if p not in visited:
1032 if p not in visited:
1030 files.append(p)
1033 files.append(p)
1031 continue
1034 continue
1032 m = matchmod.match(
1035 m = matchmod.match(
1033 self._root, b'', [], [pattern], warn=self._ui.warn
1036 self._root, b'', [], [pattern], warn=self._ui.warn
1034 )
1037 )
1035 if m(f):
1038 if m(f):
1036 return (i, lineno, line)
1039 return (i, lineno, line)
1037 visited.add(i)
1040 visited.add(i)
1038 return (None, -1, b"")
1041 return (None, -1, b"")
1039
1042
1040 def _walkexplicit(self, match, subrepos):
1043 def _walkexplicit(self, match, subrepos):
1041 """Get stat data about the files explicitly specified by match.
1044 """Get stat data about the files explicitly specified by match.
1042
1045
1043 Return a triple (results, dirsfound, dirsnotfound).
1046 Return a triple (results, dirsfound, dirsnotfound).
1044 - results is a mapping from filename to stat result. It also contains
1047 - results is a mapping from filename to stat result. It also contains
1045 listings mapping subrepos and .hg to None.
1048 listings mapping subrepos and .hg to None.
1046 - dirsfound is a list of files found to be directories.
1049 - dirsfound is a list of files found to be directories.
1047 - dirsnotfound is a list of files that the dirstate thinks are
1050 - dirsnotfound is a list of files that the dirstate thinks are
1048 directories and that were not found."""
1051 directories and that were not found."""
1049
1052
1050 def badtype(mode):
1053 def badtype(mode):
1051 kind = _(b'unknown')
1054 kind = _(b'unknown')
1052 if stat.S_ISCHR(mode):
1055 if stat.S_ISCHR(mode):
1053 kind = _(b'character device')
1056 kind = _(b'character device')
1054 elif stat.S_ISBLK(mode):
1057 elif stat.S_ISBLK(mode):
1055 kind = _(b'block device')
1058 kind = _(b'block device')
1056 elif stat.S_ISFIFO(mode):
1059 elif stat.S_ISFIFO(mode):
1057 kind = _(b'fifo')
1060 kind = _(b'fifo')
1058 elif stat.S_ISSOCK(mode):
1061 elif stat.S_ISSOCK(mode):
1059 kind = _(b'socket')
1062 kind = _(b'socket')
1060 elif stat.S_ISDIR(mode):
1063 elif stat.S_ISDIR(mode):
1061 kind = _(b'directory')
1064 kind = _(b'directory')
1062 return _(b'unsupported file type (type is %s)') % kind
1065 return _(b'unsupported file type (type is %s)') % kind
1063
1066
1064 badfn = match.bad
1067 badfn = match.bad
1065 dmap = self._map
1068 dmap = self._map
1066 lstat = os.lstat
1069 lstat = os.lstat
1067 getkind = stat.S_IFMT
1070 getkind = stat.S_IFMT
1068 dirkind = stat.S_IFDIR
1071 dirkind = stat.S_IFDIR
1069 regkind = stat.S_IFREG
1072 regkind = stat.S_IFREG
1070 lnkkind = stat.S_IFLNK
1073 lnkkind = stat.S_IFLNK
1071 join = self._join
1074 join = self._join
1072 dirsfound = []
1075 dirsfound = []
1073 foundadd = dirsfound.append
1076 foundadd = dirsfound.append
1074 dirsnotfound = []
1077 dirsnotfound = []
1075 notfoundadd = dirsnotfound.append
1078 notfoundadd = dirsnotfound.append
1076
1079
1077 if not match.isexact() and self._checkcase:
1080 if not match.isexact() and self._checkcase:
1078 normalize = self._normalize
1081 normalize = self._normalize
1079 else:
1082 else:
1080 normalize = None
1083 normalize = None
1081
1084
1082 files = sorted(match.files())
1085 files = sorted(match.files())
1083 subrepos.sort()
1086 subrepos.sort()
1084 i, j = 0, 0
1087 i, j = 0, 0
1085 while i < len(files) and j < len(subrepos):
1088 while i < len(files) and j < len(subrepos):
1086 subpath = subrepos[j] + b"/"
1089 subpath = subrepos[j] + b"/"
1087 if files[i] < subpath:
1090 if files[i] < subpath:
1088 i += 1
1091 i += 1
1089 continue
1092 continue
1090 while i < len(files) and files[i].startswith(subpath):
1093 while i < len(files) and files[i].startswith(subpath):
1091 del files[i]
1094 del files[i]
1092 j += 1
1095 j += 1
1093
1096
1094 if not files or b'' in files:
1097 if not files or b'' in files:
1095 files = [b'']
1098 files = [b'']
1096 # constructing the foldmap is expensive, so don't do it for the
1099 # constructing the foldmap is expensive, so don't do it for the
1097 # common case where files is ['']
1100 # common case where files is ['']
1098 normalize = None
1101 normalize = None
1099 results = dict.fromkeys(subrepos)
1102 results = dict.fromkeys(subrepos)
1100 results[b'.hg'] = None
1103 results[b'.hg'] = None
1101
1104
1102 for ff in files:
1105 for ff in files:
1103 if normalize:
1106 if normalize:
1104 nf = normalize(ff, False, True)
1107 nf = normalize(ff, False, True)
1105 else:
1108 else:
1106 nf = ff
1109 nf = ff
1107 if nf in results:
1110 if nf in results:
1108 continue
1111 continue
1109
1112
1110 try:
1113 try:
1111 st = lstat(join(nf))
1114 st = lstat(join(nf))
1112 kind = getkind(st.st_mode)
1115 kind = getkind(st.st_mode)
1113 if kind == dirkind:
1116 if kind == dirkind:
1114 if nf in dmap:
1117 if nf in dmap:
1115 # file replaced by dir on disk but still in dirstate
1118 # file replaced by dir on disk but still in dirstate
1116 results[nf] = None
1119 results[nf] = None
1117 foundadd((nf, ff))
1120 foundadd((nf, ff))
1118 elif kind == regkind or kind == lnkkind:
1121 elif kind == regkind or kind == lnkkind:
1119 results[nf] = st
1122 results[nf] = st
1120 else:
1123 else:
1121 badfn(ff, badtype(kind))
1124 badfn(ff, badtype(kind))
1122 if nf in dmap:
1125 if nf in dmap:
1123 results[nf] = None
1126 results[nf] = None
1124 except (OSError) as inst:
1127 except (OSError) as inst:
1125 # nf not found on disk - it is dirstate only
1128 # nf not found on disk - it is dirstate only
1126 if nf in dmap: # does it exactly match a missing file?
1129 if nf in dmap: # does it exactly match a missing file?
1127 results[nf] = None
1130 results[nf] = None
1128 else: # does it match a missing directory?
1131 else: # does it match a missing directory?
1129 if self._map.hasdir(nf):
1132 if self._map.hasdir(nf):
1130 notfoundadd(nf)
1133 notfoundadd(nf)
1131 else:
1134 else:
1132 badfn(ff, encoding.strtolocal(inst.strerror))
1135 badfn(ff, encoding.strtolocal(inst.strerror))
1133
1136
1134 # match.files() may contain explicitly-specified paths that shouldn't
1137 # match.files() may contain explicitly-specified paths that shouldn't
1135 # be taken; drop them from the list of files found. dirsfound/notfound
1138 # be taken; drop them from the list of files found. dirsfound/notfound
1136 # aren't filtered here because they will be tested later.
1139 # aren't filtered here because they will be tested later.
1137 if match.anypats():
1140 if match.anypats():
1138 for f in list(results):
1141 for f in list(results):
1139 if f == b'.hg' or f in subrepos:
1142 if f == b'.hg' or f in subrepos:
1140 # keep sentinel to disable further out-of-repo walks
1143 # keep sentinel to disable further out-of-repo walks
1141 continue
1144 continue
1142 if not match(f):
1145 if not match(f):
1143 del results[f]
1146 del results[f]
1144
1147
1145 # Case insensitive filesystems cannot rely on lstat() failing to detect
1148 # Case insensitive filesystems cannot rely on lstat() failing to detect
1146 # a case-only rename. Prune the stat object for any file that does not
1149 # a case-only rename. Prune the stat object for any file that does not
1147 # match the case in the filesystem, if there are multiple files that
1150 # match the case in the filesystem, if there are multiple files that
1148 # normalize to the same path.
1151 # normalize to the same path.
1149 if match.isexact() and self._checkcase:
1152 if match.isexact() and self._checkcase:
1150 normed = {}
1153 normed = {}
1151
1154
1152 for f, st in results.items():
1155 for f, st in results.items():
1153 if st is None:
1156 if st is None:
1154 continue
1157 continue
1155
1158
1156 nc = util.normcase(f)
1159 nc = util.normcase(f)
1157 paths = normed.get(nc)
1160 paths = normed.get(nc)
1158
1161
1159 if paths is None:
1162 if paths is None:
1160 paths = set()
1163 paths = set()
1161 normed[nc] = paths
1164 normed[nc] = paths
1162
1165
1163 paths.add(f)
1166 paths.add(f)
1164
1167
1165 for norm, paths in normed.items():
1168 for norm, paths in normed.items():
1166 if len(paths) > 1:
1169 if len(paths) > 1:
1167 for path in paths:
1170 for path in paths:
1168 folded = self._discoverpath(
1171 folded = self._discoverpath(
1169 path, norm, True, None, self._map.dirfoldmap
1172 path, norm, True, None, self._map.dirfoldmap
1170 )
1173 )
1171 if path != folded:
1174 if path != folded:
1172 results[path] = None
1175 results[path] = None
1173
1176
1174 return results, dirsfound, dirsnotfound
1177 return results, dirsfound, dirsnotfound
1175
1178
1176 def walk(self, match, subrepos, unknown, ignored, full=True):
1179 def walk(self, match, subrepos, unknown, ignored, full=True):
1177 """
1180 """
1178 Walk recursively through the directory tree, finding all files
1181 Walk recursively through the directory tree, finding all files
1179 matched by match.
1182 matched by match.
1180
1183
1181 If full is False, maybe skip some known-clean files.
1184 If full is False, maybe skip some known-clean files.
1182
1185
1183 Return a dict mapping filename to stat-like object (either
1186 Return a dict mapping filename to stat-like object (either
1184 mercurial.osutil.stat instance or return value of os.stat()).
1187 mercurial.osutil.stat instance or return value of os.stat()).
1185
1188
1186 """
1189 """
1187 # full is a flag that extensions that hook into walk can use -- this
1190 # full is a flag that extensions that hook into walk can use -- this
1188 # implementation doesn't use it at all. This satisfies the contract
1191 # implementation doesn't use it at all. This satisfies the contract
1189 # because we only guarantee a "maybe".
1192 # because we only guarantee a "maybe".
1190
1193
1191 if ignored:
1194 if ignored:
1192 ignore = util.never
1195 ignore = util.never
1193 dirignore = util.never
1196 dirignore = util.never
1194 elif unknown:
1197 elif unknown:
1195 ignore = self._ignore
1198 ignore = self._ignore
1196 dirignore = self._dirignore
1199 dirignore = self._dirignore
1197 else:
1200 else:
1198 # if not unknown and not ignored, drop dir recursion and step 2
1201 # if not unknown and not ignored, drop dir recursion and step 2
1199 ignore = util.always
1202 ignore = util.always
1200 dirignore = util.always
1203 dirignore = util.always
1201
1204
1202 if self._sparsematchfn is not None:
1205 if self._sparsematchfn is not None:
1203 em = matchmod.exact(match.files())
1206 em = matchmod.exact(match.files())
1204 sm = matchmod.unionmatcher([self._sparsematcher, em])
1207 sm = matchmod.unionmatcher([self._sparsematcher, em])
1205 match = matchmod.intersectmatchers(match, sm)
1208 match = matchmod.intersectmatchers(match, sm)
1206
1209
1207 matchfn = match.matchfn
1210 matchfn = match.matchfn
1208 matchalways = match.always()
1211 matchalways = match.always()
1209 matchtdir = match.traversedir
1212 matchtdir = match.traversedir
1210 dmap = self._map
1213 dmap = self._map
1211 listdir = util.listdir
1214 listdir = util.listdir
1212 lstat = os.lstat
1215 lstat = os.lstat
1213 dirkind = stat.S_IFDIR
1216 dirkind = stat.S_IFDIR
1214 regkind = stat.S_IFREG
1217 regkind = stat.S_IFREG
1215 lnkkind = stat.S_IFLNK
1218 lnkkind = stat.S_IFLNK
1216 join = self._join
1219 join = self._join
1217
1220
1218 exact = skipstep3 = False
1221 exact = skipstep3 = False
1219 if match.isexact(): # match.exact
1222 if match.isexact(): # match.exact
1220 exact = True
1223 exact = True
1221 dirignore = util.always # skip step 2
1224 dirignore = util.always # skip step 2
1222 elif match.prefix(): # match.match, no patterns
1225 elif match.prefix(): # match.match, no patterns
1223 skipstep3 = True
1226 skipstep3 = True
1224
1227
1225 if not exact and self._checkcase:
1228 if not exact and self._checkcase:
1226 normalize = self._normalize
1229 normalize = self._normalize
1227 normalizefile = self._normalizefile
1230 normalizefile = self._normalizefile
1228 skipstep3 = False
1231 skipstep3 = False
1229 else:
1232 else:
1230 normalize = self._normalize
1233 normalize = self._normalize
1231 normalizefile = None
1234 normalizefile = None
1232
1235
1233 # step 1: find all explicit files
1236 # step 1: find all explicit files
1234 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1237 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1235 if matchtdir:
1238 if matchtdir:
1236 for d in work:
1239 for d in work:
1237 matchtdir(d[0])
1240 matchtdir(d[0])
1238 for d in dirsnotfound:
1241 for d in dirsnotfound:
1239 matchtdir(d)
1242 matchtdir(d)
1240
1243
1241 skipstep3 = skipstep3 and not (work or dirsnotfound)
1244 skipstep3 = skipstep3 and not (work or dirsnotfound)
1242 work = [d for d in work if not dirignore(d[0])]
1245 work = [d for d in work if not dirignore(d[0])]
1243
1246
1244 # step 2: visit subdirectories
1247 # step 2: visit subdirectories
1245 def traverse(work, alreadynormed):
1248 def traverse(work, alreadynormed):
1246 wadd = work.append
1249 wadd = work.append
1247 while work:
1250 while work:
1248 tracing.counter('dirstate.walk work', len(work))
1251 tracing.counter('dirstate.walk work', len(work))
1249 nd = work.pop()
1252 nd = work.pop()
1250 visitentries = match.visitchildrenset(nd)
1253 visitentries = match.visitchildrenset(nd)
1251 if not visitentries:
1254 if not visitentries:
1252 continue
1255 continue
1253 if visitentries == b'this' or visitentries == b'all':
1256 if visitentries == b'this' or visitentries == b'all':
1254 visitentries = None
1257 visitentries = None
1255 skip = None
1258 skip = None
1256 if nd != b'':
1259 if nd != b'':
1257 skip = b'.hg'
1260 skip = b'.hg'
1258 try:
1261 try:
1259 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1262 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1260 entries = listdir(join(nd), stat=True, skip=skip)
1263 entries = listdir(join(nd), stat=True, skip=skip)
1261 except (PermissionError, FileNotFoundError) as inst:
1264 except (PermissionError, FileNotFoundError) as inst:
1262 match.bad(
1265 match.bad(
1263 self.pathto(nd), encoding.strtolocal(inst.strerror)
1266 self.pathto(nd), encoding.strtolocal(inst.strerror)
1264 )
1267 )
1265 continue
1268 continue
1266 for f, kind, st in entries:
1269 for f, kind, st in entries:
1267 # Some matchers may return files in the visitentries set,
1270 # Some matchers may return files in the visitentries set,
1268 # instead of 'this', if the matcher explicitly mentions them
1271 # instead of 'this', if the matcher explicitly mentions them
1269 # and is not an exactmatcher. This is acceptable; we do not
1272 # and is not an exactmatcher. This is acceptable; we do not
1270 # make any hard assumptions about file-or-directory below
1273 # make any hard assumptions about file-or-directory below
1271 # based on the presence of `f` in visitentries. If
1274 # based on the presence of `f` in visitentries. If
1272 # visitchildrenset returned a set, we can always skip the
1275 # visitchildrenset returned a set, we can always skip the
1273 # entries *not* in the set it provided regardless of whether
1276 # entries *not* in the set it provided regardless of whether
1274 # they're actually a file or a directory.
1277 # they're actually a file or a directory.
1275 if visitentries and f not in visitentries:
1278 if visitentries and f not in visitentries:
1276 continue
1279 continue
1277 if normalizefile:
1280 if normalizefile:
1278 # even though f might be a directory, we're only
1281 # even though f might be a directory, we're only
1279 # interested in comparing it to files currently in the
1282 # interested in comparing it to files currently in the
1280 # dmap -- therefore normalizefile is enough
1283 # dmap -- therefore normalizefile is enough
1281 nf = normalizefile(
1284 nf = normalizefile(
1282 nd and (nd + b"/" + f) or f, True, True
1285 nd and (nd + b"/" + f) or f, True, True
1283 )
1286 )
1284 else:
1287 else:
1285 nf = nd and (nd + b"/" + f) or f
1288 nf = nd and (nd + b"/" + f) or f
1286 if nf not in results:
1289 if nf not in results:
1287 if kind == dirkind:
1290 if kind == dirkind:
1288 if not ignore(nf):
1291 if not ignore(nf):
1289 if matchtdir:
1292 if matchtdir:
1290 matchtdir(nf)
1293 matchtdir(nf)
1291 wadd(nf)
1294 wadd(nf)
1292 if nf in dmap and (matchalways or matchfn(nf)):
1295 if nf in dmap and (matchalways or matchfn(nf)):
1293 results[nf] = None
1296 results[nf] = None
1294 elif kind == regkind or kind == lnkkind:
1297 elif kind == regkind or kind == lnkkind:
1295 if nf in dmap:
1298 if nf in dmap:
1296 if matchalways or matchfn(nf):
1299 if matchalways or matchfn(nf):
1297 results[nf] = st
1300 results[nf] = st
1298 elif (matchalways or matchfn(nf)) and not ignore(
1301 elif (matchalways or matchfn(nf)) and not ignore(
1299 nf
1302 nf
1300 ):
1303 ):
1301 # unknown file -- normalize if necessary
1304 # unknown file -- normalize if necessary
1302 if not alreadynormed:
1305 if not alreadynormed:
1303 nf = normalize(nf, False, True)
1306 nf = normalize(nf, False, True)
1304 results[nf] = st
1307 results[nf] = st
1305 elif nf in dmap and (matchalways or matchfn(nf)):
1308 elif nf in dmap and (matchalways or matchfn(nf)):
1306 results[nf] = None
1309 results[nf] = None
1307
1310
1308 for nd, d in work:
1311 for nd, d in work:
1309 # alreadynormed means that processwork doesn't have to do any
1312 # alreadynormed means that processwork doesn't have to do any
1310 # expensive directory normalization
1313 # expensive directory normalization
1311 alreadynormed = not normalize or nd == d
1314 alreadynormed = not normalize or nd == d
1312 traverse([d], alreadynormed)
1315 traverse([d], alreadynormed)
1313
1316
1314 for s in subrepos:
1317 for s in subrepos:
1315 del results[s]
1318 del results[s]
1316 del results[b'.hg']
1319 del results[b'.hg']
1317
1320
1318 # step 3: visit remaining files from dmap
1321 # step 3: visit remaining files from dmap
1319 if not skipstep3 and not exact:
1322 if not skipstep3 and not exact:
1320 # If a dmap file is not in results yet, it was either
1323 # If a dmap file is not in results yet, it was either
1321 # a) not matching matchfn b) ignored, c) missing, or d) under a
1324 # a) not matching matchfn b) ignored, c) missing, or d) under a
1322 # symlink directory.
1325 # symlink directory.
1323 if not results and matchalways:
1326 if not results and matchalways:
1324 visit = [f for f in dmap]
1327 visit = [f for f in dmap]
1325 else:
1328 else:
1326 visit = [f for f in dmap if f not in results and matchfn(f)]
1329 visit = [f for f in dmap if f not in results and matchfn(f)]
1327 visit.sort()
1330 visit.sort()
1328
1331
1329 if unknown:
1332 if unknown:
1330 # unknown == True means we walked all dirs under the roots
1333 # unknown == True means we walked all dirs under the roots
1331 # that wasn't ignored, and everything that matched was stat'ed
1334 # that wasn't ignored, and everything that matched was stat'ed
1332 # and is already in results.
1335 # and is already in results.
1333 # The rest must thus be ignored or under a symlink.
1336 # The rest must thus be ignored or under a symlink.
1334 audit_path = pathutil.pathauditor(self._root, cached=True)
1337 audit_path = pathutil.pathauditor(self._root, cached=True)
1335
1338
1336 for nf in iter(visit):
1339 for nf in iter(visit):
1337 # If a stat for the same file was already added with a
1340 # If a stat for the same file was already added with a
1338 # different case, don't add one for this, since that would
1341 # different case, don't add one for this, since that would
1339 # make it appear as if the file exists under both names
1342 # make it appear as if the file exists under both names
1340 # on disk.
1343 # on disk.
1341 if (
1344 if (
1342 normalizefile
1345 normalizefile
1343 and normalizefile(nf, True, True) in results
1346 and normalizefile(nf, True, True) in results
1344 ):
1347 ):
1345 results[nf] = None
1348 results[nf] = None
1346 # Report ignored items in the dmap as long as they are not
1349 # Report ignored items in the dmap as long as they are not
1347 # under a symlink directory.
1350 # under a symlink directory.
1348 elif audit_path.check(nf):
1351 elif audit_path.check(nf):
1349 try:
1352 try:
1350 results[nf] = lstat(join(nf))
1353 results[nf] = lstat(join(nf))
1351 # file was just ignored, no links, and exists
1354 # file was just ignored, no links, and exists
1352 except OSError:
1355 except OSError:
1353 # file doesn't exist
1356 # file doesn't exist
1354 results[nf] = None
1357 results[nf] = None
1355 else:
1358 else:
1356 # It's either missing or under a symlink directory
1359 # It's either missing or under a symlink directory
1357 # which we in this case report as missing
1360 # which we in this case report as missing
1358 results[nf] = None
1361 results[nf] = None
1359 else:
1362 else:
1360 # We may not have walked the full directory tree above,
1363 # We may not have walked the full directory tree above,
1361 # so stat and check everything we missed.
1364 # so stat and check everything we missed.
1362 iv = iter(visit)
1365 iv = iter(visit)
1363 for st in util.statfiles([join(i) for i in visit]):
1366 for st in util.statfiles([join(i) for i in visit]):
1364 results[next(iv)] = st
1367 results[next(iv)] = st
1365 return results
1368 return results
1366
1369
1367 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1370 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1368 if self._sparsematchfn is not None:
1371 if self._sparsematchfn is not None:
1369 em = matchmod.exact(matcher.files())
1372 em = matchmod.exact(matcher.files())
1370 sm = matchmod.unionmatcher([self._sparsematcher, em])
1373 sm = matchmod.unionmatcher([self._sparsematcher, em])
1371 matcher = matchmod.intersectmatchers(matcher, sm)
1374 matcher = matchmod.intersectmatchers(matcher, sm)
1372 # Force Rayon (Rust parallelism library) to respect the number of
1375 # Force Rayon (Rust parallelism library) to respect the number of
1373 # workers. This is a temporary workaround until Rust code knows
1376 # workers. This is a temporary workaround until Rust code knows
1374 # how to read the config file.
1377 # how to read the config file.
1375 numcpus = self._ui.configint(b"worker", b"numcpus")
1378 numcpus = self._ui.configint(b"worker", b"numcpus")
1376 if numcpus is not None:
1379 if numcpus is not None:
1377 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1380 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1378
1381
1379 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1382 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1380 if not workers_enabled:
1383 if not workers_enabled:
1381 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1384 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1382
1385
1383 (
1386 (
1384 lookup,
1387 lookup,
1385 modified,
1388 modified,
1386 added,
1389 added,
1387 removed,
1390 removed,
1388 deleted,
1391 deleted,
1389 clean,
1392 clean,
1390 ignored,
1393 ignored,
1391 unknown,
1394 unknown,
1392 warnings,
1395 warnings,
1393 bad,
1396 bad,
1394 traversed,
1397 traversed,
1395 dirty,
1398 dirty,
1396 ) = rustmod.status(
1399 ) = rustmod.status(
1397 self._map._map,
1400 self._map._map,
1398 matcher,
1401 matcher,
1399 self._rootdir,
1402 self._rootdir,
1400 self._ignorefiles(),
1403 self._ignorefiles(),
1401 self._checkexec,
1404 self._checkexec,
1402 bool(list_clean),
1405 bool(list_clean),
1403 bool(list_ignored),
1406 bool(list_ignored),
1404 bool(list_unknown),
1407 bool(list_unknown),
1405 bool(matcher.traversedir),
1408 bool(matcher.traversedir),
1406 )
1409 )
1407
1410
1408 self._dirty |= dirty
1411 self._dirty |= dirty
1409
1412
1410 if matcher.traversedir:
1413 if matcher.traversedir:
1411 for dir in traversed:
1414 for dir in traversed:
1412 matcher.traversedir(dir)
1415 matcher.traversedir(dir)
1413
1416
1414 if self._ui.warn:
1417 if self._ui.warn:
1415 for item in warnings:
1418 for item in warnings:
1416 if isinstance(item, tuple):
1419 if isinstance(item, tuple):
1417 file_path, syntax = item
1420 file_path, syntax = item
1418 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1421 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1419 file_path,
1422 file_path,
1420 syntax,
1423 syntax,
1421 )
1424 )
1422 self._ui.warn(msg)
1425 self._ui.warn(msg)
1423 else:
1426 else:
1424 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1427 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1425 self._ui.warn(
1428 self._ui.warn(
1426 msg
1429 msg
1427 % (
1430 % (
1428 pathutil.canonpath(
1431 pathutil.canonpath(
1429 self._rootdir, self._rootdir, item
1432 self._rootdir, self._rootdir, item
1430 ),
1433 ),
1431 b"No such file or directory",
1434 b"No such file or directory",
1432 )
1435 )
1433 )
1436 )
1434
1437
1435 for fn, message in bad:
1438 for fn, message in bad:
1436 matcher.bad(fn, encoding.strtolocal(message))
1439 matcher.bad(fn, encoding.strtolocal(message))
1437
1440
1438 status = scmutil.status(
1441 status = scmutil.status(
1439 modified=modified,
1442 modified=modified,
1440 added=added,
1443 added=added,
1441 removed=removed,
1444 removed=removed,
1442 deleted=deleted,
1445 deleted=deleted,
1443 unknown=unknown,
1446 unknown=unknown,
1444 ignored=ignored,
1447 ignored=ignored,
1445 clean=clean,
1448 clean=clean,
1446 )
1449 )
1447 return (lookup, status)
1450 return (lookup, status)
1448
1451
    # XXX since this can make the dirstate dirty (through rust), we should
    # enforce that it is done within an appropriate change-context that scopes
    # the change and ensures it eventually gets written to disk (or rolled
    # back)
1452 def status(self, match, subrepos, ignored, clean, unknown):
1455 def status(self, match, subrepos, ignored, clean, unknown):
1453 """Determine the status of the working copy relative to the
1456 """Determine the status of the working copy relative to the
1454 dirstate and return a pair of (unsure, status), where status is of type
1457 dirstate and return a pair of (unsure, status), where status is of type
1455 scmutil.status and:
1458 scmutil.status and:
1456
1459
1457 unsure:
1460 unsure:
1458 files that might have been modified since the dirstate was
1461 files that might have been modified since the dirstate was
1459 written, but need to be read to be sure (size is the same
1462 written, but need to be read to be sure (size is the same
1460 but mtime differs)
1463 but mtime differs)
1461 status.modified:
1464 status.modified:
1462 files that have definitely been modified since the dirstate
1465 files that have definitely been modified since the dirstate
1463 was written (different size or mode)
1466 was written (different size or mode)
1464 status.clean:
1467 status.clean:
1465 files that have definitely not been modified since the
1468 files that have definitely not been modified since the
1466 dirstate was written
1469 dirstate was written
1467 """
1470 """
1468 listignored, listclean, listunknown = ignored, clean, unknown
1471 listignored, listclean, listunknown = ignored, clean, unknown
1469 lookup, modified, added, unknown, ignored = [], [], [], [], []
1472 lookup, modified, added, unknown, ignored = [], [], [], [], []
1470 removed, deleted, clean = [], [], []
1473 removed, deleted, clean = [], [], []
1471
1474
1472 dmap = self._map
1475 dmap = self._map
1473 dmap.preload()
1476 dmap.preload()
1474
1477
1475 use_rust = True
1478 use_rust = True
1476
1479
1477 allowed_matchers = (
1480 allowed_matchers = (
1478 matchmod.alwaysmatcher,
1481 matchmod.alwaysmatcher,
1479 matchmod.differencematcher,
1482 matchmod.differencematcher,
1480 matchmod.exactmatcher,
1483 matchmod.exactmatcher,
1481 matchmod.includematcher,
1484 matchmod.includematcher,
1482 matchmod.intersectionmatcher,
1485 matchmod.intersectionmatcher,
1483 matchmod.nevermatcher,
1486 matchmod.nevermatcher,
1484 matchmod.unionmatcher,
1487 matchmod.unionmatcher,
1485 )
1488 )
1486
1489
1487 if rustmod is None:
1490 if rustmod is None:
1488 use_rust = False
1491 use_rust = False
1489 elif self._checkcase:
1492 elif self._checkcase:
1490 # Case-insensitive filesystems are not handled yet
1493 # Case-insensitive filesystems are not handled yet
1491 use_rust = False
1494 use_rust = False
1492 elif subrepos:
1495 elif subrepos:
1493 use_rust = False
1496 use_rust = False
1494 elif not isinstance(match, allowed_matchers):
1497 elif not isinstance(match, allowed_matchers):
1495 # Some matchers have yet to be implemented
1498 # Some matchers have yet to be implemented
1496 use_rust = False
1499 use_rust = False
1497
1500
1498 # Get the time from the filesystem so we can disambiguate files that
1501 # Get the time from the filesystem so we can disambiguate files that
1499 # appear modified in the present or future.
1502 # appear modified in the present or future.
1500 try:
1503 try:
1501 mtime_boundary = timestamp.get_fs_now(self._opener)
1504 mtime_boundary = timestamp.get_fs_now(self._opener)
1502 except OSError:
1505 except OSError:
1503 # In largefiles or readonly context
1506 # In largefiles or readonly context
1504 mtime_boundary = None
1507 mtime_boundary = None
1505
1508
1506 if use_rust:
1509 if use_rust:
1507 try:
1510 try:
1508 res = self._rust_status(
1511 res = self._rust_status(
1509 match, listclean, listignored, listunknown
1512 match, listclean, listignored, listunknown
1510 )
1513 )
1511 return res + (mtime_boundary,)
1514 return res + (mtime_boundary,)
1512 except rustmod.FallbackError:
1515 except rustmod.FallbackError:
1513 pass
1516 pass
1514
1517
1515 def noop(f):
1518 def noop(f):
1516 pass
1519 pass
1517
1520
1518 dcontains = dmap.__contains__
1521 dcontains = dmap.__contains__
1519 dget = dmap.__getitem__
1522 dget = dmap.__getitem__
1520 ladd = lookup.append # aka "unsure"
1523 ladd = lookup.append # aka "unsure"
1521 madd = modified.append
1524 madd = modified.append
1522 aadd = added.append
1525 aadd = added.append
1523 uadd = unknown.append if listunknown else noop
1526 uadd = unknown.append if listunknown else noop
1524 iadd = ignored.append if listignored else noop
1527 iadd = ignored.append if listignored else noop
1525 radd = removed.append
1528 radd = removed.append
1526 dadd = deleted.append
1529 dadd = deleted.append
1527 cadd = clean.append if listclean else noop
1530 cadd = clean.append if listclean else noop
1528 mexact = match.exact
1531 mexact = match.exact
1529 dirignore = self._dirignore
1532 dirignore = self._dirignore
1530 checkexec = self._checkexec
1533 checkexec = self._checkexec
1531 checklink = self._checklink
1534 checklink = self._checklink
1532 copymap = self._map.copymap
1535 copymap = self._map.copymap
1533
1536
1534 # We need to do full walks when either
1537 # We need to do full walks when either
1535 # - we're listing all clean files, or
1538 # - we're listing all clean files, or
1536 # - match.traversedir does something, because match.traversedir should
1539 # - match.traversedir does something, because match.traversedir should
1537 # be called for every dir in the working dir
1540 # be called for every dir in the working dir
1538 full = listclean or match.traversedir is not None
1541 full = listclean or match.traversedir is not None
1539 for fn, st in self.walk(
1542 for fn, st in self.walk(
1540 match, subrepos, listunknown, listignored, full=full
1543 match, subrepos, listunknown, listignored, full=full
1541 ).items():
1544 ).items():
1542 if not dcontains(fn):
1545 if not dcontains(fn):
1543 if (listignored or mexact(fn)) and dirignore(fn):
1546 if (listignored or mexact(fn)) and dirignore(fn):
1544 if listignored:
1547 if listignored:
1545 iadd(fn)
1548 iadd(fn)
1546 else:
1549 else:
1547 uadd(fn)
1550 uadd(fn)
1548 continue
1551 continue
1549
1552
1550 t = dget(fn)
1553 t = dget(fn)
1551 mode = t.mode
1554 mode = t.mode
1552 size = t.size
1555 size = t.size
1553
1556
1554 if not st and t.tracked:
1557 if not st and t.tracked:
1555 dadd(fn)
1558 dadd(fn)
1556 elif t.p2_info:
1559 elif t.p2_info:
1557 madd(fn)
1560 madd(fn)
1558 elif t.added:
1561 elif t.added:
1559 aadd(fn)
1562 aadd(fn)
1560 elif t.removed:
1563 elif t.removed:
1561 radd(fn)
1564 radd(fn)
1562 elif t.tracked:
1565 elif t.tracked:
1563 if not checklink and t.has_fallback_symlink:
1566 if not checklink and t.has_fallback_symlink:
1564 # If the file system does not support symlink, the mode
1567 # If the file system does not support symlink, the mode
1565 # might not be correctly stored in the dirstate, so do not
1568 # might not be correctly stored in the dirstate, so do not
1566 # trust it.
1569 # trust it.
1567 ladd(fn)
1570 ladd(fn)
1568 elif not checkexec and t.has_fallback_exec:
1571 elif not checkexec and t.has_fallback_exec:
1569 # If the file system does not support exec bits, the mode
1572 # If the file system does not support exec bits, the mode
1570 # might not be correctly stored in the dirstate, so do not
1573 # might not be correctly stored in the dirstate, so do not
1571 # trust it.
1574 # trust it.
1572 ladd(fn)
1575 ladd(fn)
1573 elif (
1576 elif (
1574 size >= 0
1577 size >= 0
1575 and (
1578 and (
1576 (size != st.st_size and size != st.st_size & _rangemask)
1579 (size != st.st_size and size != st.st_size & _rangemask)
1577 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1580 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1578 )
1581 )
1579 or fn in copymap
1582 or fn in copymap
1580 ):
1583 ):
1581 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1584 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1582 # issue6456: Size returned may be longer due to
1585 # issue6456: Size returned may be longer due to
1583 # encryption on EXT-4 fscrypt, undecided.
1586 # encryption on EXT-4 fscrypt, undecided.
1584 ladd(fn)
1587 ladd(fn)
1585 else:
1588 else:
1586 madd(fn)
1589 madd(fn)
1587 elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
1590 elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
1588 # There might be a change in the future if for example the
1591 # There might be a change in the future if for example the
1589 # internal clock is off, but this is a case where the issues
1592 # internal clock is off, but this is a case where the issues
1590 # the user would face would be a lot worse and there is
1593 # the user would face would be a lot worse and there is
1591 # nothing we can really do.
1594 # nothing we can really do.
1592 ladd(fn)
1595 ladd(fn)
1593 elif listclean:
1596 elif listclean:
1594 cadd(fn)
1597 cadd(fn)
1595 status = scmutil.status(
1598 status = scmutil.status(
1596 modified, added, removed, deleted, unknown, ignored, clean
1599 modified, added, removed, deleted, unknown, ignored, clean
1597 )
1600 )
1598 return (lookup, status, mtime_boundary)
1601 return (lookup, status, mtime_boundary)
1599
1602
1600 def matches(self, match):
1603 def matches(self, match):
1601 """
1604 """
1602 return files in the dirstate (in whatever state) filtered by match
1605 return files in the dirstate (in whatever state) filtered by match
1603 """
1606 """
1604 dmap = self._map
1607 dmap = self._map
1605 if rustmod is not None:
1608 if rustmod is not None:
1606 dmap = self._map._map
1609 dmap = self._map._map
1607
1610
1608 if match.always():
1611 if match.always():
1609 return dmap.keys()
1612 return dmap.keys()
1610 files = match.files()
1613 files = match.files()
1611 if match.isexact():
1614 if match.isexact():
1612 # fast path -- filter the other way around, since typically files is
1615 # fast path -- filter the other way around, since typically files is
1613 # much smaller than dmap
1616 # much smaller than dmap
1614 return [f for f in files if f in dmap]
1617 return [f for f in files if f in dmap]
1615 if match.prefix() and all(fn in dmap for fn in files):
1618 if match.prefix() and all(fn in dmap for fn in files):
1616 # fast path -- all the values are known to be files, so just return
1619 # fast path -- all the values are known to be files, so just return
1617 # that
1620 # that
1618 return list(files)
1621 return list(files)
1619 return [f for f in dmap if match(f)]
1622 return [f for f in dmap if match(f)]
1620
1623
1621 def _actualfilename(self, tr):
1624 def _actualfilename(self, tr):
1622 if tr:
1625 if tr:
1623 return self._pendingfilename
1626 return self._pendingfilename
1624 else:
1627 else:
1625 return self._filename
1628 return self._filename
1626
1629
1627 def all_file_names(self):
1630 def all_file_names(self):
1628 """list all filename currently used by this dirstate
1631 """list all filename currently used by this dirstate
1629
1632
1630 This is only used to do `hg rollback` related backup in the transaction
1633 This is only used to do `hg rollback` related backup in the transaction
1631 """
1634 """
1632 if not self._opener.exists(self._filename):
1635 if not self._opener.exists(self._filename):
1633 # no data every written to disk yet
1636 # no data every written to disk yet
1634 return ()
1637 return ()
1635 elif self._use_dirstate_v2:
1638 elif self._use_dirstate_v2:
1636 return (
1639 return (
1637 self._filename,
1640 self._filename,
1638 self._map.docket.data_filename(),
1641 self._map.docket.data_filename(),
1639 )
1642 )
1640 else:
1643 else:
1641 return (self._filename,)
1644 return (self._filename,)
1642
1645
1643 def verify(self, m1, m2, p1, narrow_matcher=None):
1646 def verify(self, m1, m2, p1, narrow_matcher=None):
1644 """
1647 """
1645 check the dirstate contents against the parent manifest and yield errors
1648 check the dirstate contents against the parent manifest and yield errors
1646 """
1649 """
1647 missing_from_p1 = _(
1650 missing_from_p1 = _(
1648 b"%s marked as tracked in p1 (%s) but not in manifest1\n"
1651 b"%s marked as tracked in p1 (%s) but not in manifest1\n"
1649 )
1652 )
1650 unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
1653 unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
1651 missing_from_ps = _(
1654 missing_from_ps = _(
1652 b"%s marked as modified, but not in either manifest\n"
1655 b"%s marked as modified, but not in either manifest\n"
1653 )
1656 )
1654 missing_from_ds = _(
1657 missing_from_ds = _(
1655 b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
1658 b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
1656 )
1659 )
1657 for f, entry in self.items():
1660 for f, entry in self.items():
1658 if entry.p1_tracked:
1661 if entry.p1_tracked:
1659 if entry.modified and f not in m1 and f not in m2:
1662 if entry.modified and f not in m1 and f not in m2:
1660 yield missing_from_ps % f
1663 yield missing_from_ps % f
1661 elif f not in m1:
1664 elif f not in m1:
1662 yield missing_from_p1 % (f, node.short(p1))
1665 yield missing_from_p1 % (f, node.short(p1))
1663 if entry.added and f in m1:
1666 if entry.added and f in m1:
1664 yield unexpected_in_p1 % f
1667 yield unexpected_in_p1 % f
1665 for f in m1:
1668 for f in m1:
1666 if narrow_matcher is not None and not narrow_matcher(f):
1669 if narrow_matcher is not None and not narrow_matcher(f):
1667 continue
1670 continue
1668 entry = self.get_entry(f)
1671 entry = self.get_entry(f)
1669 if not entry.p1_tracked:
1672 if not entry.p1_tracked:
1670 yield missing_from_ds % (f, node.short(p1))
1673 yield missing_from_ds % (f, node.short(p1))
General Comments 0
You need to be logged in to leave comments. Login now