##// END OF EJS Templates
dirstate: simplify the invalidation management on context exit...
marmoute -
r51015:a7d11833 default
parent child Browse files
Show More
@@ -1,1682 +1,1677 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import collections
9 import collections
10 import contextlib
10 import contextlib
11 import os
11 import os
12 import stat
12 import stat
13 import uuid
13 import uuid
14
14
15 from .i18n import _
15 from .i18n import _
16 from .pycompat import delattr
16 from .pycompat import delattr
17
17
18 from hgdemandimport import tracing
18 from hgdemandimport import tracing
19
19
20 from . import (
20 from . import (
21 dirstatemap,
21 dirstatemap,
22 encoding,
22 encoding,
23 error,
23 error,
24 match as matchmod,
24 match as matchmod,
25 node,
25 node,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 util,
30 util,
31 )
31 )
32
32
33 from .dirstateutils import (
33 from .dirstateutils import (
34 timestamp,
34 timestamp,
35 )
35 )
36
36
37 from .interfaces import (
37 from .interfaces import (
38 dirstate as intdirstate,
38 dirstate as intdirstate,
39 util as interfaceutil,
39 util as interfaceutil,
40 )
40 )
41
41
42 parsers = policy.importmod('parsers')
42 parsers = policy.importmod('parsers')
43 rustmod = policy.importrust('dirstate')
43 rustmod = policy.importrust('dirstate')
44
44
45 HAS_FAST_DIRSTATE_V2 = rustmod is not None
45 HAS_FAST_DIRSTATE_V2 = rustmod is not None
46
46
47 propertycache = util.propertycache
47 propertycache = util.propertycache
48 filecache = scmutil.filecache
48 filecache = scmutil.filecache
49 _rangemask = dirstatemap.rangemask
49 _rangemask = dirstatemap.rangemask
50
50
51 DirstateItem = dirstatemap.DirstateItem
51 DirstateItem = dirstatemap.DirstateItem
52
52
53
53
54 class repocache(filecache):
54 class repocache(filecache):
55 """filecache for files in .hg/"""
55 """filecache for files in .hg/"""
56
56
57 def join(self, obj, fname):
57 def join(self, obj, fname):
58 return obj._opener.join(fname)
58 return obj._opener.join(fname)
59
59
60
60
61 class rootcache(filecache):
61 class rootcache(filecache):
62 """filecache for files in the repository root"""
62 """filecache for files in the repository root"""
63
63
64 def join(self, obj, fname):
64 def join(self, obj, fname):
65 return obj._join(fname)
65 return obj._join(fname)
66
66
67
67
68 def check_invalidated(func):
68 def check_invalidated(func):
69 """check that the wrapped func is called on a non-invalidated dirstate
69 """check that the wrapped func is called on a non-invalidated dirstate
70
70
71 The dirstate is in an "invalidated state" after an error occurred during its
71 The dirstate is in an "invalidated state" after an error occurred during its
72 modification and remains so until we exit the top level scope that framed
72 modification and remains so until we exit the top level scope that framed
73 such change.
73 such change.
74 """
74 """
75
75
76 def wrap(self, *args, **kwargs):
76 def wrap(self, *args, **kwargs):
77 if self._invalidated_context:
77 if self._invalidated_context:
78 msg = 'calling `%s` after the dirstate was invalidated'
78 msg = 'calling `%s` after the dirstate was invalidated'
79 msg %= func.__name__
79 msg %= func.__name__
80 raise error.ProgrammingError(msg)
80 raise error.ProgrammingError(msg)
81 return func(self, *args, **kwargs)
81 return func(self, *args, **kwargs)
82
82
83 return wrap
83 return wrap
84
84
85
85
86 def requires_changing_parents(func):
86 def requires_changing_parents(func):
87 def wrap(self, *args, **kwargs):
87 def wrap(self, *args, **kwargs):
88 if not self.is_changing_parents:
88 if not self.is_changing_parents:
89 msg = 'calling `%s` outside of a changing_parents context'
89 msg = 'calling `%s` outside of a changing_parents context'
90 msg %= func.__name__
90 msg %= func.__name__
91 raise error.ProgrammingError(msg)
91 raise error.ProgrammingError(msg)
92 return func(self, *args, **kwargs)
92 return func(self, *args, **kwargs)
93
93
94 return check_invalidated(wrap)
94 return check_invalidated(wrap)
95
95
96
96
97 def requires_changing_files(func):
97 def requires_changing_files(func):
98 def wrap(self, *args, **kwargs):
98 def wrap(self, *args, **kwargs):
99 if not self.is_changing_files:
99 if not self.is_changing_files:
100 msg = 'calling `%s` outside of a `changing_files`'
100 msg = 'calling `%s` outside of a `changing_files`'
101 msg %= func.__name__
101 msg %= func.__name__
102 raise error.ProgrammingError(msg)
102 raise error.ProgrammingError(msg)
103 return func(self, *args, **kwargs)
103 return func(self, *args, **kwargs)
104
104
105 return check_invalidated(wrap)
105 return check_invalidated(wrap)
106
106
107
107
108 def requires_changing_any(func):
108 def requires_changing_any(func):
109 def wrap(self, *args, **kwargs):
109 def wrap(self, *args, **kwargs):
110 if not self.is_changing_any:
110 if not self.is_changing_any:
111 msg = 'calling `%s` outside of a changing context'
111 msg = 'calling `%s` outside of a changing context'
112 msg %= func.__name__
112 msg %= func.__name__
113 raise error.ProgrammingError(msg)
113 raise error.ProgrammingError(msg)
114 return func(self, *args, **kwargs)
114 return func(self, *args, **kwargs)
115
115
116 return check_invalidated(wrap)
116 return check_invalidated(wrap)
117
117
118
118
119 def requires_not_changing_parents(func):
119 def requires_not_changing_parents(func):
120 def wrap(self, *args, **kwargs):
120 def wrap(self, *args, **kwargs):
121 if self.is_changing_parents:
121 if self.is_changing_parents:
122 msg = 'calling `%s` inside of a changing_parents context'
122 msg = 'calling `%s` inside of a changing_parents context'
123 msg %= func.__name__
123 msg %= func.__name__
124 raise error.ProgrammingError(msg)
124 raise error.ProgrammingError(msg)
125 return func(self, *args, **kwargs)
125 return func(self, *args, **kwargs)
126
126
127 return check_invalidated(wrap)
127 return check_invalidated(wrap)
128
128
129
129
130 CHANGE_TYPE_PARENTS = "parents"
130 CHANGE_TYPE_PARENTS = "parents"
131 CHANGE_TYPE_FILES = "files"
131 CHANGE_TYPE_FILES = "files"
132
132
133
133
134 @interfaceutil.implementer(intdirstate.idirstate)
134 @interfaceutil.implementer(intdirstate.idirstate)
135 class dirstate:
135 class dirstate:
136 def __init__(
136 def __init__(
137 self,
137 self,
138 opener,
138 opener,
139 ui,
139 ui,
140 root,
140 root,
141 validate,
141 validate,
142 sparsematchfn,
142 sparsematchfn,
143 nodeconstants,
143 nodeconstants,
144 use_dirstate_v2,
144 use_dirstate_v2,
145 use_tracked_hint=False,
145 use_tracked_hint=False,
146 ):
146 ):
147 """Create a new dirstate object.
147 """Create a new dirstate object.
148
148
149 opener is an open()-like callable that can be used to open the
149 opener is an open()-like callable that can be used to open the
150 dirstate file; root is the root of the directory tracked by
150 dirstate file; root is the root of the directory tracked by
151 the dirstate.
151 the dirstate.
152 """
152 """
153 self._use_dirstate_v2 = use_dirstate_v2
153 self._use_dirstate_v2 = use_dirstate_v2
154 self._use_tracked_hint = use_tracked_hint
154 self._use_tracked_hint = use_tracked_hint
155 self._nodeconstants = nodeconstants
155 self._nodeconstants = nodeconstants
156 self._opener = opener
156 self._opener = opener
157 self._validate = validate
157 self._validate = validate
158 self._root = root
158 self._root = root
159 # Either build a sparse-matcher or None if sparse is disabled
159 # Either build a sparse-matcher or None if sparse is disabled
160 self._sparsematchfn = sparsematchfn
160 self._sparsematchfn = sparsematchfn
161 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
161 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
162 # UNC path pointing to root share (issue4557)
162 # UNC path pointing to root share (issue4557)
163 self._rootdir = pathutil.normasprefix(root)
163 self._rootdir = pathutil.normasprefix(root)
164 # True if any internal state may be different
164 # True if any internal state may be different
165 self._dirty = False
165 self._dirty = False
166 # True if the set of tracked files may be different
166 # True if the set of tracked files may be different
167 self._dirty_tracked_set = False
167 self._dirty_tracked_set = False
168 self._ui = ui
168 self._ui = ui
169 self._filecache = {}
169 self._filecache = {}
170 # nesting level of `changing_parents` context
170 # nesting level of `changing_parents` context
171 self._changing_level = 0
171 self._changing_level = 0
172 # the change currently underway
172 # the change currently underway
173 self._change_type = None
173 self._change_type = None
174 # True if the current dirstate changing operations have been
174 # True if the current dirstate changing operations have been
175 # invalidated (used to make sure all nested contexts have been exited)
175 # invalidated (used to make sure all nested contexts have been exited)
176 self._invalidated_context = False
176 self._invalidated_context = False
177 self._filename = b'dirstate'
177 self._filename = b'dirstate'
178 self._filename_th = b'dirstate-tracked-hint'
178 self._filename_th = b'dirstate-tracked-hint'
179 self._pendingfilename = b'%s.pending' % self._filename
179 self._pendingfilename = b'%s.pending' % self._filename
180 self._plchangecallbacks = {}
180 self._plchangecallbacks = {}
181 self._origpl = None
181 self._origpl = None
182 self._mapcls = dirstatemap.dirstatemap
182 self._mapcls = dirstatemap.dirstatemap
183 # Access and cache cwd early, so we don't access it for the first time
183 # Access and cache cwd early, so we don't access it for the first time
184 # after a working-copy update caused it to not exist (accessing it then
184 # after a working-copy update caused it to not exist (accessing it then
185 # raises an exception).
185 # raises an exception).
186 self._cwd
186 self._cwd
187
187
188 def prefetch_parents(self):
188 def prefetch_parents(self):
189 """make sure the parents are loaded
189 """make sure the parents are loaded
190
190
191 Used to avoid a race condition.
191 Used to avoid a race condition.
192 """
192 """
193 self._pl
193 self._pl
194
194
195 @contextlib.contextmanager
195 @contextlib.contextmanager
196 @check_invalidated
196 @check_invalidated
197 def _changing(self, repo, change_type):
197 def _changing(self, repo, change_type):
198 if repo.currentwlock() is None:
198 if repo.currentwlock() is None:
199 msg = b"trying to change the dirstate without holding the wlock"
199 msg = b"trying to change the dirstate without holding the wlock"
200 raise error.ProgrammingError(msg)
200 raise error.ProgrammingError(msg)
201
201
202 has_tr = repo.currenttransaction() is not None
202 has_tr = repo.currenttransaction() is not None
203 if not has_tr and self._changing_level == 0 and self._dirty:
203 if not has_tr and self._changing_level == 0 and self._dirty:
204 msg = "entering a changing context, but dirstate is already dirty"
204 msg = "entering a changing context, but dirstate is already dirty"
205 raise error.ProgrammingError(msg)
205 raise error.ProgrammingError(msg)
206
206
207 assert self._changing_level >= 0
207 # different type of change are mutually exclusive
208 # different type of change are mutually exclusive
208 if self._change_type is None:
209 if self._change_type is None:
209 assert self._changing_level == 0
210 assert self._changing_level == 0
210 self._change_type = change_type
211 self._change_type = change_type
211 elif self._change_type != change_type:
212 elif self._change_type != change_type:
212 msg = (
213 msg = (
213 'trying to open "%s" dirstate-changing context while a "%s" is'
214 'trying to open "%s" dirstate-changing context while a "%s" is'
214 ' already open'
215 ' already open'
215 )
216 )
216 msg %= (change_type, self._change_type)
217 msg %= (change_type, self._change_type)
217 raise error.ProgrammingError(msg)
218 raise error.ProgrammingError(msg)
219 should_write = False
218 self._changing_level += 1
220 self._changing_level += 1
219 try:
221 try:
220 yield
222 yield
221 except: # re-raises
223 except: # re-raises
222 self.invalidate()
224 self.invalidate() # this will set `_invalidated_context`
223 raise
225 raise
224 finally:
226 finally:
225 tr = repo.currenttransaction()
227 assert self._changing_level > 0
226 if self._changing_level > 0:
228 self._changing_level -= 1
229 # If the dirstate is being invalidated, call invalidate again.
230 # This will throw away anything added by a upper context and
231 # reset the `_invalidated_context` flag when relevant
232 if self._changing_level <= 0:
233 self._change_type = None
234 assert self._changing_level == 0
227 if self._invalidated_context:
235 if self._invalidated_context:
228 # make sure we invalidate anything an upper context might
236 # make sure we invalidate anything an upper context might
229 # have changed.
237 # have changed.
230 self.invalidate()
238 self.invalidate()
231 self._changing_level -= 1
232 # The invalidation is complete once we exit the final context
233 # manager
234 if self._changing_level <= 0:
235 self._change_type = None
236 assert self._changing_level == 0
237 if self._invalidated_context:
238 self._invalidated_context = False
239 else:
239 else:
240 # When an exception occured, `_invalidated_context`
240 should_write = self._changing_level <= 0
241 # would have been set to True by the `invalidate`
241 tr = repo.currenttransaction()
242 # call earlier.
243 #
244 # We don't have more straightforward code, because the
245 # Exception catching (and the associated `invalidate`
246 # calling) might have been called by a nested context
247 # instead of the top level one.
248 self.write(tr)
249 if has_tr != (tr is not None):
242 if has_tr != (tr is not None):
250 if has_tr:
243 if has_tr:
251 m = "transaction vanished while changing dirstate"
244 m = "transaction vanished while changing dirstate"
252 else:
245 else:
253 m = "transaction appeared while changing dirstate"
246 m = "transaction appeared while changing dirstate"
254 raise error.ProgrammingError(m)
247 raise error.ProgrammingError(m)
248 if should_write:
249 self.write(tr)
255
250
256 @contextlib.contextmanager
251 @contextlib.contextmanager
257 def changing_parents(self, repo):
252 def changing_parents(self, repo):
258 with self._changing(repo, CHANGE_TYPE_PARENTS) as c:
253 with self._changing(repo, CHANGE_TYPE_PARENTS) as c:
259 yield c
254 yield c
260
255
261 @contextlib.contextmanager
256 @contextlib.contextmanager
262 def changing_files(self, repo):
257 def changing_files(self, repo):
263 with self._changing(repo, CHANGE_TYPE_FILES) as c:
258 with self._changing(repo, CHANGE_TYPE_FILES) as c:
264 yield c
259 yield c
265
260
266 # here to help migration to the new code
261 # here to help migration to the new code
267 def parentchange(self):
262 def parentchange(self):
268 msg = (
263 msg = (
269 "Mercurial 6.4 and later requires call to "
264 "Mercurial 6.4 and later requires call to "
270 "`dirstate.changing_parents(repo)`"
265 "`dirstate.changing_parents(repo)`"
271 )
266 )
272 raise error.ProgrammingError(msg)
267 raise error.ProgrammingError(msg)
273
268
274 @property
269 @property
275 def is_changing_any(self):
270 def is_changing_any(self):
276 """Returns true if the dirstate is in the middle of a set of changes.
271 """Returns true if the dirstate is in the middle of a set of changes.
277
272
278 This returns True for any kind of change.
273 This returns True for any kind of change.
279 """
274 """
280 return self._changing_level > 0
275 return self._changing_level > 0
281
276
282 def pendingparentchange(self):
277 def pendingparentchange(self):
283 return self.is_changing_parent()
278 return self.is_changing_parent()
284
279
285 def is_changing_parent(self):
280 def is_changing_parent(self):
286 """Returns true if the dirstate is in the middle of a set of changes
281 """Returns true if the dirstate is in the middle of a set of changes
287 that modify the dirstate parent.
282 that modify the dirstate parent.
288 """
283 """
289 self._ui.deprecwarn(b"dirstate.is_changing_parents", b"6.5")
284 self._ui.deprecwarn(b"dirstate.is_changing_parents", b"6.5")
290 return self.is_changing_parents
285 return self.is_changing_parents
291
286
292 @property
287 @property
293 def is_changing_parents(self):
288 def is_changing_parents(self):
294 """Returns true if the dirstate is in the middle of a set of changes
289 """Returns true if the dirstate is in the middle of a set of changes
295 that modify the dirstate parent.
290 that modify the dirstate parent.
296 """
291 """
297 if self._changing_level <= 0:
292 if self._changing_level <= 0:
298 return False
293 return False
299 return self._change_type == CHANGE_TYPE_PARENTS
294 return self._change_type == CHANGE_TYPE_PARENTS
300
295
301 @property
296 @property
302 def is_changing_files(self):
297 def is_changing_files(self):
303 """Returns true if the dirstate is in the middle of a set of changes
298 """Returns true if the dirstate is in the middle of a set of changes
304 that modify the files tracked or their sources.
299 that modify the files tracked or their sources.
305 """
300 """
306 if self._changing_level <= 0:
301 if self._changing_level <= 0:
307 return False
302 return False
308 return self._change_type == CHANGE_TYPE_FILES
303 return self._change_type == CHANGE_TYPE_FILES
309
304
310 @propertycache
305 @propertycache
311 def _map(self):
306 def _map(self):
312 """Return the dirstate contents (see documentation for dirstatemap)."""
307 """Return the dirstate contents (see documentation for dirstatemap)."""
313 self._map = self._mapcls(
308 self._map = self._mapcls(
314 self._ui,
309 self._ui,
315 self._opener,
310 self._opener,
316 self._root,
311 self._root,
317 self._nodeconstants,
312 self._nodeconstants,
318 self._use_dirstate_v2,
313 self._use_dirstate_v2,
319 )
314 )
320 return self._map
315 return self._map
321
316
322 @property
317 @property
323 def _sparsematcher(self):
318 def _sparsematcher(self):
324 """The matcher for the sparse checkout.
319 """The matcher for the sparse checkout.
325
320
326 The working directory may not include every file from a manifest. The
321 The working directory may not include every file from a manifest. The
327 matcher obtained by this property will match a path if it is to be
322 matcher obtained by this property will match a path if it is to be
328 included in the working directory.
323 included in the working directory.
329
324
330 When sparse is disabled, return None.
325 When sparse is disabled, return None.
331 """
326 """
332 if self._sparsematchfn is None:
327 if self._sparsematchfn is None:
333 return None
328 return None
334 # TODO there is potential to cache this property. For now, the matcher
329 # TODO there is potential to cache this property. For now, the matcher
335 # is resolved on every access. (But the called function does use a
330 # is resolved on every access. (But the called function does use a
336 # cache to keep the lookup fast.)
331 # cache to keep the lookup fast.)
337 return self._sparsematchfn()
332 return self._sparsematchfn()
338
333
339 @repocache(b'branch')
334 @repocache(b'branch')
340 def _branch(self):
335 def _branch(self):
341 try:
336 try:
342 return self._opener.read(b"branch").strip() or b"default"
337 return self._opener.read(b"branch").strip() or b"default"
343 except FileNotFoundError:
338 except FileNotFoundError:
344 return b"default"
339 return b"default"
345
340
346 @property
341 @property
347 def _pl(self):
342 def _pl(self):
348 return self._map.parents()
343 return self._map.parents()
349
344
350 def hasdir(self, d):
345 def hasdir(self, d):
351 return self._map.hastrackeddir(d)
346 return self._map.hastrackeddir(d)
352
347
353 @rootcache(b'.hgignore')
348 @rootcache(b'.hgignore')
354 def _ignore(self):
349 def _ignore(self):
355 files = self._ignorefiles()
350 files = self._ignorefiles()
356 if not files:
351 if not files:
357 return matchmod.never()
352 return matchmod.never()
358
353
359 pats = [b'include:%s' % f for f in files]
354 pats = [b'include:%s' % f for f in files]
360 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
355 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
361
356
362 @propertycache
357 @propertycache
363 def _slash(self):
358 def _slash(self):
364 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
359 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
365
360
366 @propertycache
361 @propertycache
367 def _checklink(self):
362 def _checklink(self):
368 return util.checklink(self._root)
363 return util.checklink(self._root)
369
364
370 @propertycache
365 @propertycache
371 def _checkexec(self):
366 def _checkexec(self):
372 return bool(util.checkexec(self._root))
367 return bool(util.checkexec(self._root))
373
368
374 @propertycache
369 @propertycache
375 def _checkcase(self):
370 def _checkcase(self):
376 return not util.fscasesensitive(self._join(b'.hg'))
371 return not util.fscasesensitive(self._join(b'.hg'))
377
372
378 def _join(self, f):
373 def _join(self, f):
379 # much faster than os.path.join()
374 # much faster than os.path.join()
380 # it's safe because f is always a relative path
375 # it's safe because f is always a relative path
381 return self._rootdir + f
376 return self._rootdir + f
382
377
383 def flagfunc(self, buildfallback):
378 def flagfunc(self, buildfallback):
384 """build a callable that returns flags associated with a filename
379 """build a callable that returns flags associated with a filename
385
380
386 The information is extracted from three possible layers:
381 The information is extracted from three possible layers:
387 1. the file system if it supports the information
382 1. the file system if it supports the information
388 2. the "fallback" information stored in the dirstate if any
383 2. the "fallback" information stored in the dirstate if any
389 3. a more expensive mechanism inferring the flags from the parents.
384 3. a more expensive mechanism inferring the flags from the parents.
390 """
385 """
391
386
392 # small hack to cache the result of buildfallback()
387 # small hack to cache the result of buildfallback()
393 fallback_func = []
388 fallback_func = []
394
389
395 def get_flags(x):
390 def get_flags(x):
396 entry = None
391 entry = None
397 fallback_value = None
392 fallback_value = None
398 try:
393 try:
399 st = os.lstat(self._join(x))
394 st = os.lstat(self._join(x))
400 except OSError:
395 except OSError:
401 return b''
396 return b''
402
397
403 if self._checklink:
398 if self._checklink:
404 if util.statislink(st):
399 if util.statislink(st):
405 return b'l'
400 return b'l'
406 else:
401 else:
407 entry = self.get_entry(x)
402 entry = self.get_entry(x)
408 if entry.has_fallback_symlink:
403 if entry.has_fallback_symlink:
409 if entry.fallback_symlink:
404 if entry.fallback_symlink:
410 return b'l'
405 return b'l'
411 else:
406 else:
412 if not fallback_func:
407 if not fallback_func:
413 fallback_func.append(buildfallback())
408 fallback_func.append(buildfallback())
414 fallback_value = fallback_func[0](x)
409 fallback_value = fallback_func[0](x)
415 if b'l' in fallback_value:
410 if b'l' in fallback_value:
416 return b'l'
411 return b'l'
417
412
418 if self._checkexec:
413 if self._checkexec:
419 if util.statisexec(st):
414 if util.statisexec(st):
420 return b'x'
415 return b'x'
421 else:
416 else:
422 if entry is None:
417 if entry is None:
423 entry = self.get_entry(x)
418 entry = self.get_entry(x)
424 if entry.has_fallback_exec:
419 if entry.has_fallback_exec:
425 if entry.fallback_exec:
420 if entry.fallback_exec:
426 return b'x'
421 return b'x'
427 else:
422 else:
428 if fallback_value is None:
423 if fallback_value is None:
429 if not fallback_func:
424 if not fallback_func:
430 fallback_func.append(buildfallback())
425 fallback_func.append(buildfallback())
431 fallback_value = fallback_func[0](x)
426 fallback_value = fallback_func[0](x)
432 if b'x' in fallback_value:
427 if b'x' in fallback_value:
433 return b'x'
428 return b'x'
434 return b''
429 return b''
435
430
436 return get_flags
431 return get_flags
437
432
438 @propertycache
433 @propertycache
439 def _cwd(self):
434 def _cwd(self):
440 # internal config: ui.forcecwd
435 # internal config: ui.forcecwd
441 forcecwd = self._ui.config(b'ui', b'forcecwd')
436 forcecwd = self._ui.config(b'ui', b'forcecwd')
442 if forcecwd:
437 if forcecwd:
443 return forcecwd
438 return forcecwd
444 return encoding.getcwd()
439 return encoding.getcwd()
445
440
446 def getcwd(self):
441 def getcwd(self):
447 """Return the path from which a canonical path is calculated.
442 """Return the path from which a canonical path is calculated.
448
443
449 This path should be used to resolve file patterns or to convert
444 This path should be used to resolve file patterns or to convert
450 canonical paths back to file paths for display. It shouldn't be
445 canonical paths back to file paths for display. It shouldn't be
451 used to get real file paths. Use vfs functions instead.
446 used to get real file paths. Use vfs functions instead.
452 """
447 """
453 cwd = self._cwd
448 cwd = self._cwd
454 if cwd == self._root:
449 if cwd == self._root:
455 return b''
450 return b''
456 # self._root ends with a path separator if self._root is '/' or 'C:\'
451 # self._root ends with a path separator if self._root is '/' or 'C:\'
457 rootsep = self._root
452 rootsep = self._root
458 if not util.endswithsep(rootsep):
453 if not util.endswithsep(rootsep):
459 rootsep += pycompat.ossep
454 rootsep += pycompat.ossep
460 if cwd.startswith(rootsep):
455 if cwd.startswith(rootsep):
461 return cwd[len(rootsep) :]
456 return cwd[len(rootsep) :]
462 else:
457 else:
463 # we're outside the repo. return an absolute path.
458 # we're outside the repo. return an absolute path.
464 return cwd
459 return cwd
465
460
466 def pathto(self, f, cwd=None):
461 def pathto(self, f, cwd=None):
467 if cwd is None:
462 if cwd is None:
468 cwd = self.getcwd()
463 cwd = self.getcwd()
469 path = util.pathto(self._root, cwd, f)
464 path = util.pathto(self._root, cwd, f)
470 if self._slash:
465 if self._slash:
471 return util.pconvert(path)
466 return util.pconvert(path)
472 return path
467 return path
473
468
474 def get_entry(self, path):
469 def get_entry(self, path):
475 """return a DirstateItem for the associated path"""
470 """return a DirstateItem for the associated path"""
476 entry = self._map.get(path)
471 entry = self._map.get(path)
477 if entry is None:
472 if entry is None:
478 return DirstateItem()
473 return DirstateItem()
479 return entry
474 return entry
480
475
481 def __contains__(self, key):
476 def __contains__(self, key):
482 return key in self._map
477 return key in self._map
483
478
484 def __iter__(self):
479 def __iter__(self):
485 return iter(sorted(self._map))
480 return iter(sorted(self._map))
486
481
487 def items(self):
482 def items(self):
488 return self._map.items()
483 return self._map.items()
489
484
490 iteritems = items
485 iteritems = items
491
486
492 def parents(self):
487 def parents(self):
493 return [self._validate(p) for p in self._pl]
488 return [self._validate(p) for p in self._pl]
494
489
495 def p1(self):
490 def p1(self):
496 return self._validate(self._pl[0])
491 return self._validate(self._pl[0])
497
492
498 def p2(self):
493 def p2(self):
499 return self._validate(self._pl[1])
494 return self._validate(self._pl[1])
500
495
501 @property
496 @property
502 def in_merge(self):
497 def in_merge(self):
503 """True if a merge is in progress"""
498 """True if a merge is in progress"""
504 return self._pl[1] != self._nodeconstants.nullid
499 return self._pl[1] != self._nodeconstants.nullid
505
500
506 def branch(self):
501 def branch(self):
507 return encoding.tolocal(self._branch)
502 return encoding.tolocal(self._branch)
508
503
509 @requires_changing_parents
504 @requires_changing_parents
510 def setparents(self, p1, p2=None):
505 def setparents(self, p1, p2=None):
511 """Set dirstate parents to p1 and p2.
506 """Set dirstate parents to p1 and p2.
512
507
513 When moving from two parents to one, "merged" entries are
508 When moving from two parents to one, "merged" entries are
514 adjusted to normal and previous copy records discarded and
509 adjusted to normal and previous copy records discarded and
515 returned by the call.
510 returned by the call.
516
511
517 See localrepo.setparents()
512 See localrepo.setparents()
518 """
513 """
519 if p2 is None:
514 if p2 is None:
520 p2 = self._nodeconstants.nullid
515 p2 = self._nodeconstants.nullid
521 if self._changing_level == 0:
516 if self._changing_level == 0:
522 raise ValueError(
517 raise ValueError(
523 b"cannot set dirstate parent outside of "
518 b"cannot set dirstate parent outside of "
524 b"dirstate.changing_parents context manager"
519 b"dirstate.changing_parents context manager"
525 )
520 )
526
521
527 self._dirty = True
522 self._dirty = True
528 oldp2 = self._pl[1]
523 oldp2 = self._pl[1]
529 if self._origpl is None:
524 if self._origpl is None:
530 self._origpl = self._pl
525 self._origpl = self._pl
531 nullid = self._nodeconstants.nullid
526 nullid = self._nodeconstants.nullid
532 # True if we need to fold p2 related state back to a linear case
527 # True if we need to fold p2 related state back to a linear case
533 fold_p2 = oldp2 != nullid and p2 == nullid
528 fold_p2 = oldp2 != nullid and p2 == nullid
534 return self._map.setparents(p1, p2, fold_p2=fold_p2)
529 return self._map.setparents(p1, p2, fold_p2=fold_p2)
535
530
536 def setbranch(self, branch):
531 def setbranch(self, branch):
537 self.__class__._branch.set(self, encoding.fromlocal(branch))
532 self.__class__._branch.set(self, encoding.fromlocal(branch))
538 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
533 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
539 try:
534 try:
540 f.write(self._branch + b'\n')
535 f.write(self._branch + b'\n')
541 f.close()
536 f.close()
542
537
543 # make sure filecache has the correct stat info for _branch after
538 # make sure filecache has the correct stat info for _branch after
544 # replacing the underlying file
539 # replacing the underlying file
545 ce = self._filecache[b'_branch']
540 ce = self._filecache[b'_branch']
546 if ce:
541 if ce:
547 ce.refresh()
542 ce.refresh()
548 except: # re-raises
543 except: # re-raises
549 f.discard()
544 f.discard()
550 raise
545 raise
551
546
552 def invalidate(self):
547 def invalidate(self):
553 """Causes the next access to reread the dirstate.
548 """Causes the next access to reread the dirstate.
554
549
555 This is different from localrepo.invalidatedirstate() because it always
550 This is different from localrepo.invalidatedirstate() because it always
556 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
551 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
557 check whether the dirstate has changed before rereading it."""
552 check whether the dirstate has changed before rereading it."""
558
553
559 for a in ("_map", "_branch", "_ignore"):
554 for a in ("_map", "_branch", "_ignore"):
560 if a in self.__dict__:
555 if a in self.__dict__:
561 delattr(self, a)
556 delattr(self, a)
562 self._dirty = False
557 self._dirty = False
563 self._dirty_tracked_set = False
558 self._dirty_tracked_set = False
564 self._invalidated_context = self._changing_level > 0
559 self._invalidated_context = self._changing_level > 0
565 self._origpl = None
560 self._origpl = None
566
561
567 @requires_changing_any
562 @requires_changing_any
568 def copy(self, source, dest):
563 def copy(self, source, dest):
569 """Mark dest as a copy of source. Unmark dest if source is None."""
564 """Mark dest as a copy of source. Unmark dest if source is None."""
570 if source == dest:
565 if source == dest:
571 return
566 return
572 self._dirty = True
567 self._dirty = True
573 if source is not None:
568 if source is not None:
574 self._check_sparse(source)
569 self._check_sparse(source)
575 self._map.copymap[dest] = source
570 self._map.copymap[dest] = source
576 else:
571 else:
577 self._map.copymap.pop(dest, None)
572 self._map.copymap.pop(dest, None)
578
573
579 def copied(self, file):
574 def copied(self, file):
580 return self._map.copymap.get(file, None)
575 return self._map.copymap.get(file, None)
581
576
582 def copies(self):
577 def copies(self):
583 return self._map.copymap
578 return self._map.copymap
584
579
585 @requires_changing_files
580 @requires_changing_files
586 def set_tracked(self, filename, reset_copy=False):
581 def set_tracked(self, filename, reset_copy=False):
587 """a "public" method for generic code to mark a file as tracked
582 """a "public" method for generic code to mark a file as tracked
588
583
589 This function is to be called outside of "update/merge" case. For
584 This function is to be called outside of "update/merge" case. For
590 example by a command like `hg add X`.
585 example by a command like `hg add X`.
591
586
592 if reset_copy is set, any existing copy information will be dropped.
587 if reset_copy is set, any existing copy information will be dropped.
593
588
594 return True the file was previously untracked, False otherwise.
589 return True the file was previously untracked, False otherwise.
595 """
590 """
596 self._dirty = True
591 self._dirty = True
597 entry = self._map.get(filename)
592 entry = self._map.get(filename)
598 if entry is None or not entry.tracked:
593 if entry is None or not entry.tracked:
599 self._check_new_tracked_filename(filename)
594 self._check_new_tracked_filename(filename)
600 pre_tracked = self._map.set_tracked(filename)
595 pre_tracked = self._map.set_tracked(filename)
601 if reset_copy:
596 if reset_copy:
602 self._map.copymap.pop(filename, None)
597 self._map.copymap.pop(filename, None)
603 if pre_tracked:
598 if pre_tracked:
604 self._dirty_tracked_set = True
599 self._dirty_tracked_set = True
605 return pre_tracked
600 return pre_tracked
606
601
607 @requires_changing_files
602 @requires_changing_files
608 def set_untracked(self, filename):
603 def set_untracked(self, filename):
609 """a "public" method for generic code to mark a file as untracked
604 """a "public" method for generic code to mark a file as untracked
610
605
611 This function is to be called outside of "update/merge" case. For
606 This function is to be called outside of "update/merge" case. For
612 example by a command like `hg remove X`.
607 example by a command like `hg remove X`.
613
608
614 return True the file was previously tracked, False otherwise.
609 return True the file was previously tracked, False otherwise.
615 """
610 """
616 ret = self._map.set_untracked(filename)
611 ret = self._map.set_untracked(filename)
617 if ret:
612 if ret:
618 self._dirty = True
613 self._dirty = True
619 self._dirty_tracked_set = True
614 self._dirty_tracked_set = True
620 return ret
615 return ret
621
616
622 @requires_not_changing_parents
617 @requires_not_changing_parents
623 def set_clean(self, filename, parentfiledata):
618 def set_clean(self, filename, parentfiledata):
624 """record that the current state of the file on disk is known to be clean"""
619 """record that the current state of the file on disk is known to be clean"""
625 self._dirty = True
620 self._dirty = True
626 if not self._map[filename].tracked:
621 if not self._map[filename].tracked:
627 self._check_new_tracked_filename(filename)
622 self._check_new_tracked_filename(filename)
628 (mode, size, mtime) = parentfiledata
623 (mode, size, mtime) = parentfiledata
629 self._map.set_clean(filename, mode, size, mtime)
624 self._map.set_clean(filename, mode, size, mtime)
630
625
631 @requires_not_changing_parents
626 @requires_not_changing_parents
632 def set_possibly_dirty(self, filename):
627 def set_possibly_dirty(self, filename):
633 """record that the current state of the file on disk is unknown"""
628 """record that the current state of the file on disk is unknown"""
634 self._dirty = True
629 self._dirty = True
635 self._map.set_possibly_dirty(filename)
630 self._map.set_possibly_dirty(filename)
636
631
637 @requires_changing_parents
632 @requires_changing_parents
638 def update_file_p1(
633 def update_file_p1(
639 self,
634 self,
640 filename,
635 filename,
641 p1_tracked,
636 p1_tracked,
642 ):
637 ):
643 """Set a file as tracked in the parent (or not)
638 """Set a file as tracked in the parent (or not)
644
639
645 This is to be called when adjust the dirstate to a new parent after an history
640 This is to be called when adjust the dirstate to a new parent after an history
646 rewriting operation.
641 rewriting operation.
647
642
648 It should not be called during a merge (p2 != nullid) and only within
643 It should not be called during a merge (p2 != nullid) and only within
649 a `with dirstate.changing_parents(repo):` context.
644 a `with dirstate.changing_parents(repo):` context.
650 """
645 """
651 if self.in_merge:
646 if self.in_merge:
652 msg = b'update_file_reference should not be called when merging'
647 msg = b'update_file_reference should not be called when merging'
653 raise error.ProgrammingError(msg)
648 raise error.ProgrammingError(msg)
654 entry = self._map.get(filename)
649 entry = self._map.get(filename)
655 if entry is None:
650 if entry is None:
656 wc_tracked = False
651 wc_tracked = False
657 else:
652 else:
658 wc_tracked = entry.tracked
653 wc_tracked = entry.tracked
659 if not (p1_tracked or wc_tracked):
654 if not (p1_tracked or wc_tracked):
660 # the file is no longer relevant to anyone
655 # the file is no longer relevant to anyone
661 if self._map.get(filename) is not None:
656 if self._map.get(filename) is not None:
662 self._map.reset_state(filename)
657 self._map.reset_state(filename)
663 self._dirty = True
658 self._dirty = True
664 elif (not p1_tracked) and wc_tracked:
659 elif (not p1_tracked) and wc_tracked:
665 if entry is not None and entry.added:
660 if entry is not None and entry.added:
666 return # avoid dropping copy information (maybe?)
661 return # avoid dropping copy information (maybe?)
667
662
668 self._map.reset_state(
663 self._map.reset_state(
669 filename,
664 filename,
670 wc_tracked,
665 wc_tracked,
671 p1_tracked,
666 p1_tracked,
672 # the underlying reference might have changed, we will have to
667 # the underlying reference might have changed, we will have to
673 # check it.
668 # check it.
674 has_meaningful_mtime=False,
669 has_meaningful_mtime=False,
675 )
670 )
676
671
677 @requires_changing_parents
672 @requires_changing_parents
678 def update_file(
673 def update_file(
679 self,
674 self,
680 filename,
675 filename,
681 wc_tracked,
676 wc_tracked,
682 p1_tracked,
677 p1_tracked,
683 p2_info=False,
678 p2_info=False,
684 possibly_dirty=False,
679 possibly_dirty=False,
685 parentfiledata=None,
680 parentfiledata=None,
686 ):
681 ):
687 """update the information about a file in the dirstate
682 """update the information about a file in the dirstate
688
683
689 This is to be called when the direstates parent changes to keep track
684 This is to be called when the direstates parent changes to keep track
690 of what is the file situation in regards to the working copy and its parent.
685 of what is the file situation in regards to the working copy and its parent.
691
686
692 This function must be called within a `dirstate.changing_parents` context.
687 This function must be called within a `dirstate.changing_parents` context.
693
688
694 note: the API is at an early stage and we might need to adjust it
689 note: the API is at an early stage and we might need to adjust it
695 depending of what information ends up being relevant and useful to
690 depending of what information ends up being relevant and useful to
696 other processing.
691 other processing.
697 """
692 """
698 self._update_file(
693 self._update_file(
699 filename=filename,
694 filename=filename,
700 wc_tracked=wc_tracked,
695 wc_tracked=wc_tracked,
701 p1_tracked=p1_tracked,
696 p1_tracked=p1_tracked,
702 p2_info=p2_info,
697 p2_info=p2_info,
703 possibly_dirty=possibly_dirty,
698 possibly_dirty=possibly_dirty,
704 parentfiledata=parentfiledata,
699 parentfiledata=parentfiledata,
705 )
700 )
706
701
707 # XXX since this make the dirstate dirty, we should enforce that it is done
702 # XXX since this make the dirstate dirty, we should enforce that it is done
708 # withing an appropriate change-context that scope the change and ensure it
703 # withing an appropriate change-context that scope the change and ensure it
709 # eventually get written on disk (or rolled back)
704 # eventually get written on disk (or rolled back)
710 def hacky_extension_update_file(self, *args, **kwargs):
705 def hacky_extension_update_file(self, *args, **kwargs):
711 """NEVER USE THIS, YOU DO NOT NEED IT
706 """NEVER USE THIS, YOU DO NOT NEED IT
712
707
713 This function is a variant of "update_file" to be called by a small set
708 This function is a variant of "update_file" to be called by a small set
714 of extensions, it also adjust the internal state of file, but can be
709 of extensions, it also adjust the internal state of file, but can be
715 called outside an `changing_parents` context.
710 called outside an `changing_parents` context.
716
711
717 A very small number of extension meddle with the working copy content
712 A very small number of extension meddle with the working copy content
718 in a way that requires to adjust the dirstate accordingly. At the time
713 in a way that requires to adjust the dirstate accordingly. At the time
719 this command is written they are :
714 this command is written they are :
720 - keyword,
715 - keyword,
721 - largefile,
716 - largefile,
722 PLEASE DO NOT GROW THIS LIST ANY FURTHER.
717 PLEASE DO NOT GROW THIS LIST ANY FURTHER.
723
718
724 This function could probably be replaced by more semantic one (like
719 This function could probably be replaced by more semantic one (like
725 "adjust expected size" or "always revalidate file content", etc)
720 "adjust expected size" or "always revalidate file content", etc)
726 however at the time where this is writen, this is too much of a detour
721 however at the time where this is writen, this is too much of a detour
727 to be considered.
722 to be considered.
728 """
723 """
729 self._update_file(
724 self._update_file(
730 *args,
725 *args,
731 **kwargs,
726 **kwargs,
732 )
727 )
733
728
734 def _update_file(
729 def _update_file(
735 self,
730 self,
736 filename,
731 filename,
737 wc_tracked,
732 wc_tracked,
738 p1_tracked,
733 p1_tracked,
739 p2_info=False,
734 p2_info=False,
740 possibly_dirty=False,
735 possibly_dirty=False,
741 parentfiledata=None,
736 parentfiledata=None,
742 ):
737 ):
743
738
744 # note: I do not think we need to double check name clash here since we
739 # note: I do not think we need to double check name clash here since we
745 # are in a update/merge case that should already have taken care of
740 # are in a update/merge case that should already have taken care of
746 # this. The test agrees
741 # this. The test agrees
747
742
748 self._dirty = True
743 self._dirty = True
749 old_entry = self._map.get(filename)
744 old_entry = self._map.get(filename)
750 if old_entry is None:
745 if old_entry is None:
751 prev_tracked = False
746 prev_tracked = False
752 else:
747 else:
753 prev_tracked = old_entry.tracked
748 prev_tracked = old_entry.tracked
754 if prev_tracked != wc_tracked:
749 if prev_tracked != wc_tracked:
755 self._dirty_tracked_set = True
750 self._dirty_tracked_set = True
756
751
757 self._map.reset_state(
752 self._map.reset_state(
758 filename,
753 filename,
759 wc_tracked,
754 wc_tracked,
760 p1_tracked,
755 p1_tracked,
761 p2_info=p2_info,
756 p2_info=p2_info,
762 has_meaningful_mtime=not possibly_dirty,
757 has_meaningful_mtime=not possibly_dirty,
763 parentfiledata=parentfiledata,
758 parentfiledata=parentfiledata,
764 )
759 )
765
760
766 def _check_new_tracked_filename(self, filename):
761 def _check_new_tracked_filename(self, filename):
767 scmutil.checkfilename(filename)
762 scmutil.checkfilename(filename)
768 if self._map.hastrackeddir(filename):
763 if self._map.hastrackeddir(filename):
769 msg = _(b'directory %r already in dirstate')
764 msg = _(b'directory %r already in dirstate')
770 msg %= pycompat.bytestr(filename)
765 msg %= pycompat.bytestr(filename)
771 raise error.Abort(msg)
766 raise error.Abort(msg)
772 # shadows
767 # shadows
773 for d in pathutil.finddirs(filename):
768 for d in pathutil.finddirs(filename):
774 if self._map.hastrackeddir(d):
769 if self._map.hastrackeddir(d):
775 break
770 break
776 entry = self._map.get(d)
771 entry = self._map.get(d)
777 if entry is not None and not entry.removed:
772 if entry is not None and not entry.removed:
778 msg = _(b'file %r in dirstate clashes with %r')
773 msg = _(b'file %r in dirstate clashes with %r')
779 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
774 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
780 raise error.Abort(msg)
775 raise error.Abort(msg)
781 self._check_sparse(filename)
776 self._check_sparse(filename)
782
777
783 def _check_sparse(self, filename):
778 def _check_sparse(self, filename):
784 """Check that a filename is inside the sparse profile"""
779 """Check that a filename is inside the sparse profile"""
785 sparsematch = self._sparsematcher
780 sparsematch = self._sparsematcher
786 if sparsematch is not None and not sparsematch.always():
781 if sparsematch is not None and not sparsematch.always():
787 if not sparsematch(filename):
782 if not sparsematch(filename):
788 msg = _(b"cannot add '%s' - it is outside the sparse checkout")
783 msg = _(b"cannot add '%s' - it is outside the sparse checkout")
789 hint = _(
784 hint = _(
790 b'include file with `hg debugsparse --include <pattern>` or use '
785 b'include file with `hg debugsparse --include <pattern>` or use '
791 b'`hg add -s <file>` to include file directory while adding'
786 b'`hg add -s <file>` to include file directory while adding'
792 )
787 )
793 raise error.Abort(msg % filename, hint=hint)
788 raise error.Abort(msg % filename, hint=hint)
794
789
795 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
790 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
796 if exists is None:
791 if exists is None:
797 exists = os.path.lexists(os.path.join(self._root, path))
792 exists = os.path.lexists(os.path.join(self._root, path))
798 if not exists:
793 if not exists:
799 # Maybe a path component exists
794 # Maybe a path component exists
800 if not ignoremissing and b'/' in path:
795 if not ignoremissing and b'/' in path:
801 d, f = path.rsplit(b'/', 1)
796 d, f = path.rsplit(b'/', 1)
802 d = self._normalize(d, False, ignoremissing, None)
797 d = self._normalize(d, False, ignoremissing, None)
803 folded = d + b"/" + f
798 folded = d + b"/" + f
804 else:
799 else:
805 # No path components, preserve original case
800 # No path components, preserve original case
806 folded = path
801 folded = path
807 else:
802 else:
808 # recursively normalize leading directory components
803 # recursively normalize leading directory components
809 # against dirstate
804 # against dirstate
810 if b'/' in normed:
805 if b'/' in normed:
811 d, f = normed.rsplit(b'/', 1)
806 d, f = normed.rsplit(b'/', 1)
812 d = self._normalize(d, False, ignoremissing, True)
807 d = self._normalize(d, False, ignoremissing, True)
813 r = self._root + b"/" + d
808 r = self._root + b"/" + d
814 folded = d + b"/" + util.fspath(f, r)
809 folded = d + b"/" + util.fspath(f, r)
815 else:
810 else:
816 folded = util.fspath(normed, self._root)
811 folded = util.fspath(normed, self._root)
817 storemap[normed] = folded
812 storemap[normed] = folded
818
813
819 return folded
814 return folded
820
815
821 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
816 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
822 normed = util.normcase(path)
817 normed = util.normcase(path)
823 folded = self._map.filefoldmap.get(normed, None)
818 folded = self._map.filefoldmap.get(normed, None)
824 if folded is None:
819 if folded is None:
825 if isknown:
820 if isknown:
826 folded = path
821 folded = path
827 else:
822 else:
828 folded = self._discoverpath(
823 folded = self._discoverpath(
829 path, normed, ignoremissing, exists, self._map.filefoldmap
824 path, normed, ignoremissing, exists, self._map.filefoldmap
830 )
825 )
831 return folded
826 return folded
832
827
833 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
828 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
834 normed = util.normcase(path)
829 normed = util.normcase(path)
835 folded = self._map.filefoldmap.get(normed, None)
830 folded = self._map.filefoldmap.get(normed, None)
836 if folded is None:
831 if folded is None:
837 folded = self._map.dirfoldmap.get(normed, None)
832 folded = self._map.dirfoldmap.get(normed, None)
838 if folded is None:
833 if folded is None:
839 if isknown:
834 if isknown:
840 folded = path
835 folded = path
841 else:
836 else:
842 # store discovered result in dirfoldmap so that future
837 # store discovered result in dirfoldmap so that future
843 # normalizefile calls don't start matching directories
838 # normalizefile calls don't start matching directories
844 folded = self._discoverpath(
839 folded = self._discoverpath(
845 path, normed, ignoremissing, exists, self._map.dirfoldmap
840 path, normed, ignoremissing, exists, self._map.dirfoldmap
846 )
841 )
847 return folded
842 return folded
848
843
849 def normalize(self, path, isknown=False, ignoremissing=False):
844 def normalize(self, path, isknown=False, ignoremissing=False):
850 """
845 """
851 normalize the case of a pathname when on a casefolding filesystem
846 normalize the case of a pathname when on a casefolding filesystem
852
847
853 isknown specifies whether the filename came from walking the
848 isknown specifies whether the filename came from walking the
854 disk, to avoid extra filesystem access.
849 disk, to avoid extra filesystem access.
855
850
856 If ignoremissing is True, missing path are returned
851 If ignoremissing is True, missing path are returned
857 unchanged. Otherwise, we try harder to normalize possibly
852 unchanged. Otherwise, we try harder to normalize possibly
858 existing path components.
853 existing path components.
859
854
860 The normalized case is determined based on the following precedence:
855 The normalized case is determined based on the following precedence:
861
856
862 - version of name already stored in the dirstate
857 - version of name already stored in the dirstate
863 - version of name stored on disk
858 - version of name stored on disk
864 - version provided via command arguments
859 - version provided via command arguments
865 """
860 """
866
861
867 if self._checkcase:
862 if self._checkcase:
868 return self._normalize(path, isknown, ignoremissing)
863 return self._normalize(path, isknown, ignoremissing)
869 return path
864 return path
870
865
871 # XXX this method is barely used, as a result:
866 # XXX this method is barely used, as a result:
872 # - its semantic is unclear
867 # - its semantic is unclear
873 # - do we really needs it ?
868 # - do we really needs it ?
874 @requires_changing_parents
869 @requires_changing_parents
875 def clear(self):
870 def clear(self):
876 self._map.clear()
871 self._map.clear()
877 self._dirty = True
872 self._dirty = True
878
873
879 @requires_changing_parents
874 @requires_changing_parents
880 def rebuild(self, parent, allfiles, changedfiles=None):
875 def rebuild(self, parent, allfiles, changedfiles=None):
881 matcher = self._sparsematcher
876 matcher = self._sparsematcher
882 if matcher is not None and not matcher.always():
877 if matcher is not None and not matcher.always():
883 # should not add non-matching files
878 # should not add non-matching files
884 allfiles = [f for f in allfiles if matcher(f)]
879 allfiles = [f for f in allfiles if matcher(f)]
885 if changedfiles:
880 if changedfiles:
886 changedfiles = [f for f in changedfiles if matcher(f)]
881 changedfiles = [f for f in changedfiles if matcher(f)]
887
882
888 if changedfiles is not None:
883 if changedfiles is not None:
889 # these files will be deleted from the dirstate when they are
884 # these files will be deleted from the dirstate when they are
890 # not found to be in allfiles
885 # not found to be in allfiles
891 dirstatefilestoremove = {f for f in self if not matcher(f)}
886 dirstatefilestoremove = {f for f in self if not matcher(f)}
892 changedfiles = dirstatefilestoremove.union(changedfiles)
887 changedfiles = dirstatefilestoremove.union(changedfiles)
893
888
894 if changedfiles is None:
889 if changedfiles is None:
895 # Rebuild entire dirstate
890 # Rebuild entire dirstate
896 to_lookup = allfiles
891 to_lookup = allfiles
897 to_drop = []
892 to_drop = []
898 self.clear()
893 self.clear()
899 elif len(changedfiles) < 10:
894 elif len(changedfiles) < 10:
900 # Avoid turning allfiles into a set, which can be expensive if it's
895 # Avoid turning allfiles into a set, which can be expensive if it's
901 # large.
896 # large.
902 to_lookup = []
897 to_lookup = []
903 to_drop = []
898 to_drop = []
904 for f in changedfiles:
899 for f in changedfiles:
905 if f in allfiles:
900 if f in allfiles:
906 to_lookup.append(f)
901 to_lookup.append(f)
907 else:
902 else:
908 to_drop.append(f)
903 to_drop.append(f)
909 else:
904 else:
910 changedfilesset = set(changedfiles)
905 changedfilesset = set(changedfiles)
911 to_lookup = changedfilesset & set(allfiles)
906 to_lookup = changedfilesset & set(allfiles)
912 to_drop = changedfilesset - to_lookup
907 to_drop = changedfilesset - to_lookup
913
908
914 if self._origpl is None:
909 if self._origpl is None:
915 self._origpl = self._pl
910 self._origpl = self._pl
916 self._map.setparents(parent, self._nodeconstants.nullid)
911 self._map.setparents(parent, self._nodeconstants.nullid)
917
912
918 for f in to_lookup:
913 for f in to_lookup:
919 if self.in_merge:
914 if self.in_merge:
920 self.set_tracked(f)
915 self.set_tracked(f)
921 else:
916 else:
922 self._map.reset_state(
917 self._map.reset_state(
923 f,
918 f,
924 wc_tracked=True,
919 wc_tracked=True,
925 p1_tracked=True,
920 p1_tracked=True,
926 )
921 )
927 for f in to_drop:
922 for f in to_drop:
928 self._map.reset_state(f)
923 self._map.reset_state(f)
929
924
930 self._dirty = True
925 self._dirty = True
931
926
932 def identity(self):
927 def identity(self):
933 """Return identity of dirstate itself to detect changing in storage
928 """Return identity of dirstate itself to detect changing in storage
934
929
935 If identity of previous dirstate is equal to this, writing
930 If identity of previous dirstate is equal to this, writing
936 changes based on the former dirstate out can keep consistency.
931 changes based on the former dirstate out can keep consistency.
937 """
932 """
938 return self._map.identity
933 return self._map.identity
939
934
940 def write(self, tr):
935 def write(self, tr):
941 if not self._dirty:
936 if not self._dirty:
942 return
937 return
943
938
944 write_key = self._use_tracked_hint and self._dirty_tracked_set
939 write_key = self._use_tracked_hint and self._dirty_tracked_set
945 if tr:
940 if tr:
946 # make sure we invalidate the current change on abort
941 # make sure we invalidate the current change on abort
947 if tr is not None:
942 if tr is not None:
948 tr.addabort(
943 tr.addabort(
949 b'dirstate-invalidate',
944 b'dirstate-invalidate',
950 lambda tr: self.invalidate(),
945 lambda tr: self.invalidate(),
951 )
946 )
952 # delay writing in-memory changes out
947 # delay writing in-memory changes out
953 tr.addfilegenerator(
948 tr.addfilegenerator(
954 b'dirstate-1-main',
949 b'dirstate-1-main',
955 (self._filename,),
950 (self._filename,),
956 lambda f: self._writedirstate(tr, f),
951 lambda f: self._writedirstate(tr, f),
957 location=b'plain',
952 location=b'plain',
958 post_finalize=True,
953 post_finalize=True,
959 )
954 )
960 if write_key:
955 if write_key:
961 tr.addfilegenerator(
956 tr.addfilegenerator(
962 b'dirstate-2-key-post',
957 b'dirstate-2-key-post',
963 (self._filename_th,),
958 (self._filename_th,),
964 lambda f: self._write_tracked_hint(tr, f),
959 lambda f: self._write_tracked_hint(tr, f),
965 location=b'plain',
960 location=b'plain',
966 post_finalize=True,
961 post_finalize=True,
967 )
962 )
968 return
963 return
969
964
970 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
965 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
971 with file(self._filename) as f:
966 with file(self._filename) as f:
972 self._writedirstate(tr, f)
967 self._writedirstate(tr, f)
973 if write_key:
968 if write_key:
974 # we update the key-file after writing to make sure reader have a
969 # we update the key-file after writing to make sure reader have a
975 # key that match the newly written content
970 # key that match the newly written content
976 with file(self._filename_th) as f:
971 with file(self._filename_th) as f:
977 self._write_tracked_hint(tr, f)
972 self._write_tracked_hint(tr, f)
978
973
979 def delete_tracked_hint(self):
974 def delete_tracked_hint(self):
980 """remove the tracked_hint file
975 """remove the tracked_hint file
981
976
982 To be used by format downgrades operation"""
977 To be used by format downgrades operation"""
983 self._opener.unlink(self._filename_th)
978 self._opener.unlink(self._filename_th)
984 self._use_tracked_hint = False
979 self._use_tracked_hint = False
985
980
986 def addparentchangecallback(self, category, callback):
981 def addparentchangecallback(self, category, callback):
987 """add a callback to be called when the wd parents are changed
982 """add a callback to be called when the wd parents are changed
988
983
989 Callback will be called with the following arguments:
984 Callback will be called with the following arguments:
990 dirstate, (oldp1, oldp2), (newp1, newp2)
985 dirstate, (oldp1, oldp2), (newp1, newp2)
991
986
992 Category is a unique identifier to allow overwriting an old callback
987 Category is a unique identifier to allow overwriting an old callback
993 with a newer callback.
988 with a newer callback.
994 """
989 """
995 self._plchangecallbacks[category] = callback
990 self._plchangecallbacks[category] = callback
996
991
997 def _writedirstate(self, tr, st):
992 def _writedirstate(self, tr, st):
998 # notify callbacks about parents change
993 # notify callbacks about parents change
999 if self._origpl is not None and self._origpl != self._pl:
994 if self._origpl is not None and self._origpl != self._pl:
1000 for c, callback in sorted(self._plchangecallbacks.items()):
995 for c, callback in sorted(self._plchangecallbacks.items()):
1001 callback(self, self._origpl, self._pl)
996 callback(self, self._origpl, self._pl)
1002 self._origpl = None
997 self._origpl = None
1003 self._map.write(tr, st)
998 self._map.write(tr, st)
1004 self._dirty = False
999 self._dirty = False
1005 self._dirty_tracked_set = False
1000 self._dirty_tracked_set = False
1006
1001
1007 def _write_tracked_hint(self, tr, f):
1002 def _write_tracked_hint(self, tr, f):
1008 key = node.hex(uuid.uuid4().bytes)
1003 key = node.hex(uuid.uuid4().bytes)
1009 f.write(b"1\n%s\n" % key) # 1 is the format version
1004 f.write(b"1\n%s\n" % key) # 1 is the format version
1010
1005
1011 def _dirignore(self, f):
1006 def _dirignore(self, f):
1012 if self._ignore(f):
1007 if self._ignore(f):
1013 return True
1008 return True
1014 for p in pathutil.finddirs(f):
1009 for p in pathutil.finddirs(f):
1015 if self._ignore(p):
1010 if self._ignore(p):
1016 return True
1011 return True
1017 return False
1012 return False
1018
1013
1019 def _ignorefiles(self):
1014 def _ignorefiles(self):
1020 files = []
1015 files = []
1021 if os.path.exists(self._join(b'.hgignore')):
1016 if os.path.exists(self._join(b'.hgignore')):
1022 files.append(self._join(b'.hgignore'))
1017 files.append(self._join(b'.hgignore'))
1023 for name, path in self._ui.configitems(b"ui"):
1018 for name, path in self._ui.configitems(b"ui"):
1024 if name == b'ignore' or name.startswith(b'ignore.'):
1019 if name == b'ignore' or name.startswith(b'ignore.'):
1025 # we need to use os.path.join here rather than self._join
1020 # we need to use os.path.join here rather than self._join
1026 # because path is arbitrary and user-specified
1021 # because path is arbitrary and user-specified
1027 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1022 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1028 return files
1023 return files
1029
1024
1030 def _ignorefileandline(self, f):
1025 def _ignorefileandline(self, f):
1031 files = collections.deque(self._ignorefiles())
1026 files = collections.deque(self._ignorefiles())
1032 visited = set()
1027 visited = set()
1033 while files:
1028 while files:
1034 i = files.popleft()
1029 i = files.popleft()
1035 patterns = matchmod.readpatternfile(
1030 patterns = matchmod.readpatternfile(
1036 i, self._ui.warn, sourceinfo=True
1031 i, self._ui.warn, sourceinfo=True
1037 )
1032 )
1038 for pattern, lineno, line in patterns:
1033 for pattern, lineno, line in patterns:
1039 kind, p = matchmod._patsplit(pattern, b'glob')
1034 kind, p = matchmod._patsplit(pattern, b'glob')
1040 if kind == b"subinclude":
1035 if kind == b"subinclude":
1041 if p not in visited:
1036 if p not in visited:
1042 files.append(p)
1037 files.append(p)
1043 continue
1038 continue
1044 m = matchmod.match(
1039 m = matchmod.match(
1045 self._root, b'', [], [pattern], warn=self._ui.warn
1040 self._root, b'', [], [pattern], warn=self._ui.warn
1046 )
1041 )
1047 if m(f):
1042 if m(f):
1048 return (i, lineno, line)
1043 return (i, lineno, line)
1049 visited.add(i)
1044 visited.add(i)
1050 return (None, -1, b"")
1045 return (None, -1, b"")
1051
1046
1052 def _walkexplicit(self, match, subrepos):
1047 def _walkexplicit(self, match, subrepos):
1053 """Get stat data about the files explicitly specified by match.
1048 """Get stat data about the files explicitly specified by match.
1054
1049
1055 Return a triple (results, dirsfound, dirsnotfound).
1050 Return a triple (results, dirsfound, dirsnotfound).
1056 - results is a mapping from filename to stat result. It also contains
1051 - results is a mapping from filename to stat result. It also contains
1057 listings mapping subrepos and .hg to None.
1052 listings mapping subrepos and .hg to None.
1058 - dirsfound is a list of files found to be directories.
1053 - dirsfound is a list of files found to be directories.
1059 - dirsnotfound is a list of files that the dirstate thinks are
1054 - dirsnotfound is a list of files that the dirstate thinks are
1060 directories and that were not found."""
1055 directories and that were not found."""
1061
1056
1062 def badtype(mode):
1057 def badtype(mode):
1063 kind = _(b'unknown')
1058 kind = _(b'unknown')
1064 if stat.S_ISCHR(mode):
1059 if stat.S_ISCHR(mode):
1065 kind = _(b'character device')
1060 kind = _(b'character device')
1066 elif stat.S_ISBLK(mode):
1061 elif stat.S_ISBLK(mode):
1067 kind = _(b'block device')
1062 kind = _(b'block device')
1068 elif stat.S_ISFIFO(mode):
1063 elif stat.S_ISFIFO(mode):
1069 kind = _(b'fifo')
1064 kind = _(b'fifo')
1070 elif stat.S_ISSOCK(mode):
1065 elif stat.S_ISSOCK(mode):
1071 kind = _(b'socket')
1066 kind = _(b'socket')
1072 elif stat.S_ISDIR(mode):
1067 elif stat.S_ISDIR(mode):
1073 kind = _(b'directory')
1068 kind = _(b'directory')
1074 return _(b'unsupported file type (type is %s)') % kind
1069 return _(b'unsupported file type (type is %s)') % kind
1075
1070
1076 badfn = match.bad
1071 badfn = match.bad
1077 dmap = self._map
1072 dmap = self._map
1078 lstat = os.lstat
1073 lstat = os.lstat
1079 getkind = stat.S_IFMT
1074 getkind = stat.S_IFMT
1080 dirkind = stat.S_IFDIR
1075 dirkind = stat.S_IFDIR
1081 regkind = stat.S_IFREG
1076 regkind = stat.S_IFREG
1082 lnkkind = stat.S_IFLNK
1077 lnkkind = stat.S_IFLNK
1083 join = self._join
1078 join = self._join
1084 dirsfound = []
1079 dirsfound = []
1085 foundadd = dirsfound.append
1080 foundadd = dirsfound.append
1086 dirsnotfound = []
1081 dirsnotfound = []
1087 notfoundadd = dirsnotfound.append
1082 notfoundadd = dirsnotfound.append
1088
1083
1089 if not match.isexact() and self._checkcase:
1084 if not match.isexact() and self._checkcase:
1090 normalize = self._normalize
1085 normalize = self._normalize
1091 else:
1086 else:
1092 normalize = None
1087 normalize = None
1093
1088
1094 files = sorted(match.files())
1089 files = sorted(match.files())
1095 subrepos.sort()
1090 subrepos.sort()
1096 i, j = 0, 0
1091 i, j = 0, 0
1097 while i < len(files) and j < len(subrepos):
1092 while i < len(files) and j < len(subrepos):
1098 subpath = subrepos[j] + b"/"
1093 subpath = subrepos[j] + b"/"
1099 if files[i] < subpath:
1094 if files[i] < subpath:
1100 i += 1
1095 i += 1
1101 continue
1096 continue
1102 while i < len(files) and files[i].startswith(subpath):
1097 while i < len(files) and files[i].startswith(subpath):
1103 del files[i]
1098 del files[i]
1104 j += 1
1099 j += 1
1105
1100
1106 if not files or b'' in files:
1101 if not files or b'' in files:
1107 files = [b'']
1102 files = [b'']
1108 # constructing the foldmap is expensive, so don't do it for the
1103 # constructing the foldmap is expensive, so don't do it for the
1109 # common case where files is ['']
1104 # common case where files is ['']
1110 normalize = None
1105 normalize = None
1111 results = dict.fromkeys(subrepos)
1106 results = dict.fromkeys(subrepos)
1112 results[b'.hg'] = None
1107 results[b'.hg'] = None
1113
1108
1114 for ff in files:
1109 for ff in files:
1115 if normalize:
1110 if normalize:
1116 nf = normalize(ff, False, True)
1111 nf = normalize(ff, False, True)
1117 else:
1112 else:
1118 nf = ff
1113 nf = ff
1119 if nf in results:
1114 if nf in results:
1120 continue
1115 continue
1121
1116
1122 try:
1117 try:
1123 st = lstat(join(nf))
1118 st = lstat(join(nf))
1124 kind = getkind(st.st_mode)
1119 kind = getkind(st.st_mode)
1125 if kind == dirkind:
1120 if kind == dirkind:
1126 if nf in dmap:
1121 if nf in dmap:
1127 # file replaced by dir on disk but still in dirstate
1122 # file replaced by dir on disk but still in dirstate
1128 results[nf] = None
1123 results[nf] = None
1129 foundadd((nf, ff))
1124 foundadd((nf, ff))
1130 elif kind == regkind or kind == lnkkind:
1125 elif kind == regkind or kind == lnkkind:
1131 results[nf] = st
1126 results[nf] = st
1132 else:
1127 else:
1133 badfn(ff, badtype(kind))
1128 badfn(ff, badtype(kind))
1134 if nf in dmap:
1129 if nf in dmap:
1135 results[nf] = None
1130 results[nf] = None
1136 except (OSError) as inst:
1131 except (OSError) as inst:
1137 # nf not found on disk - it is dirstate only
1132 # nf not found on disk - it is dirstate only
1138 if nf in dmap: # does it exactly match a missing file?
1133 if nf in dmap: # does it exactly match a missing file?
1139 results[nf] = None
1134 results[nf] = None
1140 else: # does it match a missing directory?
1135 else: # does it match a missing directory?
1141 if self._map.hasdir(nf):
1136 if self._map.hasdir(nf):
1142 notfoundadd(nf)
1137 notfoundadd(nf)
1143 else:
1138 else:
1144 badfn(ff, encoding.strtolocal(inst.strerror))
1139 badfn(ff, encoding.strtolocal(inst.strerror))
1145
1140
1146 # match.files() may contain explicitly-specified paths that shouldn't
1141 # match.files() may contain explicitly-specified paths that shouldn't
1147 # be taken; drop them from the list of files found. dirsfound/notfound
1142 # be taken; drop them from the list of files found. dirsfound/notfound
1148 # aren't filtered here because they will be tested later.
1143 # aren't filtered here because they will be tested later.
1149 if match.anypats():
1144 if match.anypats():
1150 for f in list(results):
1145 for f in list(results):
1151 if f == b'.hg' or f in subrepos:
1146 if f == b'.hg' or f in subrepos:
1152 # keep sentinel to disable further out-of-repo walks
1147 # keep sentinel to disable further out-of-repo walks
1153 continue
1148 continue
1154 if not match(f):
1149 if not match(f):
1155 del results[f]
1150 del results[f]
1156
1151
1157 # Case insensitive filesystems cannot rely on lstat() failing to detect
1152 # Case insensitive filesystems cannot rely on lstat() failing to detect
1158 # a case-only rename. Prune the stat object for any file that does not
1153 # a case-only rename. Prune the stat object for any file that does not
1159 # match the case in the filesystem, if there are multiple files that
1154 # match the case in the filesystem, if there are multiple files that
1160 # normalize to the same path.
1155 # normalize to the same path.
1161 if match.isexact() and self._checkcase:
1156 if match.isexact() and self._checkcase:
1162 normed = {}
1157 normed = {}
1163
1158
1164 for f, st in results.items():
1159 for f, st in results.items():
1165 if st is None:
1160 if st is None:
1166 continue
1161 continue
1167
1162
1168 nc = util.normcase(f)
1163 nc = util.normcase(f)
1169 paths = normed.get(nc)
1164 paths = normed.get(nc)
1170
1165
1171 if paths is None:
1166 if paths is None:
1172 paths = set()
1167 paths = set()
1173 normed[nc] = paths
1168 normed[nc] = paths
1174
1169
1175 paths.add(f)
1170 paths.add(f)
1176
1171
1177 for norm, paths in normed.items():
1172 for norm, paths in normed.items():
1178 if len(paths) > 1:
1173 if len(paths) > 1:
1179 for path in paths:
1174 for path in paths:
1180 folded = self._discoverpath(
1175 folded = self._discoverpath(
1181 path, norm, True, None, self._map.dirfoldmap
1176 path, norm, True, None, self._map.dirfoldmap
1182 )
1177 )
1183 if path != folded:
1178 if path != folded:
1184 results[path] = None
1179 results[path] = None
1185
1180
1186 return results, dirsfound, dirsnotfound
1181 return results, dirsfound, dirsnotfound
1187
1182
1188 def walk(self, match, subrepos, unknown, ignored, full=True):
1183 def walk(self, match, subrepos, unknown, ignored, full=True):
1189 """
1184 """
1190 Walk recursively through the directory tree, finding all files
1185 Walk recursively through the directory tree, finding all files
1191 matched by match.
1186 matched by match.
1192
1187
1193 If full is False, maybe skip some known-clean files.
1188 If full is False, maybe skip some known-clean files.
1194
1189
1195 Return a dict mapping filename to stat-like object (either
1190 Return a dict mapping filename to stat-like object (either
1196 mercurial.osutil.stat instance or return value of os.stat()).
1191 mercurial.osutil.stat instance or return value of os.stat()).
1197
1192
1198 """
1193 """
1199 # full is a flag that extensions that hook into walk can use -- this
1194 # full is a flag that extensions that hook into walk can use -- this
1200 # implementation doesn't use it at all. This satisfies the contract
1195 # implementation doesn't use it at all. This satisfies the contract
1201 # because we only guarantee a "maybe".
1196 # because we only guarantee a "maybe".
1202
1197
1203 if ignored:
1198 if ignored:
1204 ignore = util.never
1199 ignore = util.never
1205 dirignore = util.never
1200 dirignore = util.never
1206 elif unknown:
1201 elif unknown:
1207 ignore = self._ignore
1202 ignore = self._ignore
1208 dirignore = self._dirignore
1203 dirignore = self._dirignore
1209 else:
1204 else:
1210 # if not unknown and not ignored, drop dir recursion and step 2
1205 # if not unknown and not ignored, drop dir recursion and step 2
1211 ignore = util.always
1206 ignore = util.always
1212 dirignore = util.always
1207 dirignore = util.always
1213
1208
1214 if self._sparsematchfn is not None:
1209 if self._sparsematchfn is not None:
1215 em = matchmod.exact(match.files())
1210 em = matchmod.exact(match.files())
1216 sm = matchmod.unionmatcher([self._sparsematcher, em])
1211 sm = matchmod.unionmatcher([self._sparsematcher, em])
1217 match = matchmod.intersectmatchers(match, sm)
1212 match = matchmod.intersectmatchers(match, sm)
1218
1213
1219 matchfn = match.matchfn
1214 matchfn = match.matchfn
1220 matchalways = match.always()
1215 matchalways = match.always()
1221 matchtdir = match.traversedir
1216 matchtdir = match.traversedir
1222 dmap = self._map
1217 dmap = self._map
1223 listdir = util.listdir
1218 listdir = util.listdir
1224 lstat = os.lstat
1219 lstat = os.lstat
1225 dirkind = stat.S_IFDIR
1220 dirkind = stat.S_IFDIR
1226 regkind = stat.S_IFREG
1221 regkind = stat.S_IFREG
1227 lnkkind = stat.S_IFLNK
1222 lnkkind = stat.S_IFLNK
1228 join = self._join
1223 join = self._join
1229
1224
1230 exact = skipstep3 = False
1225 exact = skipstep3 = False
1231 if match.isexact(): # match.exact
1226 if match.isexact(): # match.exact
1232 exact = True
1227 exact = True
1233 dirignore = util.always # skip step 2
1228 dirignore = util.always # skip step 2
1234 elif match.prefix(): # match.match, no patterns
1229 elif match.prefix(): # match.match, no patterns
1235 skipstep3 = True
1230 skipstep3 = True
1236
1231
1237 if not exact and self._checkcase:
1232 if not exact and self._checkcase:
1238 normalize = self._normalize
1233 normalize = self._normalize
1239 normalizefile = self._normalizefile
1234 normalizefile = self._normalizefile
1240 skipstep3 = False
1235 skipstep3 = False
1241 else:
1236 else:
1242 normalize = self._normalize
1237 normalize = self._normalize
1243 normalizefile = None
1238 normalizefile = None
1244
1239
1245 # step 1: find all explicit files
1240 # step 1: find all explicit files
1246 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1241 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1247 if matchtdir:
1242 if matchtdir:
1248 for d in work:
1243 for d in work:
1249 matchtdir(d[0])
1244 matchtdir(d[0])
1250 for d in dirsnotfound:
1245 for d in dirsnotfound:
1251 matchtdir(d)
1246 matchtdir(d)
1252
1247
1253 skipstep3 = skipstep3 and not (work or dirsnotfound)
1248 skipstep3 = skipstep3 and not (work or dirsnotfound)
1254 work = [d for d in work if not dirignore(d[0])]
1249 work = [d for d in work if not dirignore(d[0])]
1255
1250
1256 # step 2: visit subdirectories
1251 # step 2: visit subdirectories
1257 def traverse(work, alreadynormed):
1252 def traverse(work, alreadynormed):
1258 wadd = work.append
1253 wadd = work.append
1259 while work:
1254 while work:
1260 tracing.counter('dirstate.walk work', len(work))
1255 tracing.counter('dirstate.walk work', len(work))
1261 nd = work.pop()
1256 nd = work.pop()
1262 visitentries = match.visitchildrenset(nd)
1257 visitentries = match.visitchildrenset(nd)
1263 if not visitentries:
1258 if not visitentries:
1264 continue
1259 continue
1265 if visitentries == b'this' or visitentries == b'all':
1260 if visitentries == b'this' or visitentries == b'all':
1266 visitentries = None
1261 visitentries = None
1267 skip = None
1262 skip = None
1268 if nd != b'':
1263 if nd != b'':
1269 skip = b'.hg'
1264 skip = b'.hg'
1270 try:
1265 try:
1271 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1266 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1272 entries = listdir(join(nd), stat=True, skip=skip)
1267 entries = listdir(join(nd), stat=True, skip=skip)
1273 except (PermissionError, FileNotFoundError) as inst:
1268 except (PermissionError, FileNotFoundError) as inst:
1274 match.bad(
1269 match.bad(
1275 self.pathto(nd), encoding.strtolocal(inst.strerror)
1270 self.pathto(nd), encoding.strtolocal(inst.strerror)
1276 )
1271 )
1277 continue
1272 continue
1278 for f, kind, st in entries:
1273 for f, kind, st in entries:
1279 # Some matchers may return files in the visitentries set,
1274 # Some matchers may return files in the visitentries set,
1280 # instead of 'this', if the matcher explicitly mentions them
1275 # instead of 'this', if the matcher explicitly mentions them
1281 # and is not an exactmatcher. This is acceptable; we do not
1276 # and is not an exactmatcher. This is acceptable; we do not
1282 # make any hard assumptions about file-or-directory below
1277 # make any hard assumptions about file-or-directory below
1283 # based on the presence of `f` in visitentries. If
1278 # based on the presence of `f` in visitentries. If
1284 # visitchildrenset returned a set, we can always skip the
1279 # visitchildrenset returned a set, we can always skip the
1285 # entries *not* in the set it provided regardless of whether
1280 # entries *not* in the set it provided regardless of whether
1286 # they're actually a file or a directory.
1281 # they're actually a file or a directory.
1287 if visitentries and f not in visitentries:
1282 if visitentries and f not in visitentries:
1288 continue
1283 continue
1289 if normalizefile:
1284 if normalizefile:
1290 # even though f might be a directory, we're only
1285 # even though f might be a directory, we're only
1291 # interested in comparing it to files currently in the
1286 # interested in comparing it to files currently in the
1292 # dmap -- therefore normalizefile is enough
1287 # dmap -- therefore normalizefile is enough
1293 nf = normalizefile(
1288 nf = normalizefile(
1294 nd and (nd + b"/" + f) or f, True, True
1289 nd and (nd + b"/" + f) or f, True, True
1295 )
1290 )
1296 else:
1291 else:
1297 nf = nd and (nd + b"/" + f) or f
1292 nf = nd and (nd + b"/" + f) or f
1298 if nf not in results:
1293 if nf not in results:
1299 if kind == dirkind:
1294 if kind == dirkind:
1300 if not ignore(nf):
1295 if not ignore(nf):
1301 if matchtdir:
1296 if matchtdir:
1302 matchtdir(nf)
1297 matchtdir(nf)
1303 wadd(nf)
1298 wadd(nf)
1304 if nf in dmap and (matchalways or matchfn(nf)):
1299 if nf in dmap and (matchalways or matchfn(nf)):
1305 results[nf] = None
1300 results[nf] = None
1306 elif kind == regkind or kind == lnkkind:
1301 elif kind == regkind or kind == lnkkind:
1307 if nf in dmap:
1302 if nf in dmap:
1308 if matchalways or matchfn(nf):
1303 if matchalways or matchfn(nf):
1309 results[nf] = st
1304 results[nf] = st
1310 elif (matchalways or matchfn(nf)) and not ignore(
1305 elif (matchalways or matchfn(nf)) and not ignore(
1311 nf
1306 nf
1312 ):
1307 ):
1313 # unknown file -- normalize if necessary
1308 # unknown file -- normalize if necessary
1314 if not alreadynormed:
1309 if not alreadynormed:
1315 nf = normalize(nf, False, True)
1310 nf = normalize(nf, False, True)
1316 results[nf] = st
1311 results[nf] = st
1317 elif nf in dmap and (matchalways or matchfn(nf)):
1312 elif nf in dmap and (matchalways or matchfn(nf)):
1318 results[nf] = None
1313 results[nf] = None
1319
1314
1320 for nd, d in work:
1315 for nd, d in work:
1321 # alreadynormed means that processwork doesn't have to do any
1316 # alreadynormed means that processwork doesn't have to do any
1322 # expensive directory normalization
1317 # expensive directory normalization
1323 alreadynormed = not normalize or nd == d
1318 alreadynormed = not normalize or nd == d
1324 traverse([d], alreadynormed)
1319 traverse([d], alreadynormed)
1325
1320
1326 for s in subrepos:
1321 for s in subrepos:
1327 del results[s]
1322 del results[s]
1328 del results[b'.hg']
1323 del results[b'.hg']
1329
1324
1330 # step 3: visit remaining files from dmap
1325 # step 3: visit remaining files from dmap
1331 if not skipstep3 and not exact:
1326 if not skipstep3 and not exact:
1332 # If a dmap file is not in results yet, it was either
1327 # If a dmap file is not in results yet, it was either
1333 # a) not matching matchfn b) ignored, c) missing, or d) under a
1328 # a) not matching matchfn b) ignored, c) missing, or d) under a
1334 # symlink directory.
1329 # symlink directory.
1335 if not results and matchalways:
1330 if not results and matchalways:
1336 visit = [f for f in dmap]
1331 visit = [f for f in dmap]
1337 else:
1332 else:
1338 visit = [f for f in dmap if f not in results and matchfn(f)]
1333 visit = [f for f in dmap if f not in results and matchfn(f)]
1339 visit.sort()
1334 visit.sort()
1340
1335
1341 if unknown:
1336 if unknown:
1342 # unknown == True means we walked all dirs under the roots
1337 # unknown == True means we walked all dirs under the roots
1343 # that wasn't ignored, and everything that matched was stat'ed
1338 # that wasn't ignored, and everything that matched was stat'ed
1344 # and is already in results.
1339 # and is already in results.
1345 # The rest must thus be ignored or under a symlink.
1340 # The rest must thus be ignored or under a symlink.
1346 audit_path = pathutil.pathauditor(self._root, cached=True)
1341 audit_path = pathutil.pathauditor(self._root, cached=True)
1347
1342
1348 for nf in iter(visit):
1343 for nf in iter(visit):
1349 # If a stat for the same file was already added with a
1344 # If a stat for the same file was already added with a
1350 # different case, don't add one for this, since that would
1345 # different case, don't add one for this, since that would
1351 # make it appear as if the file exists under both names
1346 # make it appear as if the file exists under both names
1352 # on disk.
1347 # on disk.
1353 if (
1348 if (
1354 normalizefile
1349 normalizefile
1355 and normalizefile(nf, True, True) in results
1350 and normalizefile(nf, True, True) in results
1356 ):
1351 ):
1357 results[nf] = None
1352 results[nf] = None
1358 # Report ignored items in the dmap as long as they are not
1353 # Report ignored items in the dmap as long as they are not
1359 # under a symlink directory.
1354 # under a symlink directory.
1360 elif audit_path.check(nf):
1355 elif audit_path.check(nf):
1361 try:
1356 try:
1362 results[nf] = lstat(join(nf))
1357 results[nf] = lstat(join(nf))
1363 # file was just ignored, no links, and exists
1358 # file was just ignored, no links, and exists
1364 except OSError:
1359 except OSError:
1365 # file doesn't exist
1360 # file doesn't exist
1366 results[nf] = None
1361 results[nf] = None
1367 else:
1362 else:
1368 # It's either missing or under a symlink directory
1363 # It's either missing or under a symlink directory
1369 # which we in this case report as missing
1364 # which we in this case report as missing
1370 results[nf] = None
1365 results[nf] = None
1371 else:
1366 else:
1372 # We may not have walked the full directory tree above,
1367 # We may not have walked the full directory tree above,
1373 # so stat and check everything we missed.
1368 # so stat and check everything we missed.
1374 iv = iter(visit)
1369 iv = iter(visit)
1375 for st in util.statfiles([join(i) for i in visit]):
1370 for st in util.statfiles([join(i) for i in visit]):
1376 results[next(iv)] = st
1371 results[next(iv)] = st
1377 return results
1372 return results
1378
1373
1379 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1374 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1380 if self._sparsematchfn is not None:
1375 if self._sparsematchfn is not None:
1381 em = matchmod.exact(matcher.files())
1376 em = matchmod.exact(matcher.files())
1382 sm = matchmod.unionmatcher([self._sparsematcher, em])
1377 sm = matchmod.unionmatcher([self._sparsematcher, em])
1383 matcher = matchmod.intersectmatchers(matcher, sm)
1378 matcher = matchmod.intersectmatchers(matcher, sm)
1384 # Force Rayon (Rust parallelism library) to respect the number of
1379 # Force Rayon (Rust parallelism library) to respect the number of
1385 # workers. This is a temporary workaround until Rust code knows
1380 # workers. This is a temporary workaround until Rust code knows
1386 # how to read the config file.
1381 # how to read the config file.
1387 numcpus = self._ui.configint(b"worker", b"numcpus")
1382 numcpus = self._ui.configint(b"worker", b"numcpus")
1388 if numcpus is not None:
1383 if numcpus is not None:
1389 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1384 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1390
1385
1391 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1386 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1392 if not workers_enabled:
1387 if not workers_enabled:
1393 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1388 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1394
1389
1395 (
1390 (
1396 lookup,
1391 lookup,
1397 modified,
1392 modified,
1398 added,
1393 added,
1399 removed,
1394 removed,
1400 deleted,
1395 deleted,
1401 clean,
1396 clean,
1402 ignored,
1397 ignored,
1403 unknown,
1398 unknown,
1404 warnings,
1399 warnings,
1405 bad,
1400 bad,
1406 traversed,
1401 traversed,
1407 dirty,
1402 dirty,
1408 ) = rustmod.status(
1403 ) = rustmod.status(
1409 self._map._map,
1404 self._map._map,
1410 matcher,
1405 matcher,
1411 self._rootdir,
1406 self._rootdir,
1412 self._ignorefiles(),
1407 self._ignorefiles(),
1413 self._checkexec,
1408 self._checkexec,
1414 bool(list_clean),
1409 bool(list_clean),
1415 bool(list_ignored),
1410 bool(list_ignored),
1416 bool(list_unknown),
1411 bool(list_unknown),
1417 bool(matcher.traversedir),
1412 bool(matcher.traversedir),
1418 )
1413 )
1419
1414
1420 self._dirty |= dirty
1415 self._dirty |= dirty
1421
1416
1422 if matcher.traversedir:
1417 if matcher.traversedir:
1423 for dir in traversed:
1418 for dir in traversed:
1424 matcher.traversedir(dir)
1419 matcher.traversedir(dir)
1425
1420
1426 if self._ui.warn:
1421 if self._ui.warn:
1427 for item in warnings:
1422 for item in warnings:
1428 if isinstance(item, tuple):
1423 if isinstance(item, tuple):
1429 file_path, syntax = item
1424 file_path, syntax = item
1430 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1425 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1431 file_path,
1426 file_path,
1432 syntax,
1427 syntax,
1433 )
1428 )
1434 self._ui.warn(msg)
1429 self._ui.warn(msg)
1435 else:
1430 else:
1436 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1431 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1437 self._ui.warn(
1432 self._ui.warn(
1438 msg
1433 msg
1439 % (
1434 % (
1440 pathutil.canonpath(
1435 pathutil.canonpath(
1441 self._rootdir, self._rootdir, item
1436 self._rootdir, self._rootdir, item
1442 ),
1437 ),
1443 b"No such file or directory",
1438 b"No such file or directory",
1444 )
1439 )
1445 )
1440 )
1446
1441
1447 for fn, message in bad:
1442 for fn, message in bad:
1448 matcher.bad(fn, encoding.strtolocal(message))
1443 matcher.bad(fn, encoding.strtolocal(message))
1449
1444
1450 status = scmutil.status(
1445 status = scmutil.status(
1451 modified=modified,
1446 modified=modified,
1452 added=added,
1447 added=added,
1453 removed=removed,
1448 removed=removed,
1454 deleted=deleted,
1449 deleted=deleted,
1455 unknown=unknown,
1450 unknown=unknown,
1456 ignored=ignored,
1451 ignored=ignored,
1457 clean=clean,
1452 clean=clean,
1458 )
1453 )
1459 return (lookup, status)
1454 return (lookup, status)
1460
1455
1461 # XXX since this can make the dirstate dirty (through rust), we should
1456 # XXX since this can make the dirstate dirty (through rust), we should
1462 # enforce that it is done withing an appropriate change-context that scope
1457 # enforce that it is done withing an appropriate change-context that scope
1463 # the change and ensure it eventually get written on disk (or rolled back)
1458 # the change and ensure it eventually get written on disk (or rolled back)
1464 def status(self, match, subrepos, ignored, clean, unknown):
1459 def status(self, match, subrepos, ignored, clean, unknown):
1465 """Determine the status of the working copy relative to the
1460 """Determine the status of the working copy relative to the
1466 dirstate and return a pair of (unsure, status), where status is of type
1461 dirstate and return a pair of (unsure, status), where status is of type
1467 scmutil.status and:
1462 scmutil.status and:
1468
1463
1469 unsure:
1464 unsure:
1470 files that might have been modified since the dirstate was
1465 files that might have been modified since the dirstate was
1471 written, but need to be read to be sure (size is the same
1466 written, but need to be read to be sure (size is the same
1472 but mtime differs)
1467 but mtime differs)
1473 status.modified:
1468 status.modified:
1474 files that have definitely been modified since the dirstate
1469 files that have definitely been modified since the dirstate
1475 was written (different size or mode)
1470 was written (different size or mode)
1476 status.clean:
1471 status.clean:
1477 files that have definitely not been modified since the
1472 files that have definitely not been modified since the
1478 dirstate was written
1473 dirstate was written
1479 """
1474 """
1480 listignored, listclean, listunknown = ignored, clean, unknown
1475 listignored, listclean, listunknown = ignored, clean, unknown
1481 lookup, modified, added, unknown, ignored = [], [], [], [], []
1476 lookup, modified, added, unknown, ignored = [], [], [], [], []
1482 removed, deleted, clean = [], [], []
1477 removed, deleted, clean = [], [], []
1483
1478
1484 dmap = self._map
1479 dmap = self._map
1485 dmap.preload()
1480 dmap.preload()
1486
1481
1487 use_rust = True
1482 use_rust = True
1488
1483
1489 allowed_matchers = (
1484 allowed_matchers = (
1490 matchmod.alwaysmatcher,
1485 matchmod.alwaysmatcher,
1491 matchmod.differencematcher,
1486 matchmod.differencematcher,
1492 matchmod.exactmatcher,
1487 matchmod.exactmatcher,
1493 matchmod.includematcher,
1488 matchmod.includematcher,
1494 matchmod.intersectionmatcher,
1489 matchmod.intersectionmatcher,
1495 matchmod.nevermatcher,
1490 matchmod.nevermatcher,
1496 matchmod.unionmatcher,
1491 matchmod.unionmatcher,
1497 )
1492 )
1498
1493
1499 if rustmod is None:
1494 if rustmod is None:
1500 use_rust = False
1495 use_rust = False
1501 elif self._checkcase:
1496 elif self._checkcase:
1502 # Case-insensitive filesystems are not handled yet
1497 # Case-insensitive filesystems are not handled yet
1503 use_rust = False
1498 use_rust = False
1504 elif subrepos:
1499 elif subrepos:
1505 use_rust = False
1500 use_rust = False
1506 elif not isinstance(match, allowed_matchers):
1501 elif not isinstance(match, allowed_matchers):
1507 # Some matchers have yet to be implemented
1502 # Some matchers have yet to be implemented
1508 use_rust = False
1503 use_rust = False
1509
1504
1510 # Get the time from the filesystem so we can disambiguate files that
1505 # Get the time from the filesystem so we can disambiguate files that
1511 # appear modified in the present or future.
1506 # appear modified in the present or future.
1512 try:
1507 try:
1513 mtime_boundary = timestamp.get_fs_now(self._opener)
1508 mtime_boundary = timestamp.get_fs_now(self._opener)
1514 except OSError:
1509 except OSError:
1515 # In largefiles or readonly context
1510 # In largefiles or readonly context
1516 mtime_boundary = None
1511 mtime_boundary = None
1517
1512
1518 if use_rust:
1513 if use_rust:
1519 try:
1514 try:
1520 res = self._rust_status(
1515 res = self._rust_status(
1521 match, listclean, listignored, listunknown
1516 match, listclean, listignored, listunknown
1522 )
1517 )
1523 return res + (mtime_boundary,)
1518 return res + (mtime_boundary,)
1524 except rustmod.FallbackError:
1519 except rustmod.FallbackError:
1525 pass
1520 pass
1526
1521
1527 def noop(f):
1522 def noop(f):
1528 pass
1523 pass
1529
1524
1530 dcontains = dmap.__contains__
1525 dcontains = dmap.__contains__
1531 dget = dmap.__getitem__
1526 dget = dmap.__getitem__
1532 ladd = lookup.append # aka "unsure"
1527 ladd = lookup.append # aka "unsure"
1533 madd = modified.append
1528 madd = modified.append
1534 aadd = added.append
1529 aadd = added.append
1535 uadd = unknown.append if listunknown else noop
1530 uadd = unknown.append if listunknown else noop
1536 iadd = ignored.append if listignored else noop
1531 iadd = ignored.append if listignored else noop
1537 radd = removed.append
1532 radd = removed.append
1538 dadd = deleted.append
1533 dadd = deleted.append
1539 cadd = clean.append if listclean else noop
1534 cadd = clean.append if listclean else noop
1540 mexact = match.exact
1535 mexact = match.exact
1541 dirignore = self._dirignore
1536 dirignore = self._dirignore
1542 checkexec = self._checkexec
1537 checkexec = self._checkexec
1543 checklink = self._checklink
1538 checklink = self._checklink
1544 copymap = self._map.copymap
1539 copymap = self._map.copymap
1545
1540
1546 # We need to do full walks when either
1541 # We need to do full walks when either
1547 # - we're listing all clean files, or
1542 # - we're listing all clean files, or
1548 # - match.traversedir does something, because match.traversedir should
1543 # - match.traversedir does something, because match.traversedir should
1549 # be called for every dir in the working dir
1544 # be called for every dir in the working dir
1550 full = listclean or match.traversedir is not None
1545 full = listclean or match.traversedir is not None
1551 for fn, st in self.walk(
1546 for fn, st in self.walk(
1552 match, subrepos, listunknown, listignored, full=full
1547 match, subrepos, listunknown, listignored, full=full
1553 ).items():
1548 ).items():
1554 if not dcontains(fn):
1549 if not dcontains(fn):
1555 if (listignored or mexact(fn)) and dirignore(fn):
1550 if (listignored or mexact(fn)) and dirignore(fn):
1556 if listignored:
1551 if listignored:
1557 iadd(fn)
1552 iadd(fn)
1558 else:
1553 else:
1559 uadd(fn)
1554 uadd(fn)
1560 continue
1555 continue
1561
1556
1562 t = dget(fn)
1557 t = dget(fn)
1563 mode = t.mode
1558 mode = t.mode
1564 size = t.size
1559 size = t.size
1565
1560
1566 if not st and t.tracked:
1561 if not st and t.tracked:
1567 dadd(fn)
1562 dadd(fn)
1568 elif t.p2_info:
1563 elif t.p2_info:
1569 madd(fn)
1564 madd(fn)
1570 elif t.added:
1565 elif t.added:
1571 aadd(fn)
1566 aadd(fn)
1572 elif t.removed:
1567 elif t.removed:
1573 radd(fn)
1568 radd(fn)
1574 elif t.tracked:
1569 elif t.tracked:
1575 if not checklink and t.has_fallback_symlink:
1570 if not checklink and t.has_fallback_symlink:
1576 # If the file system does not support symlink, the mode
1571 # If the file system does not support symlink, the mode
1577 # might not be correctly stored in the dirstate, so do not
1572 # might not be correctly stored in the dirstate, so do not
1578 # trust it.
1573 # trust it.
1579 ladd(fn)
1574 ladd(fn)
1580 elif not checkexec and t.has_fallback_exec:
1575 elif not checkexec and t.has_fallback_exec:
1581 # If the file system does not support exec bits, the mode
1576 # If the file system does not support exec bits, the mode
1582 # might not be correctly stored in the dirstate, so do not
1577 # might not be correctly stored in the dirstate, so do not
1583 # trust it.
1578 # trust it.
1584 ladd(fn)
1579 ladd(fn)
1585 elif (
1580 elif (
1586 size >= 0
1581 size >= 0
1587 and (
1582 and (
1588 (size != st.st_size and size != st.st_size & _rangemask)
1583 (size != st.st_size and size != st.st_size & _rangemask)
1589 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1584 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1590 )
1585 )
1591 or fn in copymap
1586 or fn in copymap
1592 ):
1587 ):
1593 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1588 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1594 # issue6456: Size returned may be longer due to
1589 # issue6456: Size returned may be longer due to
1595 # encryption on EXT-4 fscrypt, undecided.
1590 # encryption on EXT-4 fscrypt, undecided.
1596 ladd(fn)
1591 ladd(fn)
1597 else:
1592 else:
1598 madd(fn)
1593 madd(fn)
1599 elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
1594 elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
1600 # There might be a change in the future if for example the
1595 # There might be a change in the future if for example the
1601 # internal clock is off, but this is a case where the issues
1596 # internal clock is off, but this is a case where the issues
1602 # the user would face would be a lot worse and there is
1597 # the user would face would be a lot worse and there is
1603 # nothing we can really do.
1598 # nothing we can really do.
1604 ladd(fn)
1599 ladd(fn)
1605 elif listclean:
1600 elif listclean:
1606 cadd(fn)
1601 cadd(fn)
1607 status = scmutil.status(
1602 status = scmutil.status(
1608 modified, added, removed, deleted, unknown, ignored, clean
1603 modified, added, removed, deleted, unknown, ignored, clean
1609 )
1604 )
1610 return (lookup, status, mtime_boundary)
1605 return (lookup, status, mtime_boundary)
1611
1606
1612 def matches(self, match):
1607 def matches(self, match):
1613 """
1608 """
1614 return files in the dirstate (in whatever state) filtered by match
1609 return files in the dirstate (in whatever state) filtered by match
1615 """
1610 """
1616 dmap = self._map
1611 dmap = self._map
1617 if rustmod is not None:
1612 if rustmod is not None:
1618 dmap = self._map._map
1613 dmap = self._map._map
1619
1614
1620 if match.always():
1615 if match.always():
1621 return dmap.keys()
1616 return dmap.keys()
1622 files = match.files()
1617 files = match.files()
1623 if match.isexact():
1618 if match.isexact():
1624 # fast path -- filter the other way around, since typically files is
1619 # fast path -- filter the other way around, since typically files is
1625 # much smaller than dmap
1620 # much smaller than dmap
1626 return [f for f in files if f in dmap]
1621 return [f for f in files if f in dmap]
1627 if match.prefix() and all(fn in dmap for fn in files):
1622 if match.prefix() and all(fn in dmap for fn in files):
1628 # fast path -- all the values are known to be files, so just return
1623 # fast path -- all the values are known to be files, so just return
1629 # that
1624 # that
1630 return list(files)
1625 return list(files)
1631 return [f for f in dmap if match(f)]
1626 return [f for f in dmap if match(f)]
1632
1627
1633 def _actualfilename(self, tr):
1628 def _actualfilename(self, tr):
1634 if tr:
1629 if tr:
1635 return self._pendingfilename
1630 return self._pendingfilename
1636 else:
1631 else:
1637 return self._filename
1632 return self._filename
1638
1633
1639 def all_file_names(self):
1634 def all_file_names(self):
1640 """list all filename currently used by this dirstate
1635 """list all filename currently used by this dirstate
1641
1636
1642 This is only used to do `hg rollback` related backup in the transaction
1637 This is only used to do `hg rollback` related backup in the transaction
1643 """
1638 """
1644 if not self._opener.exists(self._filename):
1639 if not self._opener.exists(self._filename):
1645 # no data every written to disk yet
1640 # no data every written to disk yet
1646 return ()
1641 return ()
1647 elif self._use_dirstate_v2:
1642 elif self._use_dirstate_v2:
1648 return (
1643 return (
1649 self._filename,
1644 self._filename,
1650 self._map.docket.data_filename(),
1645 self._map.docket.data_filename(),
1651 )
1646 )
1652 else:
1647 else:
1653 return (self._filename,)
1648 return (self._filename,)
1654
1649
1655 def verify(self, m1, m2, p1, narrow_matcher=None):
1650 def verify(self, m1, m2, p1, narrow_matcher=None):
1656 """
1651 """
1657 check the dirstate contents against the parent manifest and yield errors
1652 check the dirstate contents against the parent manifest and yield errors
1658 """
1653 """
1659 missing_from_p1 = _(
1654 missing_from_p1 = _(
1660 b"%s marked as tracked in p1 (%s) but not in manifest1\n"
1655 b"%s marked as tracked in p1 (%s) but not in manifest1\n"
1661 )
1656 )
1662 unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
1657 unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
1663 missing_from_ps = _(
1658 missing_from_ps = _(
1664 b"%s marked as modified, but not in either manifest\n"
1659 b"%s marked as modified, but not in either manifest\n"
1665 )
1660 )
1666 missing_from_ds = _(
1661 missing_from_ds = _(
1667 b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
1662 b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
1668 )
1663 )
1669 for f, entry in self.items():
1664 for f, entry in self.items():
1670 if entry.p1_tracked:
1665 if entry.p1_tracked:
1671 if entry.modified and f not in m1 and f not in m2:
1666 if entry.modified and f not in m1 and f not in m2:
1672 yield missing_from_ps % f
1667 yield missing_from_ps % f
1673 elif f not in m1:
1668 elif f not in m1:
1674 yield missing_from_p1 % (f, node.short(p1))
1669 yield missing_from_p1 % (f, node.short(p1))
1675 if entry.added and f in m1:
1670 if entry.added and f in m1:
1676 yield unexpected_in_p1 % f
1671 yield unexpected_in_p1 % f
1677 for f in m1:
1672 for f in m1:
1678 if narrow_matcher is not None and not narrow_matcher(f):
1673 if narrow_matcher is not None and not narrow_matcher(f):
1679 continue
1674 continue
1680 entry = self.get_entry(f)
1675 entry = self.get_entry(f)
1681 if not entry.p1_tracked:
1676 if not entry.p1_tracked:
1682 yield missing_from_ds % (f, node.short(p1))
1677 yield missing_from_ds % (f, node.short(p1))
General Comments 0
You need to be logged in to leave comments. Login now