##// END OF EJS Templates
dirstate: cleanup the `_map` property cache...
marmoute -
r51024:3c6546b1 default
parent child Browse files
Show More
@@ -1,1707 +1,1706 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import collections
9 import collections
10 import contextlib
10 import contextlib
11 import os
11 import os
12 import stat
12 import stat
13 import uuid
13 import uuid
14
14
15 from .i18n import _
15 from .i18n import _
16 from .pycompat import delattr
16 from .pycompat import delattr
17
17
18 from hgdemandimport import tracing
18 from hgdemandimport import tracing
19
19
20 from . import (
20 from . import (
21 dirstatemap,
21 dirstatemap,
22 encoding,
22 encoding,
23 error,
23 error,
24 match as matchmod,
24 match as matchmod,
25 node,
25 node,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 util,
30 util,
31 )
31 )
32
32
33 from .dirstateutils import (
33 from .dirstateutils import (
34 timestamp,
34 timestamp,
35 )
35 )
36
36
37 from .interfaces import (
37 from .interfaces import (
38 dirstate as intdirstate,
38 dirstate as intdirstate,
39 util as interfaceutil,
39 util as interfaceutil,
40 )
40 )
41
41
# C implementation of the parsers, when available
parsers = policy.importmod('parsers')
# Rust implementation of the dirstate, when available
rustmod = policy.importrust('dirstate')

HAS_FAST_DIRSTATE_V2 = rustmod is not None

# local aliases for frequently used helpers
propertycache = util.propertycache
filecache = scmutil.filecache
_rangemask = dirstatemap.rangemask

DirstateItem = dirstatemap.DirstateItem
class repocache(filecache):
    """filecache for files in .hg/"""

    def join(self, obj, fname):
        # resolve `fname` relative to the repository's .hg/ directory
        return obj._opener.join(fname)
class rootcache(filecache):
    """filecache for files in the repository root"""

    def join(self, obj, fname):
        # resolve `fname` relative to the working-directory root
        return obj._join(fname)
def check_invalidated(func):
    """Decorator refusing to run `func` on an invalidated dirstate.

    The dirstate is in an "invalidated state" after an error occurred during
    its modification and remains so until we exited the top level scope that
    framed such change.
    """

    def wrap(self, *args, **kwargs):
        if not self._invalidated_context:
            return func(self, *args, **kwargs)
        msg = 'calling `%s` after the dirstate was invalidated'
        raise error.ProgrammingError(msg % func.__name__)

    return wrap
def requires_changing_parents(func):
    """Decorator ensuring `func` runs inside a `changing_parents` context."""

    def wrap(self, *args, **kwargs):
        if not self.is_changing_parents:
            msg = 'calling `%s` outside of a changing_parents context'
            raise error.ProgrammingError(msg % func.__name__)
        return func(self, *args, **kwargs)

    return check_invalidated(wrap)
def requires_changing_files(func):
    """Decorator ensuring `func` runs inside a `changing_files` context."""

    def wrap(self, *args, **kwargs):
        if not self.is_changing_files:
            msg = 'calling `%s` outside of a `changing_files`'
            raise error.ProgrammingError(msg % func.__name__)
        return func(self, *args, **kwargs)

    return check_invalidated(wrap)
def requires_changing_any(func):
    """Decorator ensuring `func` runs inside some dirstate-changing context."""

    def wrap(self, *args, **kwargs):
        if not self.is_changing_any:
            msg = 'calling `%s` outside of a changing context'
            raise error.ProgrammingError(msg % func.__name__)
        return func(self, *args, **kwargs)

    return check_invalidated(wrap)
def requires_not_changing_parents(func):
    """Decorator refusing to run `func` inside a `changing_parents` context."""

    def wrap(self, *args, **kwargs):
        if self.is_changing_parents:
            msg = 'calling `%s` inside of a changing_parents context'
            raise error.ProgrammingError(msg % func.__name__)
        return func(self, *args, **kwargs)

    return check_invalidated(wrap)
# kinds of change a `_changing` context can frame (mutually exclusive)
CHANGE_TYPE_PARENTS = "parents"
CHANGE_TYPE_FILES = "files"
134 @interfaceutil.implementer(intdirstate.idirstate)
134 @interfaceutil.implementer(intdirstate.idirstate)
135 class dirstate:
135 class dirstate:
136
136
137 # used by largefile to avoid overwritting transaction callbacK
137 # used by largefile to avoid overwritting transaction callbacK
138 _tr_key_suffix = b''
138 _tr_key_suffix = b''
139
139
    def __init__(
        self,
        opener,
        ui,
        root,
        validate,
        sparsematchfn,
        nodeconstants,
        use_dirstate_v2,
        use_tracked_hint=False,
    ):
        """Create a new dirstate object.

        opener is an open()-like callable that can be used to open the
        dirstate file; root is the root of the directory tracked by
        the dirstate.
        """
        self._use_dirstate_v2 = use_dirstate_v2
        self._use_tracked_hint = use_tracked_hint
        self._nodeconstants = nodeconstants
        self._opener = opener
        self._validate = validate
        self._root = root
        # Either build a sparse-matcher or None if sparse is disabled
        self._sparsematchfn = sparsematchfn
        # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
        # UNC path pointing to root share (issue4557)
        self._rootdir = pathutil.normasprefix(root)
        # True if any internal state may be different
        self._dirty = False
        # True if the set of tracked files may be different
        self._dirty_tracked_set = False
        self._ui = ui
        self._filecache = {}
        # nesting level of `changing_parents` context
        self._changing_level = 0
        # the change currently underway
        self._change_type = None
        # True if the current dirstate changing operations have been
        # invalidated (used to make sure all nested contexts have been exited)
        self._invalidated_context = False
        self._attached_to_a_transaction = False
        self._filename = b'dirstate'
        self._filename_th = b'dirstate-tracked-hint'
        self._pendingfilename = b'%s.pending' % self._filename
        self._plchangecallbacks = {}
        self._origpl = None
        self._mapcls = dirstatemap.dirstatemap
        # Access and cache cwd early, so we don't access it for the first time
        # after a working-copy update caused it to not exist (accessing it then
        # raises an exception).
        self._cwd
193 def refresh(self):
193 def refresh(self):
194 if '_branch' in vars(self):
194 if '_branch' in vars(self):
195 del self._branch
195 del self._branch
196 if '_map' in vars(self) and self._map.may_need_refresh():
196 if '_map' in vars(self) and self._map.may_need_refresh():
197 self.invalidate()
197 self.invalidate()
198
198
    def prefetch_parents(self):
        """make sure the parents are loaded

        Used to avoid a race condition.
        """
        # touching `_pl` forces the dirstate map (and the parents) to load
        self._pl
    @contextlib.contextmanager
    @check_invalidated
    def _changing(self, repo, change_type):
        """Context manager framing a set of dirstate modifications.

        Requires the wlock to be held and refuses to nest contexts of
        different `change_type`s.  On error the dirstate is invalidated; on
        success of the outermost context, the dirstate is written back.
        """
        if repo.currentwlock() is None:
            msg = b"trying to change the dirstate without holding the wlock"
            raise error.ProgrammingError(msg)

        has_tr = repo.currenttransaction() is not None
        if not has_tr and self._changing_level == 0 and self._dirty:
            msg = "entering a changing context, but dirstate is already dirty"
            raise error.ProgrammingError(msg)

        assert self._changing_level >= 0
        # different type of change are mutually exclusive
        if self._change_type is None:
            assert self._changing_level == 0
            self._change_type = change_type
        elif self._change_type != change_type:
            msg = (
                'trying to open "%s" dirstate-changing context while a "%s" is'
                ' already open'
            )
            msg %= (change_type, self._change_type)
            raise error.ProgrammingError(msg)
        should_write = False
        self._changing_level += 1
        try:
            yield
        except:  # re-raises
            self.invalidate()  # this will set `_invalidated_context`
            raise
        finally:
            assert self._changing_level > 0
            self._changing_level -= 1
            # If the dirstate is being invalidated, call invalidate again.
            # This will throw away anything added by a upper context and
            # reset the `_invalidated_context` flag when relevant
            if self._changing_level <= 0:
                self._change_type = None
                assert self._changing_level == 0
                if self._invalidated_context:
                    # make sure we invalidate anything an upper context might
                    # have changed.
                    self.invalidate()
                else:
                    should_write = self._changing_level <= 0
            tr = repo.currenttransaction()
            if has_tr != (tr is not None):
                # a transaction must not start or end while the dirstate is
                # being changed
                if has_tr:
                    m = "transaction vanished while changing dirstate"
                else:
                    m = "transaction appeared while changing dirstate"
                raise error.ProgrammingError(m)
            if should_write:
                self.write(tr)
    @contextlib.contextmanager
    def changing_parents(self, repo):
        """Frame a change of the dirstate parents (see `_changing`)."""
        with self._changing(repo, CHANGE_TYPE_PARENTS) as c:
            yield c
    @contextlib.contextmanager
    def changing_files(self, repo):
        """Frame a change of the set of tracked files (see `_changing`)."""
        with self._changing(repo, CHANGE_TYPE_FILES) as c:
            yield c
272 # here to help migration to the new code
272 # here to help migration to the new code
273 def parentchange(self):
273 def parentchange(self):
274 msg = (
274 msg = (
275 "Mercurial 6.4 and later requires call to "
275 "Mercurial 6.4 and later requires call to "
276 "`dirstate.changing_parents(repo)`"
276 "`dirstate.changing_parents(repo)`"
277 )
277 )
278 raise error.ProgrammingError(msg)
278 raise error.ProgrammingError(msg)
279
279
    @property
    def is_changing_any(self):
        """Returns true if the dirstate is in the middle of a set of changes.

        This returns True for any kind of change.
        """
        return self._changing_level > 0
    def pendingparentchange(self):
        # backward-compatibility shim; delegates to `is_changing_parent`,
        # which emits the deprecation warning
        return self.is_changing_parent()
    def is_changing_parent(self):
        """Returns true if the dirstate is in the middle of a set of changes
        that modify the dirstate parent.
        """
        # deprecated accessor: warn, then forward to the replacement property
        self._ui.deprecwarn(b"dirstate.is_changing_parents", b"6.5")
        return self.is_changing_parents
298 @property
298 @property
299 def is_changing_parents(self):
299 def is_changing_parents(self):
300 """Returns true if the dirstate is in the middle of a set of changes
300 """Returns true if the dirstate is in the middle of a set of changes
301 that modify the dirstate parent.
301 that modify the dirstate parent.
302 """
302 """
303 if self._changing_level <= 0:
303 if self._changing_level <= 0:
304 return False
304 return False
305 return self._change_type == CHANGE_TYPE_PARENTS
305 return self._change_type == CHANGE_TYPE_PARENTS
306
306
307 @property
307 @property
308 def is_changing_files(self):
308 def is_changing_files(self):
309 """Returns true if the dirstate is in the middle of a set of changes
309 """Returns true if the dirstate is in the middle of a set of changes
310 that modify the files tracked or their sources.
310 that modify the files tracked or their sources.
311 """
311 """
312 if self._changing_level <= 0:
312 if self._changing_level <= 0:
313 return False
313 return False
314 return self._change_type == CHANGE_TYPE_FILES
314 return self._change_type == CHANGE_TYPE_FILES
315
315
316 @propertycache
316 @propertycache
317 def _map(self):
317 def _map(self):
318 """Return the dirstate contents (see documentation for dirstatemap)."""
318 """Return the dirstate contents (see documentation for dirstatemap)."""
319 self._map = self._mapcls(
319 return self._mapcls(
320 self._ui,
320 self._ui,
321 self._opener,
321 self._opener,
322 self._root,
322 self._root,
323 self._nodeconstants,
323 self._nodeconstants,
324 self._use_dirstate_v2,
324 self._use_dirstate_v2,
325 )
325 )
326 return self._map
327
326
    @property
    def _sparsematcher(self):
        """The matcher for the sparse checkout.

        The working directory may not include every file from a manifest. The
        matcher obtained by this property will match a path if it is to be
        included in the working directory.

        When sparse is disabled, return None.
        """
        if self._sparsematchfn is None:
            return None
        # TODO there is potential to cache this property. For now, the matcher
        # is resolved on every access. (But the called function does use a
        # cache to keep the lookup fast.)
        return self._sparsematchfn()
345 @repocache(b'branch')
344 @repocache(b'branch')
346 def _branch(self):
345 def _branch(self):
347 try:
346 try:
348 return self._opener.read(b"branch").strip() or b"default"
347 return self._opener.read(b"branch").strip() or b"default"
349 except FileNotFoundError:
348 except FileNotFoundError:
350 return b"default"
349 return b"default"
351
350
    @property
    def _pl(self):
        """The dirstate parents, as stored in the dirstate map."""
        return self._map.parents()
    def hasdir(self, d):
        """True if `d` is a directory containing tracked files."""
        return self._map.hastrackeddir(d)
359 @rootcache(b'.hgignore')
358 @rootcache(b'.hgignore')
360 def _ignore(self):
359 def _ignore(self):
361 files = self._ignorefiles()
360 files = self._ignorefiles()
362 if not files:
361 if not files:
363 return matchmod.never()
362 return matchmod.never()
364
363
365 pats = [b'include:%s' % f for f in files]
364 pats = [b'include:%s' % f for f in files]
366 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
365 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
367
366
368 @propertycache
367 @propertycache
369 def _slash(self):
368 def _slash(self):
370 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
369 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
371
370
    @propertycache
    def _checklink(self):
        # whether the working directory filesystem supports symlinks
        # (as reported by util.checklink)
        return util.checklink(self._root)
376 @propertycache
375 @propertycache
377 def _checkexec(self):
376 def _checkexec(self):
378 return bool(util.checkexec(self._root))
377 return bool(util.checkexec(self._root))
379
378
    @propertycache
    def _checkcase(self):
        # True when the filesystem hosting `.hg` is case-insensitive
        return not util.fscasesensitive(self._join(b'.hg'))
    def _join(self, f):
        """Return the absolute path of `f` under the working-directory root."""
        # much faster than os.path.join()
        # it's safe because f is always a relative path
        return self._rootdir + f
    def flagfunc(self, buildfallback):
        """build a callable that returns flags associated with a filename

        The information is extracted from three possible layers:
        1. the file system if it supports the information
        2. the "fallback" information stored in the dirstate if any
        3. a more expensive mechanism inferring the flags from the parents.
        """

        # small hack to cache the result of buildfallback()
        fallback_func = []

        def get_flags(x):
            entry = None
            fallback_value = None
            try:
                st = os.lstat(self._join(x))
            except OSError:
                # file is gone (or unreadable): no flags
                return b''

            if self._checklink:
                if util.statislink(st):
                    return b'l'
            else:
                # the filesystem cannot express symlinks: consult the
                # dirstate fallback bit, then the expensive fallback function
                entry = self.get_entry(x)
                if entry.has_fallback_symlink:
                    if entry.fallback_symlink:
                        return b'l'
                else:
                    if not fallback_func:
                        fallback_func.append(buildfallback())
                    fallback_value = fallback_func[0](x)
                    if b'l' in fallback_value:
                        return b'l'

            if self._checkexec:
                if util.statisexec(st):
                    return b'x'
            else:
                # same layered lookup for the exec bit
                if entry is None:
                    entry = self.get_entry(x)
                if entry.has_fallback_exec:
                    if entry.fallback_exec:
                        return b'x'
                else:
                    if fallback_value is None:
                        if not fallback_func:
                            fallback_func.append(buildfallback())
                        fallback_value = fallback_func[0](x)
                    if b'x' in fallback_value:
                        return b'x'
            return b''

        return get_flags
444 @propertycache
443 @propertycache
445 def _cwd(self):
444 def _cwd(self):
446 # internal config: ui.forcecwd
445 # internal config: ui.forcecwd
447 forcecwd = self._ui.config(b'ui', b'forcecwd')
446 forcecwd = self._ui.config(b'ui', b'forcecwd')
448 if forcecwd:
447 if forcecwd:
449 return forcecwd
448 return forcecwd
450 return encoding.getcwd()
449 return encoding.getcwd()
451
450
    def getcwd(self):
        """Return the path from which a canonical path is calculated.

        This path should be used to resolve file patterns or to convert
        canonical paths back to file paths for display. It shouldn't be
        used to get real file paths. Use vfs functions instead.
        """
        cwd = self._cwd
        if cwd == self._root:
            return b''
        # self._root ends with a path separator if self._root is '/' or 'C:\'
        rootsep = self._root
        if not util.endswithsep(rootsep):
            rootsep += pycompat.ossep
        if cwd.startswith(rootsep):
            # inside the repo: return the path relative to the root
            return cwd[len(rootsep) :]
        else:
            # we're outside the repo. return an absolute path.
            return cwd
472 def pathto(self, f, cwd=None):
471 def pathto(self, f, cwd=None):
473 if cwd is None:
472 if cwd is None:
474 cwd = self.getcwd()
473 cwd = self.getcwd()
475 path = util.pathto(self._root, cwd, f)
474 path = util.pathto(self._root, cwd, f)
476 if self._slash:
475 if self._slash:
477 return util.pconvert(path)
476 return util.pconvert(path)
478 return path
477 return path
479
478
480 def get_entry(self, path):
479 def get_entry(self, path):
481 """return a DirstateItem for the associated path"""
480 """return a DirstateItem for the associated path"""
482 entry = self._map.get(path)
481 entry = self._map.get(path)
483 if entry is None:
482 if entry is None:
484 return DirstateItem()
483 return DirstateItem()
485 return entry
484 return entry
486
485
    def __contains__(self, key):
        """True if `key` is present in the dirstate map."""
        return key in self._map
    def __iter__(self):
        """Iterate over the dirstate filenames in sorted order."""
        return iter(sorted(self._map))
    def items(self):
        """Iterate over the (filename, item) pairs of the dirstate map."""
        return self._map.items()

    # alias of items()
    iteritems = items
498 def parents(self):
497 def parents(self):
499 return [self._validate(p) for p in self._pl]
498 return [self._validate(p) for p in self._pl]
500
499
    def p1(self):
        """Return the first dirstate parent, run through `_validate`."""
        return self._validate(self._pl[0])
    def p2(self):
        """Return the second dirstate parent, run through `_validate`."""
        return self._validate(self._pl[1])
    @property
    def in_merge(self):
        """True if a merge is in progress"""
        # a non-null second parent means a merge is in progress
        return self._pl[1] != self._nodeconstants.nullid
    def branch(self):
        """Return the current branch name in the local encoding."""
        return encoding.tolocal(self._branch)
    @requires_changing_parents
    def setparents(self, p1, p2=None):
        """Set dirstate parents to p1 and p2.

        When moving from two parents to one, "merged" entries are
        adjusted to normal and previous copy records discarded and
        returned by the call.

        See localrepo.setparents()
        """
        if p2 is None:
            p2 = self._nodeconstants.nullid
        if self._changing_level == 0:
            raise ValueError(
                b"cannot set dirstate parent outside of "
                b"dirstate.changing_parents context manager"
            )

        self._dirty = True
        oldp2 = self._pl[1]
        if self._origpl is None:
            # remember the pre-change parents the first time they are altered
            self._origpl = self._pl
        nullid = self._nodeconstants.nullid
        # True if we need to fold p2 related state back to a linear case
        fold_p2 = oldp2 != nullid and p2 == nullid
        return self._map.setparents(p1, p2, fold_p2=fold_p2)
    def setbranch(self, branch):
        """Record `branch` as the current branch, in memory and on disk."""
        self.__class__._branch.set(self, encoding.fromlocal(branch))
        f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
        try:
            f.write(self._branch + b'\n')
            f.close()

            # make sure filecache has the correct stat info for _branch after
            # replacing the underlying file
            ce = self._filecache[b'_branch']
            if ce:
                ce.refresh()
        except:  # re-raises
            # on any failure, discard the atomictemp file instead of renaming
            # it into place
            f.discard()
            raise
    def invalidate(self):
        """Causes the next access to reread the dirstate.

        This is different from localrepo.invalidatedirstate() because it always
        rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
        check whether the dirstate has changed before rereading it."""

        # drop the cached properties that mirror on-disk state
        for a in ("_map", "_branch", "_ignore"):
            if a in self.__dict__:
                delattr(self, a)
        self._dirty = False
        self._dirty_tracked_set = False
        # stay "invalidated" while still inside a changing context or
        # attached to a transaction
        self._invalidated_context = (
            self._changing_level > 0 or self._attached_to_a_transaction
        )
        self._origpl = None
    @requires_changing_any
    def copy(self, source, dest):
        """Mark dest as a copy of source. Unmark dest if source is None."""
        # copying a file onto itself is a no-op
        if source == dest:
            return
        self._dirty = True
        if source is not None:
            # reject sources outside the sparse profile (raises Abort)
            self._check_sparse(source)
            self._map.copymap[dest] = source
        else:
            # forget any previously recorded copy source for dest
            self._map.copymap.pop(dest, None)
586
585
587 def copied(self, file):
586 def copied(self, file):
588 return self._map.copymap.get(file, None)
587 return self._map.copymap.get(file, None)
589
588
    def copies(self):
        # Expose the live destination -> source copy mapping; this is the
        # map's own dict, not a snapshot, so mutations affect dirstate state.
        return self._map.copymap
592
591
    @requires_changing_files
    def set_tracked(self, filename, reset_copy=False):
        """a "public" method for generic code to mark a file as tracked

        This function is to be called outside of "update/merge" case. For
        example by a command like `hg add X`.

        if reset_copy is set, any existing copy information will be dropped.

        return True the file was previously untracked, False otherwise.
        """
        self._dirty = True
        entry = self._map.get(filename)
        if entry is None or not entry.tracked:
            # newly tracked name: validate it (name checks, directory/file
            # clashes, sparse profile)
            self._check_new_tracked_filename(filename)
        pre_tracked = self._map.set_tracked(filename)
        if reset_copy:
            self._map.copymap.pop(filename, None)
        if pre_tracked:
            # tracked set changed: the tracked-hint file needs rewriting too
            self._dirty_tracked_set = True
        return pre_tracked
614
613
    @requires_changing_files
    def set_untracked(self, filename):
        """a "public" method for generic code to mark a file as untracked

        This function is to be called outside of "update/merge" case. For
        example by a command like `hg remove X`.

        return True the file was previously tracked, False otherwise.
        """
        ret = self._map.set_untracked(filename)
        if ret:
            # tracked set changed: both the dirstate and the tracked-hint
            # file need rewriting
            self._dirty = True
            self._dirty_tracked_set = True
        return ret
629
628
    @requires_not_changing_parents
    def set_clean(self, filename, parentfiledata):
        """record that the current state of the file on disk is known to be clean"""
        self._dirty = True
        if not self._map[filename].tracked:
            self._check_new_tracked_filename(filename)
        # parentfiledata carries the stat information matching the on-disk
        # file content
        (mode, size, mtime) = parentfiledata
        self._map.set_clean(filename, mode, size, mtime)
638
637
    @requires_not_changing_parents
    def set_possibly_dirty(self, filename):
        """record that the current state of the file on disk is unknown"""
        self._dirty = True
        # delegate the entry state change to the dirstate map implementation
        self._map.set_possibly_dirty(filename)
644
643
    @requires_changing_parents
    def update_file_p1(
        self,
        filename,
        p1_tracked,
    ):
        """Set a file as tracked in the parent (or not)

        This is to be called when adjust the dirstate to a new parent after an history
        rewriting operation.

        It should not be called during a merge (p2 != nullid) and only within
        a `with dirstate.changing_parents(repo):` context.
        """
        if self.in_merge:
            msg = b'update_file_reference should not be called when merging'
            raise error.ProgrammingError(msg)
        entry = self._map.get(filename)
        if entry is None:
            wc_tracked = False
        else:
            wc_tracked = entry.tracked
        if not (p1_tracked or wc_tracked):
            # the file is no longer relevant to anyone
            if self._map.get(filename) is not None:
                self._map.reset_state(filename)
                self._dirty = True
        elif (not p1_tracked) and wc_tracked:
            if entry is not None and entry.added:
                return  # avoid dropping copy information (maybe?)

        self._map.reset_state(
            filename,
            wc_tracked,
            p1_tracked,
            # the underlying reference might have changed, we will have to
            # check it.
            has_meaningful_mtime=False,
        )
684
683
    @requires_changing_parents
    def update_file(
        self,
        filename,
        wc_tracked,
        p1_tracked,
        p2_info=False,
        possibly_dirty=False,
        parentfiledata=None,
    ):
        """update the information about a file in the dirstate

        This is to be called when the direstates parent changes to keep track
        of what is the file situation in regards to the working copy and its parent.

        This function must be called within a `dirstate.changing_parents` context.

        note: the API is at an early stage and we might need to adjust it
        depending of what information ends up being relevant and useful to
        other processing.
        """
        # all the heavy lifting is shared with hacky_extension_update_file
        self._update_file(
            filename=filename,
            wc_tracked=wc_tracked,
            p1_tracked=p1_tracked,
            p2_info=p2_info,
            possibly_dirty=possibly_dirty,
            parentfiledata=parentfiledata,
        )
714
713
    # XXX since this make the dirstate dirty, we should enforce that it is done
    # withing an appropriate change-context that scope the change and ensure it
    # eventually get written on disk (or rolled back)
    def hacky_extension_update_file(self, *args, **kwargs):
        """NEVER USE THIS, YOU DO NOT NEED IT

        This function is a variant of "update_file" to be called by a small set
        of extensions, it also adjust the internal state of file, but can be
        called outside an `changing_parents` context.

        A very small number of extension meddle with the working copy content
        in a way that requires to adjust the dirstate accordingly. At the time
        this command is written they are :
        - keyword,
        - largefile,
        PLEASE DO NOT GROW THIS LIST ANY FURTHER.

        This function could probably be replaced by more semantic one (like
        "adjust expected size" or "always revalidate file content", etc)
        however at the time where this is writen, this is too much of a detour
        to be considered.
        """
        # same implementation as update_file, minus the decorator check
        self._update_file(
            *args,
            **kwargs,
        )
741
740
    def _update_file(
        self,
        filename,
        wc_tracked,
        p1_tracked,
        p2_info=False,
        possibly_dirty=False,
        parentfiledata=None,
    ):
        """shared implementation behind update_file and its hacky variant

        Resets the map entry for *filename* to the given tracked state and
        flags the dirstate as dirty (and the tracked-hint as dirty too when
        the tracked-ness actually changed).
        """

        # note: I do not think we need to double check name clash here since we
        # are in a update/merge case that should already have taken care of
        # this. The test agrees

        self._dirty = True
        old_entry = self._map.get(filename)
        if old_entry is None:
            prev_tracked = False
        else:
            prev_tracked = old_entry.tracked
        if prev_tracked != wc_tracked:
            # tracked set changed: the tracked-hint file needs rewriting
            self._dirty_tracked_set = True

        self._map.reset_state(
            filename,
            wc_tracked,
            p1_tracked,
            p2_info=p2_info,
            has_meaningful_mtime=not possibly_dirty,
            parentfiledata=parentfiledata,
        )
773
772
    def _check_new_tracked_filename(self, filename):
        """Validate a filename that is about to become tracked.

        Aborts if the name itself is invalid, if it collides with a tracked
        directory, if one of its parent directories is a tracked file, or if
        it falls outside the sparse profile.
        """
        scmutil.checkfilename(filename)
        if self._map.hastrackeddir(filename):
            msg = _(b'directory %r already in dirstate')
            msg %= pycompat.bytestr(filename)
            raise error.Abort(msg)
        # shadows
        for d in pathutil.finddirs(filename):
            if self._map.hastrackeddir(d):
                # stop at the first ancestor already known as a directory
                break
            entry = self._map.get(d)
            if entry is not None and not entry.removed:
                # a (non-removed) tracked file sits where a parent directory
                # of filename would need to be
                msg = _(b'file %r in dirstate clashes with %r')
                msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
                raise error.Abort(msg)
        self._check_sparse(filename)
790
789
    def _check_sparse(self, filename):
        """Check that a filename is inside the sparse profile"""
        sparsematch = self._sparsematcher
        # a None matcher, or one that matches everything, means there is
        # nothing to restrict against
        if sparsematch is not None and not sparsematch.always():
            if not sparsematch(filename):
                msg = _(b"cannot add '%s' - it is outside the sparse checkout")
                hint = _(
                    b'include file with `hg debugsparse --include <pattern>` or use '
                    b'`hg add -s <file>` to include file directory while adding'
                )
                raise error.Abort(msg % filename, hint=hint)
802
801
    def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
        """Discover the case-folded spelling of *path* and cache it.

        *normed* is the case-normalized form of *path*.  When the path
        exists on disk, the on-disk folding is recorded in *storemap*
        under *normed* before being returned.
        """
        if exists is None:
            exists = os.path.lexists(os.path.join(self._root, path))
        if not exists:
            # Maybe a path component exists
            if not ignoremissing and b'/' in path:
                d, f = path.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, None)
                folded = d + b"/" + f
            else:
                # No path components, preserve original case
                folded = path
        else:
            # recursively normalize leading directory components
            # against dirstate
            if b'/' in normed:
                d, f = normed.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, True)
                r = self._root + b"/" + d
                folded = d + b"/" + util.fspath(f, r)
            else:
                folded = util.fspath(normed, self._root)
            # cache only when the path actually exists on disk
            storemap[normed] = folded

        return folded
828
827
829 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
828 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
830 normed = util.normcase(path)
829 normed = util.normcase(path)
831 folded = self._map.filefoldmap.get(normed, None)
830 folded = self._map.filefoldmap.get(normed, None)
832 if folded is None:
831 if folded is None:
833 if isknown:
832 if isknown:
834 folded = path
833 folded = path
835 else:
834 else:
836 folded = self._discoverpath(
835 folded = self._discoverpath(
837 path, normed, ignoremissing, exists, self._map.filefoldmap
836 path, normed, ignoremissing, exists, self._map.filefoldmap
838 )
837 )
839 return folded
838 return folded
840
839
841 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
840 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
842 normed = util.normcase(path)
841 normed = util.normcase(path)
843 folded = self._map.filefoldmap.get(normed, None)
842 folded = self._map.filefoldmap.get(normed, None)
844 if folded is None:
843 if folded is None:
845 folded = self._map.dirfoldmap.get(normed, None)
844 folded = self._map.dirfoldmap.get(normed, None)
846 if folded is None:
845 if folded is None:
847 if isknown:
846 if isknown:
848 folded = path
847 folded = path
849 else:
848 else:
850 # store discovered result in dirfoldmap so that future
849 # store discovered result in dirfoldmap so that future
851 # normalizefile calls don't start matching directories
850 # normalizefile calls don't start matching directories
852 folded = self._discoverpath(
851 folded = self._discoverpath(
853 path, normed, ignoremissing, exists, self._map.dirfoldmap
852 path, normed, ignoremissing, exists, self._map.dirfoldmap
854 )
853 )
855 return folded
854 return folded
856
855
857 def normalize(self, path, isknown=False, ignoremissing=False):
856 def normalize(self, path, isknown=False, ignoremissing=False):
858 """
857 """
859 normalize the case of a pathname when on a casefolding filesystem
858 normalize the case of a pathname when on a casefolding filesystem
860
859
861 isknown specifies whether the filename came from walking the
860 isknown specifies whether the filename came from walking the
862 disk, to avoid extra filesystem access.
861 disk, to avoid extra filesystem access.
863
862
864 If ignoremissing is True, missing path are returned
863 If ignoremissing is True, missing path are returned
865 unchanged. Otherwise, we try harder to normalize possibly
864 unchanged. Otherwise, we try harder to normalize possibly
866 existing path components.
865 existing path components.
867
866
868 The normalized case is determined based on the following precedence:
867 The normalized case is determined based on the following precedence:
869
868
870 - version of name already stored in the dirstate
869 - version of name already stored in the dirstate
871 - version of name stored on disk
870 - version of name stored on disk
872 - version provided via command arguments
871 - version provided via command arguments
873 """
872 """
874
873
875 if self._checkcase:
874 if self._checkcase:
876 return self._normalize(path, isknown, ignoremissing)
875 return self._normalize(path, isknown, ignoremissing)
877 return path
876 return path
878
877
    # XXX this method is barely used, as a result:
    # - its semantic is unclear
    # - do we really needs it ?
    @requires_changing_parents
    def clear(self):
        # drop every entry from the dirstate map and flag the need to write
        self._map.clear()
        self._dirty = True
886
885
    @requires_changing_parents
    def rebuild(self, parent, allfiles, changedfiles=None):
        """Rebuild dirstate entries so they match *parent*.

        With ``changedfiles=None`` the whole dirstate is rebuilt from
        *allfiles*; otherwise only *changedfiles* are re-examined: entries
        present in *allfiles* become tracked, the others are dropped.
        """
        matcher = self._sparsematcher
        if matcher is not None and not matcher.always():
            # should not add non-matching files
            allfiles = [f for f in allfiles if matcher(f)]
            if changedfiles:
                changedfiles = [f for f in changedfiles if matcher(f)]

            if changedfiles is not None:
                # these files will be deleted from the dirstate when they are
                # not found to be in allfiles
                dirstatefilestoremove = {f for f in self if not matcher(f)}
                changedfiles = dirstatefilestoremove.union(changedfiles)

        if changedfiles is None:
            # Rebuild entire dirstate
            to_lookup = allfiles
            to_drop = []
            self.clear()
        elif len(changedfiles) < 10:
            # Avoid turning allfiles into a set, which can be expensive if it's
            # large.
            to_lookup = []
            to_drop = []
            for f in changedfiles:
                if f in allfiles:
                    to_lookup.append(f)
                else:
                    to_drop.append(f)
        else:
            changedfilesset = set(changedfiles)
            to_lookup = changedfilesset & set(allfiles)
            to_drop = changedfilesset - to_lookup

        if self._origpl is None:
            # remember the pre-rebuild parents so _writedirstate can notify
            # the parent-change callbacks
            self._origpl = self._pl
        self._map.setparents(parent, self._nodeconstants.nullid)

        for f in to_lookup:
            if self.in_merge:
                self.set_tracked(f)
            else:
                self._map.reset_state(
                    f,
                    wc_tracked=True,
                    p1_tracked=True,
                )
        for f in to_drop:
            self._map.reset_state(f)

        self._dirty = True
939
938
940 def identity(self):
939 def identity(self):
941 """Return identity of dirstate itself to detect changing in storage
940 """Return identity of dirstate itself to detect changing in storage
942
941
943 If identity of previous dirstate is equal to this, writing
942 If identity of previous dirstate is equal to this, writing
944 changes based on the former dirstate out can keep consistency.
943 changes based on the former dirstate out can keep consistency.
945 """
944 """
946 return self._map.identity
945 return self._map.identity
947
946
948 def write(self, tr):
947 def write(self, tr):
949 if not self._dirty:
948 if not self._dirty:
950 return
949 return
951 # make sure we don't request a write of invalidated content
950 # make sure we don't request a write of invalidated content
952 # XXX move before the dirty check once `unlock` stop calling `write`
951 # XXX move before the dirty check once `unlock` stop calling `write`
953 assert not self._invalidated_context
952 assert not self._invalidated_context
954
953
955 write_key = self._use_tracked_hint and self._dirty_tracked_set
954 write_key = self._use_tracked_hint and self._dirty_tracked_set
956 if tr:
955 if tr:
957
956
958 def on_abort(tr):
957 def on_abort(tr):
959 self._attached_to_a_transaction = False
958 self._attached_to_a_transaction = False
960 self.invalidate()
959 self.invalidate()
961
960
962 # make sure we invalidate the current change on abort
961 # make sure we invalidate the current change on abort
963 if tr is not None:
962 if tr is not None:
964 tr.addabort(
963 tr.addabort(
965 b'dirstate-invalidate%s' % self._tr_key_suffix,
964 b'dirstate-invalidate%s' % self._tr_key_suffix,
966 on_abort,
965 on_abort,
967 )
966 )
968
967
969 self._attached_to_a_transaction = True
968 self._attached_to_a_transaction = True
970
969
971 def on_success(f):
970 def on_success(f):
972 self._attached_to_a_transaction = False
971 self._attached_to_a_transaction = False
973 self._writedirstate(tr, f),
972 self._writedirstate(tr, f),
974
973
975 # delay writing in-memory changes out
974 # delay writing in-memory changes out
976 tr.addfilegenerator(
975 tr.addfilegenerator(
977 b'dirstate-1-main%s' % self._tr_key_suffix,
976 b'dirstate-1-main%s' % self._tr_key_suffix,
978 (self._filename,),
977 (self._filename,),
979 on_success,
978 on_success,
980 location=b'plain',
979 location=b'plain',
981 post_finalize=True,
980 post_finalize=True,
982 )
981 )
983 if write_key:
982 if write_key:
984 tr.addfilegenerator(
983 tr.addfilegenerator(
985 b'dirstate-2-key-post%s' % self._tr_key_suffix,
984 b'dirstate-2-key-post%s' % self._tr_key_suffix,
986 (self._filename_th,),
985 (self._filename_th,),
987 lambda f: self._write_tracked_hint(tr, f),
986 lambda f: self._write_tracked_hint(tr, f),
988 location=b'plain',
987 location=b'plain',
989 post_finalize=True,
988 post_finalize=True,
990 )
989 )
991 return
990 return
992
991
993 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
992 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
994 with file(self._filename) as f:
993 with file(self._filename) as f:
995 self._writedirstate(tr, f)
994 self._writedirstate(tr, f)
996 if write_key:
995 if write_key:
997 # we update the key-file after writing to make sure reader have a
996 # we update the key-file after writing to make sure reader have a
998 # key that match the newly written content
997 # key that match the newly written content
999 with file(self._filename_th) as f:
998 with file(self._filename_th) as f:
1000 self._write_tracked_hint(tr, f)
999 self._write_tracked_hint(tr, f)
1001
1000
1002 def delete_tracked_hint(self):
1001 def delete_tracked_hint(self):
1003 """remove the tracked_hint file
1002 """remove the tracked_hint file
1004
1003
1005 To be used by format downgrades operation"""
1004 To be used by format downgrades operation"""
1006 self._opener.unlink(self._filename_th)
1005 self._opener.unlink(self._filename_th)
1007 self._use_tracked_hint = False
1006 self._use_tracked_hint = False
1008
1007
    def addparentchangecallback(self, category, callback):
        """add a callback to be called when the wd parents are changed

        Callback will be called with the following arguments:
            dirstate, (oldp1, oldp2), (newp1, newp2)

        Category is a unique identifier to allow overwriting an old callback
        with a newer callback.
        """
        # a later registration under the same category replaces the earlier one
        self._plchangecallbacks[category] = callback
1019
1018
    def _writedirstate(self, tr, st):
        """Write the dirstate map to the already-open file object *st*.

        Fires the registered parent-change callbacks first (when the parents
        moved since they were recorded in ``_origpl``), then clears the
        dirty flags.
        """
        # make sure we don't write invalidated content
        assert not self._invalidated_context
        # notify callbacks about parents change
        if self._origpl is not None and self._origpl != self._pl:
            # sorted() keeps the callback invocation order deterministic
            for c, callback in sorted(self._plchangecallbacks.items()):
                callback(self, self._origpl, self._pl)
            self._origpl = None
        self._map.write(tr, st)
        self._dirty = False
        self._dirty_tracked_set = False
1031
1030
    def _write_tracked_hint(self, tr, f):
        """Write a fresh random key to the open tracked-hint file *f*.

        A changed key tells readers that the set of tracked files may have
        changed since they last looked (see the write() key handling).
        """
        key = node.hex(uuid.uuid4().bytes)
        f.write(b"1\n%s\n" % key)  # 1 is the format version
1035
1034
1036 def _dirignore(self, f):
1035 def _dirignore(self, f):
1037 if self._ignore(f):
1036 if self._ignore(f):
1038 return True
1037 return True
1039 for p in pathutil.finddirs(f):
1038 for p in pathutil.finddirs(f):
1040 if self._ignore(p):
1039 if self._ignore(p):
1041 return True
1040 return True
1042 return False
1041 return False
1043
1042
1044 def _ignorefiles(self):
1043 def _ignorefiles(self):
1045 files = []
1044 files = []
1046 if os.path.exists(self._join(b'.hgignore')):
1045 if os.path.exists(self._join(b'.hgignore')):
1047 files.append(self._join(b'.hgignore'))
1046 files.append(self._join(b'.hgignore'))
1048 for name, path in self._ui.configitems(b"ui"):
1047 for name, path in self._ui.configitems(b"ui"):
1049 if name == b'ignore' or name.startswith(b'ignore.'):
1048 if name == b'ignore' or name.startswith(b'ignore.'):
1050 # we need to use os.path.join here rather than self._join
1049 # we need to use os.path.join here rather than self._join
1051 # because path is arbitrary and user-specified
1050 # because path is arbitrary and user-specified
1052 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1051 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1053 return files
1052 return files
1054
1053
    def _ignorefileandline(self, f):
        """Return ``(file, lineno, line)`` of the first ignore rule matching *f*.

        Ignore files are scanned breadth-first, following ``subinclude``
        patterns into the files they reference; ``(None, -1, b"")`` is
        returned when nothing matches.
        """
        files = collections.deque(self._ignorefiles())
        visited = set()
        while files:
            i = files.popleft()
            patterns = matchmod.readpatternfile(
                i, self._ui.warn, sourceinfo=True
            )
            for pattern, lineno, line in patterns:
                kind, p = matchmod._patsplit(pattern, b'glob')
                if kind == b"subinclude":
                    # queue the referenced file unless it was already
                    # processed (guards against include cycles)
                    if p not in visited:
                        files.append(p)
                    continue
                m = matchmod.match(
                    self._root, b'', [], [pattern], warn=self._ui.warn
                )
                if m(f):
                    return (i, lineno, line)
            visited.add(i)
        return (None, -1, b"")
1076
1075
1077 def _walkexplicit(self, match, subrepos):
1076 def _walkexplicit(self, match, subrepos):
1078 """Get stat data about the files explicitly specified by match.
1077 """Get stat data about the files explicitly specified by match.
1079
1078
1080 Return a triple (results, dirsfound, dirsnotfound).
1079 Return a triple (results, dirsfound, dirsnotfound).
1081 - results is a mapping from filename to stat result. It also contains
1080 - results is a mapping from filename to stat result. It also contains
1082 listings mapping subrepos and .hg to None.
1081 listings mapping subrepos and .hg to None.
1083 - dirsfound is a list of files found to be directories.
1082 - dirsfound is a list of files found to be directories.
1084 - dirsnotfound is a list of files that the dirstate thinks are
1083 - dirsnotfound is a list of files that the dirstate thinks are
1085 directories and that were not found."""
1084 directories and that were not found."""
1086
1085
1087 def badtype(mode):
1086 def badtype(mode):
1088 kind = _(b'unknown')
1087 kind = _(b'unknown')
1089 if stat.S_ISCHR(mode):
1088 if stat.S_ISCHR(mode):
1090 kind = _(b'character device')
1089 kind = _(b'character device')
1091 elif stat.S_ISBLK(mode):
1090 elif stat.S_ISBLK(mode):
1092 kind = _(b'block device')
1091 kind = _(b'block device')
1093 elif stat.S_ISFIFO(mode):
1092 elif stat.S_ISFIFO(mode):
1094 kind = _(b'fifo')
1093 kind = _(b'fifo')
1095 elif stat.S_ISSOCK(mode):
1094 elif stat.S_ISSOCK(mode):
1096 kind = _(b'socket')
1095 kind = _(b'socket')
1097 elif stat.S_ISDIR(mode):
1096 elif stat.S_ISDIR(mode):
1098 kind = _(b'directory')
1097 kind = _(b'directory')
1099 return _(b'unsupported file type (type is %s)') % kind
1098 return _(b'unsupported file type (type is %s)') % kind
1100
1099
1101 badfn = match.bad
1100 badfn = match.bad
1102 dmap = self._map
1101 dmap = self._map
1103 lstat = os.lstat
1102 lstat = os.lstat
1104 getkind = stat.S_IFMT
1103 getkind = stat.S_IFMT
1105 dirkind = stat.S_IFDIR
1104 dirkind = stat.S_IFDIR
1106 regkind = stat.S_IFREG
1105 regkind = stat.S_IFREG
1107 lnkkind = stat.S_IFLNK
1106 lnkkind = stat.S_IFLNK
1108 join = self._join
1107 join = self._join
1109 dirsfound = []
1108 dirsfound = []
1110 foundadd = dirsfound.append
1109 foundadd = dirsfound.append
1111 dirsnotfound = []
1110 dirsnotfound = []
1112 notfoundadd = dirsnotfound.append
1111 notfoundadd = dirsnotfound.append
1113
1112
1114 if not match.isexact() and self._checkcase:
1113 if not match.isexact() and self._checkcase:
1115 normalize = self._normalize
1114 normalize = self._normalize
1116 else:
1115 else:
1117 normalize = None
1116 normalize = None
1118
1117
1119 files = sorted(match.files())
1118 files = sorted(match.files())
1120 subrepos.sort()
1119 subrepos.sort()
1121 i, j = 0, 0
1120 i, j = 0, 0
1122 while i < len(files) and j < len(subrepos):
1121 while i < len(files) and j < len(subrepos):
1123 subpath = subrepos[j] + b"/"
1122 subpath = subrepos[j] + b"/"
1124 if files[i] < subpath:
1123 if files[i] < subpath:
1125 i += 1
1124 i += 1
1126 continue
1125 continue
1127 while i < len(files) and files[i].startswith(subpath):
1126 while i < len(files) and files[i].startswith(subpath):
1128 del files[i]
1127 del files[i]
1129 j += 1
1128 j += 1
1130
1129
1131 if not files or b'' in files:
1130 if not files or b'' in files:
1132 files = [b'']
1131 files = [b'']
1133 # constructing the foldmap is expensive, so don't do it for the
1132 # constructing the foldmap is expensive, so don't do it for the
1134 # common case where files is ['']
1133 # common case where files is ['']
1135 normalize = None
1134 normalize = None
1136 results = dict.fromkeys(subrepos)
1135 results = dict.fromkeys(subrepos)
1137 results[b'.hg'] = None
1136 results[b'.hg'] = None
1138
1137
1139 for ff in files:
1138 for ff in files:
1140 if normalize:
1139 if normalize:
1141 nf = normalize(ff, False, True)
1140 nf = normalize(ff, False, True)
1142 else:
1141 else:
1143 nf = ff
1142 nf = ff
1144 if nf in results:
1143 if nf in results:
1145 continue
1144 continue
1146
1145
1147 try:
1146 try:
1148 st = lstat(join(nf))
1147 st = lstat(join(nf))
1149 kind = getkind(st.st_mode)
1148 kind = getkind(st.st_mode)
1150 if kind == dirkind:
1149 if kind == dirkind:
1151 if nf in dmap:
1150 if nf in dmap:
1152 # file replaced by dir on disk but still in dirstate
1151 # file replaced by dir on disk but still in dirstate
1153 results[nf] = None
1152 results[nf] = None
1154 foundadd((nf, ff))
1153 foundadd((nf, ff))
1155 elif kind == regkind or kind == lnkkind:
1154 elif kind == regkind or kind == lnkkind:
1156 results[nf] = st
1155 results[nf] = st
1157 else:
1156 else:
1158 badfn(ff, badtype(kind))
1157 badfn(ff, badtype(kind))
1159 if nf in dmap:
1158 if nf in dmap:
1160 results[nf] = None
1159 results[nf] = None
1161 except (OSError) as inst:
1160 except (OSError) as inst:
1162 # nf not found on disk - it is dirstate only
1161 # nf not found on disk - it is dirstate only
1163 if nf in dmap: # does it exactly match a missing file?
1162 if nf in dmap: # does it exactly match a missing file?
1164 results[nf] = None
1163 results[nf] = None
1165 else: # does it match a missing directory?
1164 else: # does it match a missing directory?
1166 if self._map.hasdir(nf):
1165 if self._map.hasdir(nf):
1167 notfoundadd(nf)
1166 notfoundadd(nf)
1168 else:
1167 else:
1169 badfn(ff, encoding.strtolocal(inst.strerror))
1168 badfn(ff, encoding.strtolocal(inst.strerror))
1170
1169
1171 # match.files() may contain explicitly-specified paths that shouldn't
1170 # match.files() may contain explicitly-specified paths that shouldn't
1172 # be taken; drop them from the list of files found. dirsfound/notfound
1171 # be taken; drop them from the list of files found. dirsfound/notfound
1173 # aren't filtered here because they will be tested later.
1172 # aren't filtered here because they will be tested later.
1174 if match.anypats():
1173 if match.anypats():
1175 for f in list(results):
1174 for f in list(results):
1176 if f == b'.hg' or f in subrepos:
1175 if f == b'.hg' or f in subrepos:
1177 # keep sentinel to disable further out-of-repo walks
1176 # keep sentinel to disable further out-of-repo walks
1178 continue
1177 continue
1179 if not match(f):
1178 if not match(f):
1180 del results[f]
1179 del results[f]
1181
1180
1182 # Case insensitive filesystems cannot rely on lstat() failing to detect
1181 # Case insensitive filesystems cannot rely on lstat() failing to detect
1183 # a case-only rename. Prune the stat object for any file that does not
1182 # a case-only rename. Prune the stat object for any file that does not
1184 # match the case in the filesystem, if there are multiple files that
1183 # match the case in the filesystem, if there are multiple files that
1185 # normalize to the same path.
1184 # normalize to the same path.
1186 if match.isexact() and self._checkcase:
1185 if match.isexact() and self._checkcase:
1187 normed = {}
1186 normed = {}
1188
1187
1189 for f, st in results.items():
1188 for f, st in results.items():
1190 if st is None:
1189 if st is None:
1191 continue
1190 continue
1192
1191
1193 nc = util.normcase(f)
1192 nc = util.normcase(f)
1194 paths = normed.get(nc)
1193 paths = normed.get(nc)
1195
1194
1196 if paths is None:
1195 if paths is None:
1197 paths = set()
1196 paths = set()
1198 normed[nc] = paths
1197 normed[nc] = paths
1199
1198
1200 paths.add(f)
1199 paths.add(f)
1201
1200
1202 for norm, paths in normed.items():
1201 for norm, paths in normed.items():
1203 if len(paths) > 1:
1202 if len(paths) > 1:
1204 for path in paths:
1203 for path in paths:
1205 folded = self._discoverpath(
1204 folded = self._discoverpath(
1206 path, norm, True, None, self._map.dirfoldmap
1205 path, norm, True, None, self._map.dirfoldmap
1207 )
1206 )
1208 if path != folded:
1207 if path != folded:
1209 results[path] = None
1208 results[path] = None
1210
1209
1211 return results, dirsfound, dirsnotfound
1210 return results, dirsfound, dirsnotfound
1212
1211
1213 def walk(self, match, subrepos, unknown, ignored, full=True):
1212 def walk(self, match, subrepos, unknown, ignored, full=True):
1214 """
1213 """
1215 Walk recursively through the directory tree, finding all files
1214 Walk recursively through the directory tree, finding all files
1216 matched by match.
1215 matched by match.
1217
1216
1218 If full is False, maybe skip some known-clean files.
1217 If full is False, maybe skip some known-clean files.
1219
1218
1220 Return a dict mapping filename to stat-like object (either
1219 Return a dict mapping filename to stat-like object (either
1221 mercurial.osutil.stat instance or return value of os.stat()).
1220 mercurial.osutil.stat instance or return value of os.stat()).
1222
1221
1223 """
1222 """
1224 # full is a flag that extensions that hook into walk can use -- this
1223 # full is a flag that extensions that hook into walk can use -- this
1225 # implementation doesn't use it at all. This satisfies the contract
1224 # implementation doesn't use it at all. This satisfies the contract
1226 # because we only guarantee a "maybe".
1225 # because we only guarantee a "maybe".
1227
1226
1228 if ignored:
1227 if ignored:
1229 ignore = util.never
1228 ignore = util.never
1230 dirignore = util.never
1229 dirignore = util.never
1231 elif unknown:
1230 elif unknown:
1232 ignore = self._ignore
1231 ignore = self._ignore
1233 dirignore = self._dirignore
1232 dirignore = self._dirignore
1234 else:
1233 else:
1235 # if not unknown and not ignored, drop dir recursion and step 2
1234 # if not unknown and not ignored, drop dir recursion and step 2
1236 ignore = util.always
1235 ignore = util.always
1237 dirignore = util.always
1236 dirignore = util.always
1238
1237
1239 if self._sparsematchfn is not None:
1238 if self._sparsematchfn is not None:
1240 em = matchmod.exact(match.files())
1239 em = matchmod.exact(match.files())
1241 sm = matchmod.unionmatcher([self._sparsematcher, em])
1240 sm = matchmod.unionmatcher([self._sparsematcher, em])
1242 match = matchmod.intersectmatchers(match, sm)
1241 match = matchmod.intersectmatchers(match, sm)
1243
1242
1244 matchfn = match.matchfn
1243 matchfn = match.matchfn
1245 matchalways = match.always()
1244 matchalways = match.always()
1246 matchtdir = match.traversedir
1245 matchtdir = match.traversedir
1247 dmap = self._map
1246 dmap = self._map
1248 listdir = util.listdir
1247 listdir = util.listdir
1249 lstat = os.lstat
1248 lstat = os.lstat
1250 dirkind = stat.S_IFDIR
1249 dirkind = stat.S_IFDIR
1251 regkind = stat.S_IFREG
1250 regkind = stat.S_IFREG
1252 lnkkind = stat.S_IFLNK
1251 lnkkind = stat.S_IFLNK
1253 join = self._join
1252 join = self._join
1254
1253
1255 exact = skipstep3 = False
1254 exact = skipstep3 = False
1256 if match.isexact(): # match.exact
1255 if match.isexact(): # match.exact
1257 exact = True
1256 exact = True
1258 dirignore = util.always # skip step 2
1257 dirignore = util.always # skip step 2
1259 elif match.prefix(): # match.match, no patterns
1258 elif match.prefix(): # match.match, no patterns
1260 skipstep3 = True
1259 skipstep3 = True
1261
1260
1262 if not exact and self._checkcase:
1261 if not exact and self._checkcase:
1263 normalize = self._normalize
1262 normalize = self._normalize
1264 normalizefile = self._normalizefile
1263 normalizefile = self._normalizefile
1265 skipstep3 = False
1264 skipstep3 = False
1266 else:
1265 else:
1267 normalize = self._normalize
1266 normalize = self._normalize
1268 normalizefile = None
1267 normalizefile = None
1269
1268
1270 # step 1: find all explicit files
1269 # step 1: find all explicit files
1271 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1270 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1272 if matchtdir:
1271 if matchtdir:
1273 for d in work:
1272 for d in work:
1274 matchtdir(d[0])
1273 matchtdir(d[0])
1275 for d in dirsnotfound:
1274 for d in dirsnotfound:
1276 matchtdir(d)
1275 matchtdir(d)
1277
1276
1278 skipstep3 = skipstep3 and not (work or dirsnotfound)
1277 skipstep3 = skipstep3 and not (work or dirsnotfound)
1279 work = [d for d in work if not dirignore(d[0])]
1278 work = [d for d in work if not dirignore(d[0])]
1280
1279
1281 # step 2: visit subdirectories
1280 # step 2: visit subdirectories
1282 def traverse(work, alreadynormed):
1281 def traverse(work, alreadynormed):
1283 wadd = work.append
1282 wadd = work.append
1284 while work:
1283 while work:
1285 tracing.counter('dirstate.walk work', len(work))
1284 tracing.counter('dirstate.walk work', len(work))
1286 nd = work.pop()
1285 nd = work.pop()
1287 visitentries = match.visitchildrenset(nd)
1286 visitentries = match.visitchildrenset(nd)
1288 if not visitentries:
1287 if not visitentries:
1289 continue
1288 continue
1290 if visitentries == b'this' or visitentries == b'all':
1289 if visitentries == b'this' or visitentries == b'all':
1291 visitentries = None
1290 visitentries = None
1292 skip = None
1291 skip = None
1293 if nd != b'':
1292 if nd != b'':
1294 skip = b'.hg'
1293 skip = b'.hg'
1295 try:
1294 try:
1296 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1295 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1297 entries = listdir(join(nd), stat=True, skip=skip)
1296 entries = listdir(join(nd), stat=True, skip=skip)
1298 except (PermissionError, FileNotFoundError) as inst:
1297 except (PermissionError, FileNotFoundError) as inst:
1299 match.bad(
1298 match.bad(
1300 self.pathto(nd), encoding.strtolocal(inst.strerror)
1299 self.pathto(nd), encoding.strtolocal(inst.strerror)
1301 )
1300 )
1302 continue
1301 continue
1303 for f, kind, st in entries:
1302 for f, kind, st in entries:
1304 # Some matchers may return files in the visitentries set,
1303 # Some matchers may return files in the visitentries set,
1305 # instead of 'this', if the matcher explicitly mentions them
1304 # instead of 'this', if the matcher explicitly mentions them
1306 # and is not an exactmatcher. This is acceptable; we do not
1305 # and is not an exactmatcher. This is acceptable; we do not
1307 # make any hard assumptions about file-or-directory below
1306 # make any hard assumptions about file-or-directory below
1308 # based on the presence of `f` in visitentries. If
1307 # based on the presence of `f` in visitentries. If
1309 # visitchildrenset returned a set, we can always skip the
1308 # visitchildrenset returned a set, we can always skip the
1310 # entries *not* in the set it provided regardless of whether
1309 # entries *not* in the set it provided regardless of whether
1311 # they're actually a file or a directory.
1310 # they're actually a file or a directory.
1312 if visitentries and f not in visitentries:
1311 if visitentries and f not in visitentries:
1313 continue
1312 continue
1314 if normalizefile:
1313 if normalizefile:
1315 # even though f might be a directory, we're only
1314 # even though f might be a directory, we're only
1316 # interested in comparing it to files currently in the
1315 # interested in comparing it to files currently in the
1317 # dmap -- therefore normalizefile is enough
1316 # dmap -- therefore normalizefile is enough
1318 nf = normalizefile(
1317 nf = normalizefile(
1319 nd and (nd + b"/" + f) or f, True, True
1318 nd and (nd + b"/" + f) or f, True, True
1320 )
1319 )
1321 else:
1320 else:
1322 nf = nd and (nd + b"/" + f) or f
1321 nf = nd and (nd + b"/" + f) or f
1323 if nf not in results:
1322 if nf not in results:
1324 if kind == dirkind:
1323 if kind == dirkind:
1325 if not ignore(nf):
1324 if not ignore(nf):
1326 if matchtdir:
1325 if matchtdir:
1327 matchtdir(nf)
1326 matchtdir(nf)
1328 wadd(nf)
1327 wadd(nf)
1329 if nf in dmap and (matchalways or matchfn(nf)):
1328 if nf in dmap and (matchalways or matchfn(nf)):
1330 results[nf] = None
1329 results[nf] = None
1331 elif kind == regkind or kind == lnkkind:
1330 elif kind == regkind or kind == lnkkind:
1332 if nf in dmap:
1331 if nf in dmap:
1333 if matchalways or matchfn(nf):
1332 if matchalways or matchfn(nf):
1334 results[nf] = st
1333 results[nf] = st
1335 elif (matchalways or matchfn(nf)) and not ignore(
1334 elif (matchalways or matchfn(nf)) and not ignore(
1336 nf
1335 nf
1337 ):
1336 ):
1338 # unknown file -- normalize if necessary
1337 # unknown file -- normalize if necessary
1339 if not alreadynormed:
1338 if not alreadynormed:
1340 nf = normalize(nf, False, True)
1339 nf = normalize(nf, False, True)
1341 results[nf] = st
1340 results[nf] = st
1342 elif nf in dmap and (matchalways or matchfn(nf)):
1341 elif nf in dmap and (matchalways or matchfn(nf)):
1343 results[nf] = None
1342 results[nf] = None
1344
1343
1345 for nd, d in work:
1344 for nd, d in work:
1346 # alreadynormed means that processwork doesn't have to do any
1345 # alreadynormed means that processwork doesn't have to do any
1347 # expensive directory normalization
1346 # expensive directory normalization
1348 alreadynormed = not normalize or nd == d
1347 alreadynormed = not normalize or nd == d
1349 traverse([d], alreadynormed)
1348 traverse([d], alreadynormed)
1350
1349
1351 for s in subrepos:
1350 for s in subrepos:
1352 del results[s]
1351 del results[s]
1353 del results[b'.hg']
1352 del results[b'.hg']
1354
1353
1355 # step 3: visit remaining files from dmap
1354 # step 3: visit remaining files from dmap
1356 if not skipstep3 and not exact:
1355 if not skipstep3 and not exact:
1357 # If a dmap file is not in results yet, it was either
1356 # If a dmap file is not in results yet, it was either
1358 # a) not matching matchfn b) ignored, c) missing, or d) under a
1357 # a) not matching matchfn b) ignored, c) missing, or d) under a
1359 # symlink directory.
1358 # symlink directory.
1360 if not results and matchalways:
1359 if not results and matchalways:
1361 visit = [f for f in dmap]
1360 visit = [f for f in dmap]
1362 else:
1361 else:
1363 visit = [f for f in dmap if f not in results and matchfn(f)]
1362 visit = [f for f in dmap if f not in results and matchfn(f)]
1364 visit.sort()
1363 visit.sort()
1365
1364
1366 if unknown:
1365 if unknown:
1367 # unknown == True means we walked all dirs under the roots
1366 # unknown == True means we walked all dirs under the roots
1368 # that wasn't ignored, and everything that matched was stat'ed
1367 # that wasn't ignored, and everything that matched was stat'ed
1369 # and is already in results.
1368 # and is already in results.
1370 # The rest must thus be ignored or under a symlink.
1369 # The rest must thus be ignored or under a symlink.
1371 audit_path = pathutil.pathauditor(self._root, cached=True)
1370 audit_path = pathutil.pathauditor(self._root, cached=True)
1372
1371
1373 for nf in iter(visit):
1372 for nf in iter(visit):
1374 # If a stat for the same file was already added with a
1373 # If a stat for the same file was already added with a
1375 # different case, don't add one for this, since that would
1374 # different case, don't add one for this, since that would
1376 # make it appear as if the file exists under both names
1375 # make it appear as if the file exists under both names
1377 # on disk.
1376 # on disk.
1378 if (
1377 if (
1379 normalizefile
1378 normalizefile
1380 and normalizefile(nf, True, True) in results
1379 and normalizefile(nf, True, True) in results
1381 ):
1380 ):
1382 results[nf] = None
1381 results[nf] = None
1383 # Report ignored items in the dmap as long as they are not
1382 # Report ignored items in the dmap as long as they are not
1384 # under a symlink directory.
1383 # under a symlink directory.
1385 elif audit_path.check(nf):
1384 elif audit_path.check(nf):
1386 try:
1385 try:
1387 results[nf] = lstat(join(nf))
1386 results[nf] = lstat(join(nf))
1388 # file was just ignored, no links, and exists
1387 # file was just ignored, no links, and exists
1389 except OSError:
1388 except OSError:
1390 # file doesn't exist
1389 # file doesn't exist
1391 results[nf] = None
1390 results[nf] = None
1392 else:
1391 else:
1393 # It's either missing or under a symlink directory
1392 # It's either missing or under a symlink directory
1394 # which we in this case report as missing
1393 # which we in this case report as missing
1395 results[nf] = None
1394 results[nf] = None
1396 else:
1395 else:
1397 # We may not have walked the full directory tree above,
1396 # We may not have walked the full directory tree above,
1398 # so stat and check everything we missed.
1397 # so stat and check everything we missed.
1399 iv = iter(visit)
1398 iv = iter(visit)
1400 for st in util.statfiles([join(i) for i in visit]):
1399 for st in util.statfiles([join(i) for i in visit]):
1401 results[next(iv)] = st
1400 results[next(iv)] = st
1402 return results
1401 return results
1403
1402
1404 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1403 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1405 if self._sparsematchfn is not None:
1404 if self._sparsematchfn is not None:
1406 em = matchmod.exact(matcher.files())
1405 em = matchmod.exact(matcher.files())
1407 sm = matchmod.unionmatcher([self._sparsematcher, em])
1406 sm = matchmod.unionmatcher([self._sparsematcher, em])
1408 matcher = matchmod.intersectmatchers(matcher, sm)
1407 matcher = matchmod.intersectmatchers(matcher, sm)
1409 # Force Rayon (Rust parallelism library) to respect the number of
1408 # Force Rayon (Rust parallelism library) to respect the number of
1410 # workers. This is a temporary workaround until Rust code knows
1409 # workers. This is a temporary workaround until Rust code knows
1411 # how to read the config file.
1410 # how to read the config file.
1412 numcpus = self._ui.configint(b"worker", b"numcpus")
1411 numcpus = self._ui.configint(b"worker", b"numcpus")
1413 if numcpus is not None:
1412 if numcpus is not None:
1414 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1413 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1415
1414
1416 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1415 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1417 if not workers_enabled:
1416 if not workers_enabled:
1418 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1417 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1419
1418
1420 (
1419 (
1421 lookup,
1420 lookup,
1422 modified,
1421 modified,
1423 added,
1422 added,
1424 removed,
1423 removed,
1425 deleted,
1424 deleted,
1426 clean,
1425 clean,
1427 ignored,
1426 ignored,
1428 unknown,
1427 unknown,
1429 warnings,
1428 warnings,
1430 bad,
1429 bad,
1431 traversed,
1430 traversed,
1432 dirty,
1431 dirty,
1433 ) = rustmod.status(
1432 ) = rustmod.status(
1434 self._map._map,
1433 self._map._map,
1435 matcher,
1434 matcher,
1436 self._rootdir,
1435 self._rootdir,
1437 self._ignorefiles(),
1436 self._ignorefiles(),
1438 self._checkexec,
1437 self._checkexec,
1439 bool(list_clean),
1438 bool(list_clean),
1440 bool(list_ignored),
1439 bool(list_ignored),
1441 bool(list_unknown),
1440 bool(list_unknown),
1442 bool(matcher.traversedir),
1441 bool(matcher.traversedir),
1443 )
1442 )
1444
1443
1445 self._dirty |= dirty
1444 self._dirty |= dirty
1446
1445
1447 if matcher.traversedir:
1446 if matcher.traversedir:
1448 for dir in traversed:
1447 for dir in traversed:
1449 matcher.traversedir(dir)
1448 matcher.traversedir(dir)
1450
1449
1451 if self._ui.warn:
1450 if self._ui.warn:
1452 for item in warnings:
1451 for item in warnings:
1453 if isinstance(item, tuple):
1452 if isinstance(item, tuple):
1454 file_path, syntax = item
1453 file_path, syntax = item
1455 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1454 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1456 file_path,
1455 file_path,
1457 syntax,
1456 syntax,
1458 )
1457 )
1459 self._ui.warn(msg)
1458 self._ui.warn(msg)
1460 else:
1459 else:
1461 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1460 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1462 self._ui.warn(
1461 self._ui.warn(
1463 msg
1462 msg
1464 % (
1463 % (
1465 pathutil.canonpath(
1464 pathutil.canonpath(
1466 self._rootdir, self._rootdir, item
1465 self._rootdir, self._rootdir, item
1467 ),
1466 ),
1468 b"No such file or directory",
1467 b"No such file or directory",
1469 )
1468 )
1470 )
1469 )
1471
1470
1472 for fn, message in bad:
1471 for fn, message in bad:
1473 matcher.bad(fn, encoding.strtolocal(message))
1472 matcher.bad(fn, encoding.strtolocal(message))
1474
1473
1475 status = scmutil.status(
1474 status = scmutil.status(
1476 modified=modified,
1475 modified=modified,
1477 added=added,
1476 added=added,
1478 removed=removed,
1477 removed=removed,
1479 deleted=deleted,
1478 deleted=deleted,
1480 unknown=unknown,
1479 unknown=unknown,
1481 ignored=ignored,
1480 ignored=ignored,
1482 clean=clean,
1481 clean=clean,
1483 )
1482 )
1484 return (lookup, status)
1483 return (lookup, status)
1485
1484
    # XXX since this can make the dirstate dirty (through rust), we should
    # enforce that it is done within an appropriate change-context that scopes
    # the change and ensures it eventually gets written to disk (or rolled back)
1489 def status(self, match, subrepos, ignored, clean, unknown):
1488 def status(self, match, subrepos, ignored, clean, unknown):
1490 """Determine the status of the working copy relative to the
1489 """Determine the status of the working copy relative to the
1491 dirstate and return a pair of (unsure, status), where status is of type
1490 dirstate and return a pair of (unsure, status), where status is of type
1492 scmutil.status and:
1491 scmutil.status and:
1493
1492
1494 unsure:
1493 unsure:
1495 files that might have been modified since the dirstate was
1494 files that might have been modified since the dirstate was
1496 written, but need to be read to be sure (size is the same
1495 written, but need to be read to be sure (size is the same
1497 but mtime differs)
1496 but mtime differs)
1498 status.modified:
1497 status.modified:
1499 files that have definitely been modified since the dirstate
1498 files that have definitely been modified since the dirstate
1500 was written (different size or mode)
1499 was written (different size or mode)
1501 status.clean:
1500 status.clean:
1502 files that have definitely not been modified since the
1501 files that have definitely not been modified since the
1503 dirstate was written
1502 dirstate was written
1504 """
1503 """
1505 listignored, listclean, listunknown = ignored, clean, unknown
1504 listignored, listclean, listunknown = ignored, clean, unknown
1506 lookup, modified, added, unknown, ignored = [], [], [], [], []
1505 lookup, modified, added, unknown, ignored = [], [], [], [], []
1507 removed, deleted, clean = [], [], []
1506 removed, deleted, clean = [], [], []
1508
1507
1509 dmap = self._map
1508 dmap = self._map
1510 dmap.preload()
1509 dmap.preload()
1511
1510
1512 use_rust = True
1511 use_rust = True
1513
1512
1514 allowed_matchers = (
1513 allowed_matchers = (
1515 matchmod.alwaysmatcher,
1514 matchmod.alwaysmatcher,
1516 matchmod.differencematcher,
1515 matchmod.differencematcher,
1517 matchmod.exactmatcher,
1516 matchmod.exactmatcher,
1518 matchmod.includematcher,
1517 matchmod.includematcher,
1519 matchmod.intersectionmatcher,
1518 matchmod.intersectionmatcher,
1520 matchmod.nevermatcher,
1519 matchmod.nevermatcher,
1521 matchmod.unionmatcher,
1520 matchmod.unionmatcher,
1522 )
1521 )
1523
1522
1524 if rustmod is None:
1523 if rustmod is None:
1525 use_rust = False
1524 use_rust = False
1526 elif self._checkcase:
1525 elif self._checkcase:
1527 # Case-insensitive filesystems are not handled yet
1526 # Case-insensitive filesystems are not handled yet
1528 use_rust = False
1527 use_rust = False
1529 elif subrepos:
1528 elif subrepos:
1530 use_rust = False
1529 use_rust = False
1531 elif not isinstance(match, allowed_matchers):
1530 elif not isinstance(match, allowed_matchers):
1532 # Some matchers have yet to be implemented
1531 # Some matchers have yet to be implemented
1533 use_rust = False
1532 use_rust = False
1534
1533
1535 # Get the time from the filesystem so we can disambiguate files that
1534 # Get the time from the filesystem so we can disambiguate files that
1536 # appear modified in the present or future.
1535 # appear modified in the present or future.
1537 try:
1536 try:
1538 mtime_boundary = timestamp.get_fs_now(self._opener)
1537 mtime_boundary = timestamp.get_fs_now(self._opener)
1539 except OSError:
1538 except OSError:
1540 # In largefiles or readonly context
1539 # In largefiles or readonly context
1541 mtime_boundary = None
1540 mtime_boundary = None
1542
1541
1543 if use_rust:
1542 if use_rust:
1544 try:
1543 try:
1545 res = self._rust_status(
1544 res = self._rust_status(
1546 match, listclean, listignored, listunknown
1545 match, listclean, listignored, listunknown
1547 )
1546 )
1548 return res + (mtime_boundary,)
1547 return res + (mtime_boundary,)
1549 except rustmod.FallbackError:
1548 except rustmod.FallbackError:
1550 pass
1549 pass
1551
1550
1552 def noop(f):
1551 def noop(f):
1553 pass
1552 pass
1554
1553
1555 dcontains = dmap.__contains__
1554 dcontains = dmap.__contains__
1556 dget = dmap.__getitem__
1555 dget = dmap.__getitem__
1557 ladd = lookup.append # aka "unsure"
1556 ladd = lookup.append # aka "unsure"
1558 madd = modified.append
1557 madd = modified.append
1559 aadd = added.append
1558 aadd = added.append
1560 uadd = unknown.append if listunknown else noop
1559 uadd = unknown.append if listunknown else noop
1561 iadd = ignored.append if listignored else noop
1560 iadd = ignored.append if listignored else noop
1562 radd = removed.append
1561 radd = removed.append
1563 dadd = deleted.append
1562 dadd = deleted.append
1564 cadd = clean.append if listclean else noop
1563 cadd = clean.append if listclean else noop
1565 mexact = match.exact
1564 mexact = match.exact
1566 dirignore = self._dirignore
1565 dirignore = self._dirignore
1567 checkexec = self._checkexec
1566 checkexec = self._checkexec
1568 checklink = self._checklink
1567 checklink = self._checklink
1569 copymap = self._map.copymap
1568 copymap = self._map.copymap
1570
1569
1571 # We need to do full walks when either
1570 # We need to do full walks when either
1572 # - we're listing all clean files, or
1571 # - we're listing all clean files, or
1573 # - match.traversedir does something, because match.traversedir should
1572 # - match.traversedir does something, because match.traversedir should
1574 # be called for every dir in the working dir
1573 # be called for every dir in the working dir
1575 full = listclean or match.traversedir is not None
1574 full = listclean or match.traversedir is not None
1576 for fn, st in self.walk(
1575 for fn, st in self.walk(
1577 match, subrepos, listunknown, listignored, full=full
1576 match, subrepos, listunknown, listignored, full=full
1578 ).items():
1577 ).items():
1579 if not dcontains(fn):
1578 if not dcontains(fn):
1580 if (listignored or mexact(fn)) and dirignore(fn):
1579 if (listignored or mexact(fn)) and dirignore(fn):
1581 if listignored:
1580 if listignored:
1582 iadd(fn)
1581 iadd(fn)
1583 else:
1582 else:
1584 uadd(fn)
1583 uadd(fn)
1585 continue
1584 continue
1586
1585
1587 t = dget(fn)
1586 t = dget(fn)
1588 mode = t.mode
1587 mode = t.mode
1589 size = t.size
1588 size = t.size
1590
1589
1591 if not st and t.tracked:
1590 if not st and t.tracked:
1592 dadd(fn)
1591 dadd(fn)
1593 elif t.p2_info:
1592 elif t.p2_info:
1594 madd(fn)
1593 madd(fn)
1595 elif t.added:
1594 elif t.added:
1596 aadd(fn)
1595 aadd(fn)
1597 elif t.removed:
1596 elif t.removed:
1598 radd(fn)
1597 radd(fn)
1599 elif t.tracked:
1598 elif t.tracked:
1600 if not checklink and t.has_fallback_symlink:
1599 if not checklink and t.has_fallback_symlink:
1601 # If the file system does not support symlink, the mode
1600 # If the file system does not support symlink, the mode
1602 # might not be correctly stored in the dirstate, so do not
1601 # might not be correctly stored in the dirstate, so do not
1603 # trust it.
1602 # trust it.
1604 ladd(fn)
1603 ladd(fn)
1605 elif not checkexec and t.has_fallback_exec:
1604 elif not checkexec and t.has_fallback_exec:
1606 # If the file system does not support exec bits, the mode
1605 # If the file system does not support exec bits, the mode
1607 # might not be correctly stored in the dirstate, so do not
1606 # might not be correctly stored in the dirstate, so do not
1608 # trust it.
1607 # trust it.
1609 ladd(fn)
1608 ladd(fn)
1610 elif (
1609 elif (
1611 size >= 0
1610 size >= 0
1612 and (
1611 and (
1613 (size != st.st_size and size != st.st_size & _rangemask)
1612 (size != st.st_size and size != st.st_size & _rangemask)
1614 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1613 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1615 )
1614 )
1616 or fn in copymap
1615 or fn in copymap
1617 ):
1616 ):
1618 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1617 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1619 # issue6456: Size returned may be longer due to
1618 # issue6456: Size returned may be longer due to
1620 # encryption on EXT-4 fscrypt, undecided.
1619 # encryption on EXT-4 fscrypt, undecided.
1621 ladd(fn)
1620 ladd(fn)
1622 else:
1621 else:
1623 madd(fn)
1622 madd(fn)
1624 elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
1623 elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
1625 # There might be a change in the future if for example the
1624 # There might be a change in the future if for example the
1626 # internal clock is off, but this is a case where the issues
1625 # internal clock is off, but this is a case where the issues
1627 # the user would face would be a lot worse and there is
1626 # the user would face would be a lot worse and there is
1628 # nothing we can really do.
1627 # nothing we can really do.
1629 ladd(fn)
1628 ladd(fn)
1630 elif listclean:
1629 elif listclean:
1631 cadd(fn)
1630 cadd(fn)
1632 status = scmutil.status(
1631 status = scmutil.status(
1633 modified, added, removed, deleted, unknown, ignored, clean
1632 modified, added, removed, deleted, unknown, ignored, clean
1634 )
1633 )
1635 return (lookup, status, mtime_boundary)
1634 return (lookup, status, mtime_boundary)
1636
1635
1637 def matches(self, match):
1636 def matches(self, match):
1638 """
1637 """
1639 return files in the dirstate (in whatever state) filtered by match
1638 return files in the dirstate (in whatever state) filtered by match
1640 """
1639 """
1641 dmap = self._map
1640 dmap = self._map
1642 if rustmod is not None:
1641 if rustmod is not None:
1643 dmap = self._map._map
1642 dmap = self._map._map
1644
1643
1645 if match.always():
1644 if match.always():
1646 return dmap.keys()
1645 return dmap.keys()
1647 files = match.files()
1646 files = match.files()
1648 if match.isexact():
1647 if match.isexact():
1649 # fast path -- filter the other way around, since typically files is
1648 # fast path -- filter the other way around, since typically files is
1650 # much smaller than dmap
1649 # much smaller than dmap
1651 return [f for f in files if f in dmap]
1650 return [f for f in files if f in dmap]
1652 if match.prefix() and all(fn in dmap for fn in files):
1651 if match.prefix() and all(fn in dmap for fn in files):
1653 # fast path -- all the values are known to be files, so just return
1652 # fast path -- all the values are known to be files, so just return
1654 # that
1653 # that
1655 return list(files)
1654 return list(files)
1656 return [f for f in dmap if match(f)]
1655 return [f for f in dmap if match(f)]
1657
1656
1658 def _actualfilename(self, tr):
1657 def _actualfilename(self, tr):
1659 if tr:
1658 if tr:
1660 return self._pendingfilename
1659 return self._pendingfilename
1661 else:
1660 else:
1662 return self._filename
1661 return self._filename
1663
1662
1664 def all_file_names(self):
1663 def all_file_names(self):
1665 """list all filename currently used by this dirstate
1664 """list all filename currently used by this dirstate
1666
1665
1667 This is only used to do `hg rollback` related backup in the transaction
1666 This is only used to do `hg rollback` related backup in the transaction
1668 """
1667 """
1669 if not self._opener.exists(self._filename):
1668 if not self._opener.exists(self._filename):
1670 # no data every written to disk yet
1669 # no data every written to disk yet
1671 return ()
1670 return ()
1672 elif self._use_dirstate_v2:
1671 elif self._use_dirstate_v2:
1673 return (
1672 return (
1674 self._filename,
1673 self._filename,
1675 self._map.docket.data_filename(),
1674 self._map.docket.data_filename(),
1676 )
1675 )
1677 else:
1676 else:
1678 return (self._filename,)
1677 return (self._filename,)
1679
1678
1680 def verify(self, m1, m2, p1, narrow_matcher=None):
1679 def verify(self, m1, m2, p1, narrow_matcher=None):
1681 """
1680 """
1682 check the dirstate contents against the parent manifest and yield errors
1681 check the dirstate contents against the parent manifest and yield errors
1683 """
1682 """
1684 missing_from_p1 = _(
1683 missing_from_p1 = _(
1685 b"%s marked as tracked in p1 (%s) but not in manifest1\n"
1684 b"%s marked as tracked in p1 (%s) but not in manifest1\n"
1686 )
1685 )
1687 unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
1686 unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
1688 missing_from_ps = _(
1687 missing_from_ps = _(
1689 b"%s marked as modified, but not in either manifest\n"
1688 b"%s marked as modified, but not in either manifest\n"
1690 )
1689 )
1691 missing_from_ds = _(
1690 missing_from_ds = _(
1692 b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
1691 b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
1693 )
1692 )
1694 for f, entry in self.items():
1693 for f, entry in self.items():
1695 if entry.p1_tracked:
1694 if entry.p1_tracked:
1696 if entry.modified and f not in m1 and f not in m2:
1695 if entry.modified and f not in m1 and f not in m2:
1697 yield missing_from_ps % f
1696 yield missing_from_ps % f
1698 elif f not in m1:
1697 elif f not in m1:
1699 yield missing_from_p1 % (f, node.short(p1))
1698 yield missing_from_p1 % (f, node.short(p1))
1700 if entry.added and f in m1:
1699 if entry.added and f in m1:
1701 yield unexpected_in_p1 % f
1700 yield unexpected_in_p1 % f
1702 for f in m1:
1701 for f in m1:
1703 if narrow_matcher is not None and not narrow_matcher(f):
1702 if narrow_matcher is not None and not narrow_matcher(f):
1704 continue
1703 continue
1705 entry = self.get_entry(f)
1704 entry = self.get_entry(f)
1706 if not entry.p1_tracked:
1705 if not entry.p1_tracked:
1707 yield missing_from_ds % (f, node.short(p1))
1706 yield missing_from_ds % (f, node.short(p1))
General Comments 0
You need to be logged in to leave comments. Login now