##// END OF EJS Templates
dirstate: add small asserts for double security...
marmoute -
r51016:15531d10 default
parent child Browse files
Show More
@@ -1,1677 +1,1682 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import collections
9 import collections
10 import contextlib
10 import contextlib
11 import os
11 import os
12 import stat
12 import stat
13 import uuid
13 import uuid
14
14
15 from .i18n import _
15 from .i18n import _
16 from .pycompat import delattr
16 from .pycompat import delattr
17
17
18 from hgdemandimport import tracing
18 from hgdemandimport import tracing
19
19
20 from . import (
20 from . import (
21 dirstatemap,
21 dirstatemap,
22 encoding,
22 encoding,
23 error,
23 error,
24 match as matchmod,
24 match as matchmod,
25 node,
25 node,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 util,
30 util,
31 )
31 )
32
32
33 from .dirstateutils import (
33 from .dirstateutils import (
34 timestamp,
34 timestamp,
35 )
35 )
36
36
37 from .interfaces import (
37 from .interfaces import (
38 dirstate as intdirstate,
38 dirstate as intdirstate,
39 util as interfaceutil,
39 util as interfaceutil,
40 )
40 )
41
41
42 parsers = policy.importmod('parsers')
42 parsers = policy.importmod('parsers')
43 rustmod = policy.importrust('dirstate')
43 rustmod = policy.importrust('dirstate')
44
44
45 HAS_FAST_DIRSTATE_V2 = rustmod is not None
45 HAS_FAST_DIRSTATE_V2 = rustmod is not None
46
46
47 propertycache = util.propertycache
47 propertycache = util.propertycache
48 filecache = scmutil.filecache
48 filecache = scmutil.filecache
49 _rangemask = dirstatemap.rangemask
49 _rangemask = dirstatemap.rangemask
50
50
51 DirstateItem = dirstatemap.DirstateItem
51 DirstateItem = dirstatemap.DirstateItem
52
52
53
53
class repocache(filecache):
    """filecache for files in .hg/"""

    def join(self, obj, fname):
        # Resolve the cached file's name through the repo's `.hg/` opener.
        return obj._opener.join(fname)
59
59
60
60
class rootcache(filecache):
    """filecache for files in the repository root"""

    def join(self, obj, fname):
        # Resolve the cached file's name relative to the working directory.
        return obj._join(fname)
66
66
67
67
def check_invalidated(func):
    """Decorator checking that `func` is called on a non-invalidated dirstate.

    The dirstate is in an "invalidated state" after an error occurred during
    its modification and remains so until we exit the top level scope that
    framed such change.
    """

    def wrap(self, *args, **kwargs):
        if self._invalidated_context:
            msg = 'calling `%s` after the dirstate was invalidated'
            raise error.ProgrammingError(msg % func.__name__)
        return func(self, *args, **kwargs)

    return wrap
84
84
85
85
def requires_changing_parents(func):
    """Decorator: `func` may only run inside a `changing_parents` context."""

    def wrap(self, *args, **kwargs):
        if not self.is_changing_parents:
            msg = 'calling `%s` outside of a changing_parents context'
            raise error.ProgrammingError(msg % func.__name__)
        return func(self, *args, **kwargs)

    return check_invalidated(wrap)
95
95
96
96
def requires_changing_files(func):
    """Decorator: `func` may only run inside a `changing_files` context."""

    def wrap(self, *args, **kwargs):
        if not self.is_changing_files:
            msg = 'calling `%s` outside of a `changing_files`'
            raise error.ProgrammingError(msg % func.__name__)
        return func(self, *args, **kwargs)

    return check_invalidated(wrap)
106
106
107
107
def requires_changing_any(func):
    """Decorator: `func` may only run inside some dirstate-changing context."""

    def wrap(self, *args, **kwargs):
        if not self.is_changing_any:
            msg = 'calling `%s` outside of a changing context'
            raise error.ProgrammingError(msg % func.__name__)
        return func(self, *args, **kwargs)

    return check_invalidated(wrap)
117
117
118
118
def requires_not_changing_parents(func):
    """Decorator: `func` must NOT run inside a `changing_parents` context."""

    def wrap(self, *args, **kwargs):
        if self.is_changing_parents:
            msg = 'calling `%s` inside of a changing_parents context'
            raise error.ProgrammingError(msg % func.__name__)
        return func(self, *args, **kwargs)

    return check_invalidated(wrap)
128
128
129
129
# Kinds of dirstate-changing contexts (see `dirstate._changing`); different
# kinds are mutually exclusive while a context of one kind is open.
CHANGE_TYPE_PARENTS = "parents"
CHANGE_TYPE_FILES = "files"
132
132
133
133
134 @interfaceutil.implementer(intdirstate.idirstate)
134 @interfaceutil.implementer(intdirstate.idirstate)
135 class dirstate:
135 class dirstate:
    def __init__(
        self,
        opener,
        ui,
        root,
        validate,
        sparsematchfn,
        nodeconstants,
        use_dirstate_v2,
        use_tracked_hint=False,
    ):
        """Create a new dirstate object.

        opener is an open()-like callable that can be used to open the
        dirstate file; root is the root of the directory tracked by
        the dirstate.
        """
        self._use_dirstate_v2 = use_dirstate_v2
        self._use_tracked_hint = use_tracked_hint
        self._nodeconstants = nodeconstants
        self._opener = opener
        self._validate = validate
        self._root = root
        # Either build a sparse-matcher or None if sparse is disabled
        self._sparsematchfn = sparsematchfn
        # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
        # UNC path pointing to root share (issue4557)
        self._rootdir = pathutil.normasprefix(root)
        # True if any internal state may be different
        self._dirty = False
        # True if the set of tracked files may be different
        self._dirty_tracked_set = False
        self._ui = ui
        self._filecache = {}
        # nesting level of `changing_parents` context
        self._changing_level = 0
        # the change currently underway
        self._change_type = None
        # True if the current dirstate changing operations have been
        # invalidated (used to make sure all nested contexts have been exited)
        self._invalidated_context = False
        self._filename = b'dirstate'
        self._filename_th = b'dirstate-tracked-hint'
        self._pendingfilename = b'%s.pending' % self._filename
        self._plchangecallbacks = {}
        self._origpl = None
        self._mapcls = dirstatemap.dirstatemap
        # Access and cache cwd early, so we don't access it for the first time
        # after a working-copy update caused it to not exist (accessing it then
        # raises an exception).
        self._cwd
187
187
    def prefetch_parents(self):
        """make sure the parents are loaded

        Used to avoid a race condition.
        """
        # Touching the property forces the map to read the parents now.
        self._pl
194
194
    @contextlib.contextmanager
    @check_invalidated
    def _changing(self, repo, change_type):
        """Frame a dirstate modification of kind `change_type`.

        Requires the wlock to be held.  Nestable, but only with the same
        `change_type`; the outermost exit writes the accumulated changes
        (unless the dirstate was invalidated by an error inside the context).
        """
        if repo.currentwlock() is None:
            msg = b"trying to change the dirstate without holding the wlock"
            raise error.ProgrammingError(msg)

        has_tr = repo.currenttransaction() is not None
        if not has_tr and self._changing_level == 0 and self._dirty:
            msg = "entering a changing context, but dirstate is already dirty"
            raise error.ProgrammingError(msg)

        assert self._changing_level >= 0
        # different type of change are mutually exclusive
        if self._change_type is None:
            assert self._changing_level == 0
            self._change_type = change_type
        elif self._change_type != change_type:
            msg = (
                'trying to open "%s" dirstate-changing context while a "%s" is'
                ' already open'
            )
            msg %= (change_type, self._change_type)
            raise error.ProgrammingError(msg)
        should_write = False
        self._changing_level += 1
        try:
            yield
        except:  # re-raises
            self.invalidate()  # this will set `_invalidated_context`
            raise
        finally:
            assert self._changing_level > 0
            self._changing_level -= 1
            # If the dirstate is being invalidated, call invalidate again.
            # This will throw away anything added by a upper context and
            # reset the `_invalidated_context` flag when relevant
            if self._changing_level <= 0:
                self._change_type = None
                assert self._changing_level == 0
            if self._invalidated_context:
                # make sure we invalidate anything an upper context might
                # have changed.
                self.invalidate()
            else:
                # only the outermost context should trigger the write below
                should_write = self._changing_level <= 0
        tr = repo.currenttransaction()
        if has_tr != (tr is not None):
            if has_tr:
                m = "transaction vanished while changing dirstate"
            else:
                m = "transaction appeared while changing dirstate"
            raise error.ProgrammingError(m)
        if should_write:
            self.write(tr)
250
250
    @contextlib.contextmanager
    def changing_parents(self, repo):
        """Context manager framing a change of the dirstate parents."""
        with self._changing(repo, CHANGE_TYPE_PARENTS) as c:
            yield c

    @contextlib.contextmanager
    def changing_files(self, repo):
        """Context manager framing a change of the tracked-file set."""
        with self._changing(repo, CHANGE_TYPE_FILES) as c:
            yield c
260
260
261 # here to help migration to the new code
261 # here to help migration to the new code
262 def parentchange(self):
262 def parentchange(self):
263 msg = (
263 msg = (
264 "Mercurial 6.4 and later requires call to "
264 "Mercurial 6.4 and later requires call to "
265 "`dirstate.changing_parents(repo)`"
265 "`dirstate.changing_parents(repo)`"
266 )
266 )
267 raise error.ProgrammingError(msg)
267 raise error.ProgrammingError(msg)
268
268
269 @property
269 @property
270 def is_changing_any(self):
270 def is_changing_any(self):
271 """Returns true if the dirstate is in the middle of a set of changes.
271 """Returns true if the dirstate is in the middle of a set of changes.
272
272
273 This returns True for any kind of change.
273 This returns True for any kind of change.
274 """
274 """
275 return self._changing_level > 0
275 return self._changing_level > 0
276
276
    def pendingparentchange(self):
        # Legacy alias; delegates to the deprecated accessor below (which
        # emits the deprecation warning).
        return self.is_changing_parent()

    def is_changing_parent(self):
        """Returns true if the dirstate is in the middle of a set of changes
        that modify the dirstate parent.
        """
        # Deprecated spelling: warn, then defer to the replacement property.
        self._ui.deprecwarn(b"dirstate.is_changing_parents", b"6.5")
        return self.is_changing_parents
286
286
287 @property
287 @property
288 def is_changing_parents(self):
288 def is_changing_parents(self):
289 """Returns true if the dirstate is in the middle of a set of changes
289 """Returns true if the dirstate is in the middle of a set of changes
290 that modify the dirstate parent.
290 that modify the dirstate parent.
291 """
291 """
292 if self._changing_level <= 0:
292 if self._changing_level <= 0:
293 return False
293 return False
294 return self._change_type == CHANGE_TYPE_PARENTS
294 return self._change_type == CHANGE_TYPE_PARENTS
295
295
296 @property
296 @property
297 def is_changing_files(self):
297 def is_changing_files(self):
298 """Returns true if the dirstate is in the middle of a set of changes
298 """Returns true if the dirstate is in the middle of a set of changes
299 that modify the files tracked or their sources.
299 that modify the files tracked or their sources.
300 """
300 """
301 if self._changing_level <= 0:
301 if self._changing_level <= 0:
302 return False
302 return False
303 return self._change_type == CHANGE_TYPE_FILES
303 return self._change_type == CHANGE_TYPE_FILES
304
304
    @propertycache
    def _map(self):
        """Return the dirstate contents (see documentation for dirstatemap)."""
        # The assignment below replaces this propertycache entry with the
        # computed map, so the constructor only runs once per instance.
        self._map = self._mapcls(
            self._ui,
            self._opener,
            self._root,
            self._nodeconstants,
            self._use_dirstate_v2,
        )
        return self._map
316
316
317 @property
317 @property
318 def _sparsematcher(self):
318 def _sparsematcher(self):
319 """The matcher for the sparse checkout.
319 """The matcher for the sparse checkout.
320
320
321 The working directory may not include every file from a manifest. The
321 The working directory may not include every file from a manifest. The
322 matcher obtained by this property will match a path if it is to be
322 matcher obtained by this property will match a path if it is to be
323 included in the working directory.
323 included in the working directory.
324
324
325 When sparse if disabled, return None.
325 When sparse if disabled, return None.
326 """
326 """
327 if self._sparsematchfn is None:
327 if self._sparsematchfn is None:
328 return None
328 return None
329 # TODO there is potential to cache this property. For now, the matcher
329 # TODO there is potential to cache this property. For now, the matcher
330 # is resolved on every access. (But the called function does use a
330 # is resolved on every access. (But the called function does use a
331 # cache to keep the lookup fast.)
331 # cache to keep the lookup fast.)
332 return self._sparsematchfn()
332 return self._sparsematchfn()
333
333
334 @repocache(b'branch')
334 @repocache(b'branch')
335 def _branch(self):
335 def _branch(self):
336 try:
336 try:
337 return self._opener.read(b"branch").strip() or b"default"
337 return self._opener.read(b"branch").strip() or b"default"
338 except FileNotFoundError:
338 except FileNotFoundError:
339 return b"default"
339 return b"default"
340
340
    @property
    def _pl(self):
        # Shortcut for the pair of parent nodes stored in the map.
        return self._map.parents()

    def hasdir(self, d):
        # Delegate the tracked-directory query to the dirstate map.
        return self._map.hastrackeddir(d)
347
347
348 @rootcache(b'.hgignore')
348 @rootcache(b'.hgignore')
349 def _ignore(self):
349 def _ignore(self):
350 files = self._ignorefiles()
350 files = self._ignorefiles()
351 if not files:
351 if not files:
352 return matchmod.never()
352 return matchmod.never()
353
353
354 pats = [b'include:%s' % f for f in files]
354 pats = [b'include:%s' % f for f in files]
355 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
355 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
356
356
    @propertycache
    def _slash(self):
        # True when paths should be shown with '/' although the native OS
        # separator differs (controlled by the `ui.slash` config knob).
        return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'

    @propertycache
    def _checklink(self):
        # Whether the filesystem under the repo root supports symlinks.
        return util.checklink(self._root)

    @propertycache
    def _checkexec(self):
        # Whether the filesystem under the repo root honors the exec bit.
        return bool(util.checkexec(self._root))

    @propertycache
    def _checkcase(self):
        # True on case-insensitive filesystems (probed via the `.hg` dir).
        return not util.fscasesensitive(self._join(b'.hg'))
372
372
    def _join(self, f):
        """Return the absolute path of `f` inside the working directory."""
        # much faster than os.path.join()
        # it's safe because f is always a relative path
        return self._rootdir + f
377
377
    def flagfunc(self, buildfallback):
        """build a callable that returns flags associated with a filename

        The information is extracted from three possible layers:
        1. the file system if it supports the information
        2. the "fallback" information stored in the dirstate if any
        3. a more expensive mechanism inferring the flags from the parents.
        """

        # small hack to cache the result of buildfallback()
        fallback_func = []

        def get_flags(x):
            entry = None
            fallback_value = None
            # layer 1: the filesystem; a missing file simply has no flags
            try:
                st = os.lstat(self._join(x))
            except OSError:
                return b''

            if self._checklink:
                if util.statislink(st):
                    return b'l'
            else:
                # filesystem can't answer: layer 2 is the dirstate fallback,
                # layer 3 the (expensive, lazily built) parent inference
                entry = self.get_entry(x)
                if entry.has_fallback_symlink:
                    if entry.fallback_symlink:
                        return b'l'
                else:
                    if not fallback_func:
                        fallback_func.append(buildfallback())
                    fallback_value = fallback_func[0](x)
                    if b'l' in fallback_value:
                        return b'l'

            if self._checkexec:
                if util.statisexec(st):
                    return b'x'
            else:
                # same three-layer cascade for the executable bit; reuse the
                # entry and fallback values computed above when available
                if entry is None:
                    entry = self.get_entry(x)
                if entry.has_fallback_exec:
                    if entry.fallback_exec:
                        return b'x'
                else:
                    if fallback_value is None:
                        if not fallback_func:
                            fallback_func.append(buildfallback())
                        fallback_value = fallback_func[0](x)
                    if b'x' in fallback_value:
                        return b'x'
            return b''

        return get_flags
432
432
    @propertycache
    def _cwd(self):
        """Cached current directory, honoring the `ui.forcecwd` override."""
        # internal config: ui.forcecwd
        forcecwd = self._ui.config(b'ui', b'forcecwd')
        if forcecwd:
            return forcecwd
        return encoding.getcwd()
440
440
441 def getcwd(self):
441 def getcwd(self):
442 """Return the path from which a canonical path is calculated.
442 """Return the path from which a canonical path is calculated.
443
443
444 This path should be used to resolve file patterns or to convert
444 This path should be used to resolve file patterns or to convert
445 canonical paths back to file paths for display. It shouldn't be
445 canonical paths back to file paths for display. It shouldn't be
446 used to get real file paths. Use vfs functions instead.
446 used to get real file paths. Use vfs functions instead.
447 """
447 """
448 cwd = self._cwd
448 cwd = self._cwd
449 if cwd == self._root:
449 if cwd == self._root:
450 return b''
450 return b''
451 # self._root ends with a path separator if self._root is '/' or 'C:\'
451 # self._root ends with a path separator if self._root is '/' or 'C:\'
452 rootsep = self._root
452 rootsep = self._root
453 if not util.endswithsep(rootsep):
453 if not util.endswithsep(rootsep):
454 rootsep += pycompat.ossep
454 rootsep += pycompat.ossep
455 if cwd.startswith(rootsep):
455 if cwd.startswith(rootsep):
456 return cwd[len(rootsep) :]
456 return cwd[len(rootsep) :]
457 else:
457 else:
458 # we're outside the repo. return an absolute path.
458 # we're outside the repo. return an absolute path.
459 return cwd
459 return cwd
460
460
461 def pathto(self, f, cwd=None):
461 def pathto(self, f, cwd=None):
462 if cwd is None:
462 if cwd is None:
463 cwd = self.getcwd()
463 cwd = self.getcwd()
464 path = util.pathto(self._root, cwd, f)
464 path = util.pathto(self._root, cwd, f)
465 if self._slash:
465 if self._slash:
466 return util.pconvert(path)
466 return util.pconvert(path)
467 return path
467 return path
468
468
469 def get_entry(self, path):
469 def get_entry(self, path):
470 """return a DirstateItem for the associated path"""
470 """return a DirstateItem for the associated path"""
471 entry = self._map.get(path)
471 entry = self._map.get(path)
472 if entry is None:
472 if entry is None:
473 return DirstateItem()
473 return DirstateItem()
474 return entry
474 return entry
475
475
    def __contains__(self, key):
        """True when `key` has an entry in the dirstate map."""
        return key in self._map

    def __iter__(self):
        """Iterate over the map's known paths in sorted order."""
        return iter(sorted(self._map))

    def items(self):
        """Iterate over the map's (filename, item) pairs."""
        return self._map.items()

    # historical alias kept for API compatibility
    iteritems = items
486
486
    def parents(self):
        """Both parent nodes, each run through the validate callback."""
        return [self._validate(p) for p in self._pl]

    def p1(self):
        """The first (validated) parent node."""
        return self._validate(self._pl[0])

    def p2(self):
        """The second (validated) parent node."""
        return self._validate(self._pl[1])

    @property
    def in_merge(self):
        """True if a merge is in progress"""
        return self._pl[1] != self._nodeconstants.nullid

    def branch(self):
        """The current branch name, converted to the local encoding."""
        return encoding.tolocal(self._branch)
503
503
    @requires_changing_parents
    def setparents(self, p1, p2=None):
        """Set dirstate parents to p1 and p2.

        When moving from two parents to one, "merged" entries are
        adjusted to normal and previous copy records discarded and
        returned by the call.

        See localrepo.setparents()
        """
        if p2 is None:
            p2 = self._nodeconstants.nullid
        # double security: the decorator already guarantees a changing
        # context, but the explicit level check is cheap and catches misuse
        if self._changing_level == 0:
            raise ValueError(
                b"cannot set dirstate parent outside of "
                b"dirstate.changing_parents context manager"
            )

        self._dirty = True
        oldp2 = self._pl[1]
        if self._origpl is None:
            self._origpl = self._pl
        nullid = self._nodeconstants.nullid
        # True if we need to fold p2 related state back to a linear case
        fold_p2 = oldp2 != nullid and p2 == nullid
        return self._map.setparents(p1, p2, fold_p2=fold_p2)
530
530
    def setbranch(self, branch):
        """Persist `branch` (given in local encoding) to `.hg/branch`."""
        self.__class__._branch.set(self, encoding.fromlocal(branch))
        f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
        try:
            f.write(self._branch + b'\n')
            f.close()

            # make sure filecache has the correct stat info for _branch after
            # replacing the underlying file
            ce = self._filecache[b'_branch']
            if ce:
                ce.refresh()
        except:  # re-raises
            # drop the atomictemp file so a partial write is never visible
            f.discard()
            raise
546
546
547 def invalidate(self):
547 def invalidate(self):
548 """Causes the next access to reread the dirstate.
548 """Causes the next access to reread the dirstate.
549
549
550 This is different from localrepo.invalidatedirstate() because it always
550 This is different from localrepo.invalidatedirstate() because it always
551 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
551 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
552 check whether the dirstate has changed before rereading it."""
552 check whether the dirstate has changed before rereading it."""
553
553
554 for a in ("_map", "_branch", "_ignore"):
554 for a in ("_map", "_branch", "_ignore"):
555 if a in self.__dict__:
555 if a in self.__dict__:
556 delattr(self, a)
556 delattr(self, a)
557 self._dirty = False
557 self._dirty = False
558 self._dirty_tracked_set = False
558 self._dirty_tracked_set = False
559 self._invalidated_context = self._changing_level > 0
559 self._invalidated_context = self._changing_level > 0
560 self._origpl = None
560 self._origpl = None
561
561
562 @requires_changing_any
562 @requires_changing_any
563 def copy(self, source, dest):
563 def copy(self, source, dest):
564 """Mark dest as a copy of source. Unmark dest if source is None."""
564 """Mark dest as a copy of source. Unmark dest if source is None."""
565 if source == dest:
565 if source == dest:
566 return
566 return
567 self._dirty = True
567 self._dirty = True
568 if source is not None:
568 if source is not None:
569 self._check_sparse(source)
569 self._check_sparse(source)
570 self._map.copymap[dest] = source
570 self._map.copymap[dest] = source
571 else:
571 else:
572 self._map.copymap.pop(dest, None)
572 self._map.copymap.pop(dest, None)
573
573
574 def copied(self, file):
574 def copied(self, file):
575 return self._map.copymap.get(file, None)
575 return self._map.copymap.get(file, None)
576
576
577 def copies(self):
577 def copies(self):
578 return self._map.copymap
578 return self._map.copymap
579
579
580 @requires_changing_files
580 @requires_changing_files
581 def set_tracked(self, filename, reset_copy=False):
581 def set_tracked(self, filename, reset_copy=False):
582 """a "public" method for generic code to mark a file as tracked
582 """a "public" method for generic code to mark a file as tracked
583
583
584 This function is to be called outside of "update/merge" case. For
584 This function is to be called outside of "update/merge" case. For
585 example by a command like `hg add X`.
585 example by a command like `hg add X`.
586
586
587 if reset_copy is set, any existing copy information will be dropped.
587 if reset_copy is set, any existing copy information will be dropped.
588
588
589 return True the file was previously untracked, False otherwise.
589 return True the file was previously untracked, False otherwise.
590 """
590 """
591 self._dirty = True
591 self._dirty = True
592 entry = self._map.get(filename)
592 entry = self._map.get(filename)
593 if entry is None or not entry.tracked:
593 if entry is None or not entry.tracked:
594 self._check_new_tracked_filename(filename)
594 self._check_new_tracked_filename(filename)
595 pre_tracked = self._map.set_tracked(filename)
595 pre_tracked = self._map.set_tracked(filename)
596 if reset_copy:
596 if reset_copy:
597 self._map.copymap.pop(filename, None)
597 self._map.copymap.pop(filename, None)
598 if pre_tracked:
598 if pre_tracked:
599 self._dirty_tracked_set = True
599 self._dirty_tracked_set = True
600 return pre_tracked
600 return pre_tracked
601
601
602 @requires_changing_files
602 @requires_changing_files
603 def set_untracked(self, filename):
603 def set_untracked(self, filename):
604 """a "public" method for generic code to mark a file as untracked
604 """a "public" method for generic code to mark a file as untracked
605
605
606 This function is to be called outside of "update/merge" case. For
606 This function is to be called outside of "update/merge" case. For
607 example by a command like `hg remove X`.
607 example by a command like `hg remove X`.
608
608
609 return True the file was previously tracked, False otherwise.
609 return True the file was previously tracked, False otherwise.
610 """
610 """
611 ret = self._map.set_untracked(filename)
611 ret = self._map.set_untracked(filename)
612 if ret:
612 if ret:
613 self._dirty = True
613 self._dirty = True
614 self._dirty_tracked_set = True
614 self._dirty_tracked_set = True
615 return ret
615 return ret
616
616
617 @requires_not_changing_parents
617 @requires_not_changing_parents
618 def set_clean(self, filename, parentfiledata):
618 def set_clean(self, filename, parentfiledata):
619 """record that the current state of the file on disk is known to be clean"""
619 """record that the current state of the file on disk is known to be clean"""
620 self._dirty = True
620 self._dirty = True
621 if not self._map[filename].tracked:
621 if not self._map[filename].tracked:
622 self._check_new_tracked_filename(filename)
622 self._check_new_tracked_filename(filename)
623 (mode, size, mtime) = parentfiledata
623 (mode, size, mtime) = parentfiledata
624 self._map.set_clean(filename, mode, size, mtime)
624 self._map.set_clean(filename, mode, size, mtime)
625
625
626 @requires_not_changing_parents
626 @requires_not_changing_parents
627 def set_possibly_dirty(self, filename):
627 def set_possibly_dirty(self, filename):
628 """record that the current state of the file on disk is unknown"""
628 """record that the current state of the file on disk is unknown"""
629 self._dirty = True
629 self._dirty = True
630 self._map.set_possibly_dirty(filename)
630 self._map.set_possibly_dirty(filename)
631
631
632 @requires_changing_parents
632 @requires_changing_parents
633 def update_file_p1(
633 def update_file_p1(
634 self,
634 self,
635 filename,
635 filename,
636 p1_tracked,
636 p1_tracked,
637 ):
637 ):
638 """Set a file as tracked in the parent (or not)
638 """Set a file as tracked in the parent (or not)
639
639
640 This is to be called when adjust the dirstate to a new parent after an history
640 This is to be called when adjust the dirstate to a new parent after an history
641 rewriting operation.
641 rewriting operation.
642
642
643 It should not be called during a merge (p2 != nullid) and only within
643 It should not be called during a merge (p2 != nullid) and only within
644 a `with dirstate.changing_parents(repo):` context.
644 a `with dirstate.changing_parents(repo):` context.
645 """
645 """
646 if self.in_merge:
646 if self.in_merge:
647 msg = b'update_file_reference should not be called when merging'
647 msg = b'update_file_reference should not be called when merging'
648 raise error.ProgrammingError(msg)
648 raise error.ProgrammingError(msg)
649 entry = self._map.get(filename)
649 entry = self._map.get(filename)
650 if entry is None:
650 if entry is None:
651 wc_tracked = False
651 wc_tracked = False
652 else:
652 else:
653 wc_tracked = entry.tracked
653 wc_tracked = entry.tracked
654 if not (p1_tracked or wc_tracked):
654 if not (p1_tracked or wc_tracked):
655 # the file is no longer relevant to anyone
655 # the file is no longer relevant to anyone
656 if self._map.get(filename) is not None:
656 if self._map.get(filename) is not None:
657 self._map.reset_state(filename)
657 self._map.reset_state(filename)
658 self._dirty = True
658 self._dirty = True
659 elif (not p1_tracked) and wc_tracked:
659 elif (not p1_tracked) and wc_tracked:
660 if entry is not None and entry.added:
660 if entry is not None and entry.added:
661 return # avoid dropping copy information (maybe?)
661 return # avoid dropping copy information (maybe?)
662
662
663 self._map.reset_state(
663 self._map.reset_state(
664 filename,
664 filename,
665 wc_tracked,
665 wc_tracked,
666 p1_tracked,
666 p1_tracked,
667 # the underlying reference might have changed, we will have to
667 # the underlying reference might have changed, we will have to
668 # check it.
668 # check it.
669 has_meaningful_mtime=False,
669 has_meaningful_mtime=False,
670 )
670 )
671
671
672 @requires_changing_parents
672 @requires_changing_parents
673 def update_file(
673 def update_file(
674 self,
674 self,
675 filename,
675 filename,
676 wc_tracked,
676 wc_tracked,
677 p1_tracked,
677 p1_tracked,
678 p2_info=False,
678 p2_info=False,
679 possibly_dirty=False,
679 possibly_dirty=False,
680 parentfiledata=None,
680 parentfiledata=None,
681 ):
681 ):
682 """update the information about a file in the dirstate
682 """update the information about a file in the dirstate
683
683
684 This is to be called when the direstates parent changes to keep track
684 This is to be called when the direstates parent changes to keep track
685 of what is the file situation in regards to the working copy and its parent.
685 of what is the file situation in regards to the working copy and its parent.
686
686
687 This function must be called within a `dirstate.changing_parents` context.
687 This function must be called within a `dirstate.changing_parents` context.
688
688
689 note: the API is at an early stage and we might need to adjust it
689 note: the API is at an early stage and we might need to adjust it
690 depending of what information ends up being relevant and useful to
690 depending of what information ends up being relevant and useful to
691 other processing.
691 other processing.
692 """
692 """
693 self._update_file(
693 self._update_file(
694 filename=filename,
694 filename=filename,
695 wc_tracked=wc_tracked,
695 wc_tracked=wc_tracked,
696 p1_tracked=p1_tracked,
696 p1_tracked=p1_tracked,
697 p2_info=p2_info,
697 p2_info=p2_info,
698 possibly_dirty=possibly_dirty,
698 possibly_dirty=possibly_dirty,
699 parentfiledata=parentfiledata,
699 parentfiledata=parentfiledata,
700 )
700 )
701
701
702 # XXX since this make the dirstate dirty, we should enforce that it is done
702 # XXX since this make the dirstate dirty, we should enforce that it is done
703 # withing an appropriate change-context that scope the change and ensure it
703 # withing an appropriate change-context that scope the change and ensure it
704 # eventually get written on disk (or rolled back)
704 # eventually get written on disk (or rolled back)
705 def hacky_extension_update_file(self, *args, **kwargs):
705 def hacky_extension_update_file(self, *args, **kwargs):
706 """NEVER USE THIS, YOU DO NOT NEED IT
706 """NEVER USE THIS, YOU DO NOT NEED IT
707
707
708 This function is a variant of "update_file" to be called by a small set
708 This function is a variant of "update_file" to be called by a small set
709 of extensions, it also adjust the internal state of file, but can be
709 of extensions, it also adjust the internal state of file, but can be
710 called outside an `changing_parents` context.
710 called outside an `changing_parents` context.
711
711
712 A very small number of extension meddle with the working copy content
712 A very small number of extension meddle with the working copy content
713 in a way that requires to adjust the dirstate accordingly. At the time
713 in a way that requires to adjust the dirstate accordingly. At the time
714 this command is written they are :
714 this command is written they are :
715 - keyword,
715 - keyword,
716 - largefile,
716 - largefile,
717 PLEASE DO NOT GROW THIS LIST ANY FURTHER.
717 PLEASE DO NOT GROW THIS LIST ANY FURTHER.
718
718
719 This function could probably be replaced by more semantic one (like
719 This function could probably be replaced by more semantic one (like
720 "adjust expected size" or "always revalidate file content", etc)
720 "adjust expected size" or "always revalidate file content", etc)
721 however at the time where this is writen, this is too much of a detour
721 however at the time where this is writen, this is too much of a detour
722 to be considered.
722 to be considered.
723 """
723 """
724 self._update_file(
724 self._update_file(
725 *args,
725 *args,
726 **kwargs,
726 **kwargs,
727 )
727 )
728
728
729 def _update_file(
729 def _update_file(
730 self,
730 self,
731 filename,
731 filename,
732 wc_tracked,
732 wc_tracked,
733 p1_tracked,
733 p1_tracked,
734 p2_info=False,
734 p2_info=False,
735 possibly_dirty=False,
735 possibly_dirty=False,
736 parentfiledata=None,
736 parentfiledata=None,
737 ):
737 ):
738
738
739 # note: I do not think we need to double check name clash here since we
739 # note: I do not think we need to double check name clash here since we
740 # are in a update/merge case that should already have taken care of
740 # are in a update/merge case that should already have taken care of
741 # this. The test agrees
741 # this. The test agrees
742
742
743 self._dirty = True
743 self._dirty = True
744 old_entry = self._map.get(filename)
744 old_entry = self._map.get(filename)
745 if old_entry is None:
745 if old_entry is None:
746 prev_tracked = False
746 prev_tracked = False
747 else:
747 else:
748 prev_tracked = old_entry.tracked
748 prev_tracked = old_entry.tracked
749 if prev_tracked != wc_tracked:
749 if prev_tracked != wc_tracked:
750 self._dirty_tracked_set = True
750 self._dirty_tracked_set = True
751
751
752 self._map.reset_state(
752 self._map.reset_state(
753 filename,
753 filename,
754 wc_tracked,
754 wc_tracked,
755 p1_tracked,
755 p1_tracked,
756 p2_info=p2_info,
756 p2_info=p2_info,
757 has_meaningful_mtime=not possibly_dirty,
757 has_meaningful_mtime=not possibly_dirty,
758 parentfiledata=parentfiledata,
758 parentfiledata=parentfiledata,
759 )
759 )
760
760
761 def _check_new_tracked_filename(self, filename):
761 def _check_new_tracked_filename(self, filename):
762 scmutil.checkfilename(filename)
762 scmutil.checkfilename(filename)
763 if self._map.hastrackeddir(filename):
763 if self._map.hastrackeddir(filename):
764 msg = _(b'directory %r already in dirstate')
764 msg = _(b'directory %r already in dirstate')
765 msg %= pycompat.bytestr(filename)
765 msg %= pycompat.bytestr(filename)
766 raise error.Abort(msg)
766 raise error.Abort(msg)
767 # shadows
767 # shadows
768 for d in pathutil.finddirs(filename):
768 for d in pathutil.finddirs(filename):
769 if self._map.hastrackeddir(d):
769 if self._map.hastrackeddir(d):
770 break
770 break
771 entry = self._map.get(d)
771 entry = self._map.get(d)
772 if entry is not None and not entry.removed:
772 if entry is not None and not entry.removed:
773 msg = _(b'file %r in dirstate clashes with %r')
773 msg = _(b'file %r in dirstate clashes with %r')
774 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
774 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
775 raise error.Abort(msg)
775 raise error.Abort(msg)
776 self._check_sparse(filename)
776 self._check_sparse(filename)
777
777
778 def _check_sparse(self, filename):
778 def _check_sparse(self, filename):
779 """Check that a filename is inside the sparse profile"""
779 """Check that a filename is inside the sparse profile"""
780 sparsematch = self._sparsematcher
780 sparsematch = self._sparsematcher
781 if sparsematch is not None and not sparsematch.always():
781 if sparsematch is not None and not sparsematch.always():
782 if not sparsematch(filename):
782 if not sparsematch(filename):
783 msg = _(b"cannot add '%s' - it is outside the sparse checkout")
783 msg = _(b"cannot add '%s' - it is outside the sparse checkout")
784 hint = _(
784 hint = _(
785 b'include file with `hg debugsparse --include <pattern>` or use '
785 b'include file with `hg debugsparse --include <pattern>` or use '
786 b'`hg add -s <file>` to include file directory while adding'
786 b'`hg add -s <file>` to include file directory while adding'
787 )
787 )
788 raise error.Abort(msg % filename, hint=hint)
788 raise error.Abort(msg % filename, hint=hint)
789
789
790 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
790 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
791 if exists is None:
791 if exists is None:
792 exists = os.path.lexists(os.path.join(self._root, path))
792 exists = os.path.lexists(os.path.join(self._root, path))
793 if not exists:
793 if not exists:
794 # Maybe a path component exists
794 # Maybe a path component exists
795 if not ignoremissing and b'/' in path:
795 if not ignoremissing and b'/' in path:
796 d, f = path.rsplit(b'/', 1)
796 d, f = path.rsplit(b'/', 1)
797 d = self._normalize(d, False, ignoremissing, None)
797 d = self._normalize(d, False, ignoremissing, None)
798 folded = d + b"/" + f
798 folded = d + b"/" + f
799 else:
799 else:
800 # No path components, preserve original case
800 # No path components, preserve original case
801 folded = path
801 folded = path
802 else:
802 else:
803 # recursively normalize leading directory components
803 # recursively normalize leading directory components
804 # against dirstate
804 # against dirstate
805 if b'/' in normed:
805 if b'/' in normed:
806 d, f = normed.rsplit(b'/', 1)
806 d, f = normed.rsplit(b'/', 1)
807 d = self._normalize(d, False, ignoremissing, True)
807 d = self._normalize(d, False, ignoremissing, True)
808 r = self._root + b"/" + d
808 r = self._root + b"/" + d
809 folded = d + b"/" + util.fspath(f, r)
809 folded = d + b"/" + util.fspath(f, r)
810 else:
810 else:
811 folded = util.fspath(normed, self._root)
811 folded = util.fspath(normed, self._root)
812 storemap[normed] = folded
812 storemap[normed] = folded
813
813
814 return folded
814 return folded
815
815
816 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
816 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
817 normed = util.normcase(path)
817 normed = util.normcase(path)
818 folded = self._map.filefoldmap.get(normed, None)
818 folded = self._map.filefoldmap.get(normed, None)
819 if folded is None:
819 if folded is None:
820 if isknown:
820 if isknown:
821 folded = path
821 folded = path
822 else:
822 else:
823 folded = self._discoverpath(
823 folded = self._discoverpath(
824 path, normed, ignoremissing, exists, self._map.filefoldmap
824 path, normed, ignoremissing, exists, self._map.filefoldmap
825 )
825 )
826 return folded
826 return folded
827
827
828 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
828 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
829 normed = util.normcase(path)
829 normed = util.normcase(path)
830 folded = self._map.filefoldmap.get(normed, None)
830 folded = self._map.filefoldmap.get(normed, None)
831 if folded is None:
831 if folded is None:
832 folded = self._map.dirfoldmap.get(normed, None)
832 folded = self._map.dirfoldmap.get(normed, None)
833 if folded is None:
833 if folded is None:
834 if isknown:
834 if isknown:
835 folded = path
835 folded = path
836 else:
836 else:
837 # store discovered result in dirfoldmap so that future
837 # store discovered result in dirfoldmap so that future
838 # normalizefile calls don't start matching directories
838 # normalizefile calls don't start matching directories
839 folded = self._discoverpath(
839 folded = self._discoverpath(
840 path, normed, ignoremissing, exists, self._map.dirfoldmap
840 path, normed, ignoremissing, exists, self._map.dirfoldmap
841 )
841 )
842 return folded
842 return folded
843
843
844 def normalize(self, path, isknown=False, ignoremissing=False):
844 def normalize(self, path, isknown=False, ignoremissing=False):
845 """
845 """
846 normalize the case of a pathname when on a casefolding filesystem
846 normalize the case of a pathname when on a casefolding filesystem
847
847
848 isknown specifies whether the filename came from walking the
848 isknown specifies whether the filename came from walking the
849 disk, to avoid extra filesystem access.
849 disk, to avoid extra filesystem access.
850
850
851 If ignoremissing is True, missing path are returned
851 If ignoremissing is True, missing path are returned
852 unchanged. Otherwise, we try harder to normalize possibly
852 unchanged. Otherwise, we try harder to normalize possibly
853 existing path components.
853 existing path components.
854
854
855 The normalized case is determined based on the following precedence:
855 The normalized case is determined based on the following precedence:
856
856
857 - version of name already stored in the dirstate
857 - version of name already stored in the dirstate
858 - version of name stored on disk
858 - version of name stored on disk
859 - version provided via command arguments
859 - version provided via command arguments
860 """
860 """
861
861
862 if self._checkcase:
862 if self._checkcase:
863 return self._normalize(path, isknown, ignoremissing)
863 return self._normalize(path, isknown, ignoremissing)
864 return path
864 return path
865
865
866 # XXX this method is barely used, as a result:
866 # XXX this method is barely used, as a result:
867 # - its semantic is unclear
867 # - its semantic is unclear
868 # - do we really needs it ?
868 # - do we really needs it ?
869 @requires_changing_parents
869 @requires_changing_parents
870 def clear(self):
870 def clear(self):
871 self._map.clear()
871 self._map.clear()
872 self._dirty = True
872 self._dirty = True
873
873
874 @requires_changing_parents
874 @requires_changing_parents
875 def rebuild(self, parent, allfiles, changedfiles=None):
875 def rebuild(self, parent, allfiles, changedfiles=None):
876 matcher = self._sparsematcher
876 matcher = self._sparsematcher
877 if matcher is not None and not matcher.always():
877 if matcher is not None and not matcher.always():
878 # should not add non-matching files
878 # should not add non-matching files
879 allfiles = [f for f in allfiles if matcher(f)]
879 allfiles = [f for f in allfiles if matcher(f)]
880 if changedfiles:
880 if changedfiles:
881 changedfiles = [f for f in changedfiles if matcher(f)]
881 changedfiles = [f for f in changedfiles if matcher(f)]
882
882
883 if changedfiles is not None:
883 if changedfiles is not None:
884 # these files will be deleted from the dirstate when they are
884 # these files will be deleted from the dirstate when they are
885 # not found to be in allfiles
885 # not found to be in allfiles
886 dirstatefilestoremove = {f for f in self if not matcher(f)}
886 dirstatefilestoremove = {f for f in self if not matcher(f)}
887 changedfiles = dirstatefilestoremove.union(changedfiles)
887 changedfiles = dirstatefilestoremove.union(changedfiles)
888
888
889 if changedfiles is None:
889 if changedfiles is None:
890 # Rebuild entire dirstate
890 # Rebuild entire dirstate
891 to_lookup = allfiles
891 to_lookup = allfiles
892 to_drop = []
892 to_drop = []
893 self.clear()
893 self.clear()
894 elif len(changedfiles) < 10:
894 elif len(changedfiles) < 10:
895 # Avoid turning allfiles into a set, which can be expensive if it's
895 # Avoid turning allfiles into a set, which can be expensive if it's
896 # large.
896 # large.
897 to_lookup = []
897 to_lookup = []
898 to_drop = []
898 to_drop = []
899 for f in changedfiles:
899 for f in changedfiles:
900 if f in allfiles:
900 if f in allfiles:
901 to_lookup.append(f)
901 to_lookup.append(f)
902 else:
902 else:
903 to_drop.append(f)
903 to_drop.append(f)
904 else:
904 else:
905 changedfilesset = set(changedfiles)
905 changedfilesset = set(changedfiles)
906 to_lookup = changedfilesset & set(allfiles)
906 to_lookup = changedfilesset & set(allfiles)
907 to_drop = changedfilesset - to_lookup
907 to_drop = changedfilesset - to_lookup
908
908
909 if self._origpl is None:
909 if self._origpl is None:
910 self._origpl = self._pl
910 self._origpl = self._pl
911 self._map.setparents(parent, self._nodeconstants.nullid)
911 self._map.setparents(parent, self._nodeconstants.nullid)
912
912
913 for f in to_lookup:
913 for f in to_lookup:
914 if self.in_merge:
914 if self.in_merge:
915 self.set_tracked(f)
915 self.set_tracked(f)
916 else:
916 else:
917 self._map.reset_state(
917 self._map.reset_state(
918 f,
918 f,
919 wc_tracked=True,
919 wc_tracked=True,
920 p1_tracked=True,
920 p1_tracked=True,
921 )
921 )
922 for f in to_drop:
922 for f in to_drop:
923 self._map.reset_state(f)
923 self._map.reset_state(f)
924
924
925 self._dirty = True
925 self._dirty = True
926
926
927 def identity(self):
927 def identity(self):
928 """Return identity of dirstate itself to detect changing in storage
928 """Return identity of dirstate itself to detect changing in storage
929
929
930 If identity of previous dirstate is equal to this, writing
930 If identity of previous dirstate is equal to this, writing
931 changes based on the former dirstate out can keep consistency.
931 changes based on the former dirstate out can keep consistency.
932 """
932 """
933 return self._map.identity
933 return self._map.identity
934
934
935 def write(self, tr):
935 def write(self, tr):
936 if not self._dirty:
936 if not self._dirty:
937 return
937 return
938 # make sure we don't request a write of invalidated content
939 # XXX move before the dirty check once `unlock` stop calling `write`
940 assert not self._invalidated_context
938
941
939 write_key = self._use_tracked_hint and self._dirty_tracked_set
942 write_key = self._use_tracked_hint and self._dirty_tracked_set
940 if tr:
943 if tr:
941 # make sure we invalidate the current change on abort
944 # make sure we invalidate the current change on abort
942 if tr is not None:
945 if tr is not None:
943 tr.addabort(
946 tr.addabort(
944 b'dirstate-invalidate',
947 b'dirstate-invalidate',
945 lambda tr: self.invalidate(),
948 lambda tr: self.invalidate(),
946 )
949 )
947 # delay writing in-memory changes out
950 # delay writing in-memory changes out
948 tr.addfilegenerator(
951 tr.addfilegenerator(
949 b'dirstate-1-main',
952 b'dirstate-1-main',
950 (self._filename,),
953 (self._filename,),
951 lambda f: self._writedirstate(tr, f),
954 lambda f: self._writedirstate(tr, f),
952 location=b'plain',
955 location=b'plain',
953 post_finalize=True,
956 post_finalize=True,
954 )
957 )
955 if write_key:
958 if write_key:
956 tr.addfilegenerator(
959 tr.addfilegenerator(
957 b'dirstate-2-key-post',
960 b'dirstate-2-key-post',
958 (self._filename_th,),
961 (self._filename_th,),
959 lambda f: self._write_tracked_hint(tr, f),
962 lambda f: self._write_tracked_hint(tr, f),
960 location=b'plain',
963 location=b'plain',
961 post_finalize=True,
964 post_finalize=True,
962 )
965 )
963 return
966 return
964
967
965 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
968 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
966 with file(self._filename) as f:
969 with file(self._filename) as f:
967 self._writedirstate(tr, f)
970 self._writedirstate(tr, f)
968 if write_key:
971 if write_key:
969 # we update the key-file after writing to make sure reader have a
972 # we update the key-file after writing to make sure reader have a
970 # key that match the newly written content
973 # key that match the newly written content
971 with file(self._filename_th) as f:
974 with file(self._filename_th) as f:
972 self._write_tracked_hint(tr, f)
975 self._write_tracked_hint(tr, f)
973
976
974 def delete_tracked_hint(self):
977 def delete_tracked_hint(self):
975 """remove the tracked_hint file
978 """remove the tracked_hint file
976
979
977 To be used by format downgrades operation"""
980 To be used by format downgrades operation"""
978 self._opener.unlink(self._filename_th)
981 self._opener.unlink(self._filename_th)
979 self._use_tracked_hint = False
982 self._use_tracked_hint = False
980
983
981 def addparentchangecallback(self, category, callback):
984 def addparentchangecallback(self, category, callback):
982 """add a callback to be called when the wd parents are changed
985 """add a callback to be called when the wd parents are changed
983
986
984 Callback will be called with the following arguments:
987 Callback will be called with the following arguments:
985 dirstate, (oldp1, oldp2), (newp1, newp2)
988 dirstate, (oldp1, oldp2), (newp1, newp2)
986
989
987 Category is a unique identifier to allow overwriting an old callback
990 Category is a unique identifier to allow overwriting an old callback
988 with a newer callback.
991 with a newer callback.
989 """
992 """
990 self._plchangecallbacks[category] = callback
993 self._plchangecallbacks[category] = callback
991
994
992 def _writedirstate(self, tr, st):
995 def _writedirstate(self, tr, st):
996 # make sure we don't write invalidated content
997 assert not self._invalidated_context
993 # notify callbacks about parents change
998 # notify callbacks about parents change
994 if self._origpl is not None and self._origpl != self._pl:
999 if self._origpl is not None and self._origpl != self._pl:
995 for c, callback in sorted(self._plchangecallbacks.items()):
1000 for c, callback in sorted(self._plchangecallbacks.items()):
996 callback(self, self._origpl, self._pl)
1001 callback(self, self._origpl, self._pl)
997 self._origpl = None
1002 self._origpl = None
998 self._map.write(tr, st)
1003 self._map.write(tr, st)
999 self._dirty = False
1004 self._dirty = False
1000 self._dirty_tracked_set = False
1005 self._dirty_tracked_set = False
1001
1006
1002 def _write_tracked_hint(self, tr, f):
1007 def _write_tracked_hint(self, tr, f):
1003 key = node.hex(uuid.uuid4().bytes)
1008 key = node.hex(uuid.uuid4().bytes)
1004 f.write(b"1\n%s\n" % key) # 1 is the format version
1009 f.write(b"1\n%s\n" % key) # 1 is the format version
1005
1010
1006 def _dirignore(self, f):
1011 def _dirignore(self, f):
1007 if self._ignore(f):
1012 if self._ignore(f):
1008 return True
1013 return True
1009 for p in pathutil.finddirs(f):
1014 for p in pathutil.finddirs(f):
1010 if self._ignore(p):
1015 if self._ignore(p):
1011 return True
1016 return True
1012 return False
1017 return False
1013
1018
1014 def _ignorefiles(self):
1019 def _ignorefiles(self):
1015 files = []
1020 files = []
1016 if os.path.exists(self._join(b'.hgignore')):
1021 if os.path.exists(self._join(b'.hgignore')):
1017 files.append(self._join(b'.hgignore'))
1022 files.append(self._join(b'.hgignore'))
1018 for name, path in self._ui.configitems(b"ui"):
1023 for name, path in self._ui.configitems(b"ui"):
1019 if name == b'ignore' or name.startswith(b'ignore.'):
1024 if name == b'ignore' or name.startswith(b'ignore.'):
1020 # we need to use os.path.join here rather than self._join
1025 # we need to use os.path.join here rather than self._join
1021 # because path is arbitrary and user-specified
1026 # because path is arbitrary and user-specified
1022 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1027 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1023 return files
1028 return files
1024
1029
1025 def _ignorefileandline(self, f):
1030 def _ignorefileandline(self, f):
1026 files = collections.deque(self._ignorefiles())
1031 files = collections.deque(self._ignorefiles())
1027 visited = set()
1032 visited = set()
1028 while files:
1033 while files:
1029 i = files.popleft()
1034 i = files.popleft()
1030 patterns = matchmod.readpatternfile(
1035 patterns = matchmod.readpatternfile(
1031 i, self._ui.warn, sourceinfo=True
1036 i, self._ui.warn, sourceinfo=True
1032 )
1037 )
1033 for pattern, lineno, line in patterns:
1038 for pattern, lineno, line in patterns:
1034 kind, p = matchmod._patsplit(pattern, b'glob')
1039 kind, p = matchmod._patsplit(pattern, b'glob')
1035 if kind == b"subinclude":
1040 if kind == b"subinclude":
1036 if p not in visited:
1041 if p not in visited:
1037 files.append(p)
1042 files.append(p)
1038 continue
1043 continue
1039 m = matchmod.match(
1044 m = matchmod.match(
1040 self._root, b'', [], [pattern], warn=self._ui.warn
1045 self._root, b'', [], [pattern], warn=self._ui.warn
1041 )
1046 )
1042 if m(f):
1047 if m(f):
1043 return (i, lineno, line)
1048 return (i, lineno, line)
1044 visited.add(i)
1049 visited.add(i)
1045 return (None, -1, b"")
1050 return (None, -1, b"")
1046
1051
    def _walkexplicit(self, match, subrepos):
        """Get stat data about the files explicitly specified by match.

        ``match`` is a matcher whose ``files()`` lists the explicit paths;
        ``subrepos`` is a list of subrepo paths (sorted in place here).

        Return a triple (results, dirsfound, dirsnotfound).
        - results is a mapping from filename to stat result. It also contains
          listings mapping subrepos and .hg to None.
        - dirsfound is a list of files found to be directories.
        - dirsnotfound is a list of files that the dirstate thinks are
          directories and that were not found."""

        def badtype(mode):
            # Build a human-readable description for file types we refuse to
            # track; passed to match.bad() below.
            kind = _(b'unknown')
            if stat.S_ISCHR(mode):
                kind = _(b'character device')
            elif stat.S_ISBLK(mode):
                kind = _(b'block device')
            elif stat.S_ISFIFO(mode):
                kind = _(b'fifo')
            elif stat.S_ISSOCK(mode):
                kind = _(b'socket')
            elif stat.S_ISDIR(mode):
                kind = _(b'directory')
            return _(b'unsupported file type (type is %s)') % kind

        # Bind frequently used attributes/functions to locals for speed in
        # the loops below.
        badfn = match.bad
        dmap = self._map
        lstat = os.lstat
        getkind = stat.S_IFMT
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join
        dirsfound = []
        foundadd = dirsfound.append
        dirsnotfound = []
        notfoundadd = dirsnotfound.append

        if not match.isexact() and self._checkcase:
            normalize = self._normalize
        else:
            normalize = None

        # Drop explicit files that live inside a subrepo: those are handled
        # by the subrepo itself. Both lists are sorted, so a single
        # merge-style pass with two cursors suffices.
        files = sorted(match.files())
        subrepos.sort()
        i, j = 0, 0
        while i < len(files) and j < len(subrepos):
            subpath = subrepos[j] + b"/"
            if files[i] < subpath:
                i += 1
                continue
            while i < len(files) and files[i].startswith(subpath):
                del files[i]
            j += 1

        if not files or b'' in files:
            files = [b'']
            # constructing the foldmap is expensive, so don't do it for the
            # common case where files is ['']
            normalize = None
        # Pre-seed sentinels mapping each subrepo and b'.hg' to None so the
        # loops below never walk into them.
        results = dict.fromkeys(subrepos)
        results[b'.hg'] = None

        for ff in files:
            if normalize:
                nf = normalize(ff, False, True)
            else:
                nf = ff
            if nf in results:
                continue

            try:
                st = lstat(join(nf))
                kind = getkind(st.st_mode)
                if kind == dirkind:
                    if nf in dmap:
                        # file replaced by dir on disk but still in dirstate
                        results[nf] = None
                    foundadd((nf, ff))
                elif kind == regkind or kind == lnkkind:
                    results[nf] = st
                else:
                    badfn(ff, badtype(kind))
                    if nf in dmap:
                        results[nf] = None
            except (OSError) as inst:
                # nf not found on disk - it is dirstate only
                if nf in dmap:  # does it exactly match a missing file?
                    results[nf] = None
                else:  # does it match a missing directory?
                    if self._map.hasdir(nf):
                        notfoundadd(nf)
                    else:
                        badfn(ff, encoding.strtolocal(inst.strerror))

        # match.files() may contain explicitly-specified paths that shouldn't
        # be taken; drop them from the list of files found. dirsfound/notfound
        # aren't filtered here because they will be tested later.
        if match.anypats():
            for f in list(results):
                if f == b'.hg' or f in subrepos:
                    # keep sentinel to disable further out-of-repo walks
                    continue
                if not match(f):
                    del results[f]

        # Case insensitive filesystems cannot rely on lstat() failing to detect
        # a case-only rename. Prune the stat object for any file that does not
        # match the case in the filesystem, if there are multiple files that
        # normalize to the same path.
        if match.isexact() and self._checkcase:
            normed = {}

            # Group result paths by their case-normalized form.
            for f, st in results.items():
                if st is None:
                    continue

                nc = util.normcase(f)
                paths = normed.get(nc)

                if paths is None:
                    paths = set()
                    normed[nc] = paths

                paths.add(f)

            # For any collision group, keep the stat only for the spelling
            # that actually exists on disk.
            for norm, paths in normed.items():
                if len(paths) > 1:
                    for path in paths:
                        folded = self._discoverpath(
                            path, norm, True, None, self._map.dirfoldmap
                        )
                        if path != folded:
                            results[path] = None

        return results, dirsfound, dirsnotfound
1182
1187
    def walk(self, match, subrepos, unknown, ignored, full=True):
        """
        Walk recursively through the directory tree, finding all files
        matched by match.

        ``unknown`` and ``ignored`` control whether unknown/ignored files
        are listed; ``subrepos`` paths are excluded from the walk.

        If full is False, maybe skip some known-clean files.

        Return a dict mapping filename to stat-like object (either
        mercurial.osutil.stat instance or return value of os.stat()).

        """
        # full is a flag that extensions that hook into walk can use -- this
        # implementation doesn't use it at all. This satisfies the contract
        # because we only guarantee a "maybe".

        # Pick the per-file and per-directory ignore predicates depending on
        # which categories the caller wants listed.
        if ignored:
            ignore = util.never
            dirignore = util.never
        elif unknown:
            ignore = self._ignore
            dirignore = self._dirignore
        else:
            # if not unknown and not ignored, drop dir recursion and step 2
            ignore = util.always
            dirignore = util.always

        if self._sparsematchfn is not None:
            # In a sparse checkout, restrict the walk to the sparse config
            # plus any explicitly named files.
            em = matchmod.exact(match.files())
            sm = matchmod.unionmatcher([self._sparsematcher, em])
            match = matchmod.intersectmatchers(match, sm)

        # Bind hot attributes/functions to locals for the traversal loops.
        matchfn = match.matchfn
        matchalways = match.always()
        matchtdir = match.traversedir
        dmap = self._map
        listdir = util.listdir
        lstat = os.lstat
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join

        exact = skipstep3 = False
        if match.isexact():  # match.exact
            exact = True
            dirignore = util.always  # skip step 2
        elif match.prefix():  # match.match, no patterns
            skipstep3 = True

        if not exact and self._checkcase:
            normalize = self._normalize
            normalizefile = self._normalizefile
            skipstep3 = False
        else:
            normalize = self._normalize
            normalizefile = None

        # step 1: find all explicit files
        results, work, dirsnotfound = self._walkexplicit(match, subrepos)
        if matchtdir:
            for d in work:
                matchtdir(d[0])
            for d in dirsnotfound:
                matchtdir(d)

        skipstep3 = skipstep3 and not (work or dirsnotfound)
        work = [d for d in work if not dirignore(d[0])]

        # step 2: visit subdirectories
        def traverse(work, alreadynormed):
            # Iterative depth-first traversal; ``work`` is used as a stack.
            wadd = work.append
            while work:
                tracing.counter('dirstate.walk work', len(work))
                nd = work.pop()
                visitentries = match.visitchildrenset(nd)
                if not visitentries:
                    continue
                if visitentries == b'this' or visitentries == b'all':
                    visitentries = None
                skip = None
                if nd != b'':
                    skip = b'.hg'
                try:
                    with tracing.log('dirstate.walk.traverse listdir %s', nd):
                        entries = listdir(join(nd), stat=True, skip=skip)
                except (PermissionError, FileNotFoundError) as inst:
                    match.bad(
                        self.pathto(nd), encoding.strtolocal(inst.strerror)
                    )
                    continue
                for f, kind, st in entries:
                    # Some matchers may return files in the visitentries set,
                    # instead of 'this', if the matcher explicitly mentions them
                    # and is not an exactmatcher. This is acceptable; we do not
                    # make any hard assumptions about file-or-directory below
                    # based on the presence of `f` in visitentries. If
                    # visitchildrenset returned a set, we can always skip the
                    # entries *not* in the set it provided regardless of whether
                    # they're actually a file or a directory.
                    if visitentries and f not in visitentries:
                        continue
                    if normalizefile:
                        # even though f might be a directory, we're only
                        # interested in comparing it to files currently in the
                        # dmap -- therefore normalizefile is enough
                        nf = normalizefile(
                            nd and (nd + b"/" + f) or f, True, True
                        )
                    else:
                        nf = nd and (nd + b"/" + f) or f
                    if nf not in results:
                        if kind == dirkind:
                            if not ignore(nf):
                                if matchtdir:
                                    matchtdir(nf)
                                wadd(nf)
                            if nf in dmap and (matchalways or matchfn(nf)):
                                results[nf] = None
                        elif kind == regkind or kind == lnkkind:
                            if nf in dmap:
                                if matchalways or matchfn(nf):
                                    results[nf] = st
                            elif (matchalways or matchfn(nf)) and not ignore(
                                nf
                            ):
                                # unknown file -- normalize if necessary
                                if not alreadynormed:
                                    nf = normalize(nf, False, True)
                                results[nf] = st
                        elif nf in dmap and (matchalways or matchfn(nf)):
                            # tracked file replaced on disk by an unsupported
                            # file type: report it with a None stat
                            results[nf] = None

        for nd, d in work:
            # alreadynormed means that processwork doesn't have to do any
            # expensive directory normalization
            alreadynormed = not normalize or nd == d
            traverse([d], alreadynormed)

        # Drop the sentinels seeded by _walkexplicit before returning.
        for s in subrepos:
            del results[s]
        del results[b'.hg']

        # step 3: visit remaining files from dmap
        if not skipstep3 and not exact:
            # If a dmap file is not in results yet, it was either
            # a) not matching matchfn b) ignored, c) missing, or d) under a
            # symlink directory.
            if not results and matchalways:
                visit = [f for f in dmap]
            else:
                visit = [f for f in dmap if f not in results and matchfn(f)]
            visit.sort()

            if unknown:
                # unknown == True means we walked all dirs under the roots
                # that wasn't ignored, and everything that matched was stat'ed
                # and is already in results.
                # The rest must thus be ignored or under a symlink.
                audit_path = pathutil.pathauditor(self._root, cached=True)

                for nf in iter(visit):
                    # If a stat for the same file was already added with a
                    # different case, don't add one for this, since that would
                    # make it appear as if the file exists under both names
                    # on disk.
                    if (
                        normalizefile
                        and normalizefile(nf, True, True) in results
                    ):
                        results[nf] = None
                    # Report ignored items in the dmap as long as they are not
                    # under a symlink directory.
                    elif audit_path.check(nf):
                        try:
                            results[nf] = lstat(join(nf))
                            # file was just ignored, no links, and exists
                        except OSError:
                            # file doesn't exist
                            results[nf] = None
                    else:
                        # It's either missing or under a symlink directory
                        # which we in this case report as missing
                        results[nf] = None
            else:
                # We may not have walked the full directory tree above,
                # so stat and check everything we missed.
                iv = iter(visit)
                for st in util.statfiles([join(i) for i in visit]):
                    results[next(iv)] = st
        return results
1373
1378
1374 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1379 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1375 if self._sparsematchfn is not None:
1380 if self._sparsematchfn is not None:
1376 em = matchmod.exact(matcher.files())
1381 em = matchmod.exact(matcher.files())
1377 sm = matchmod.unionmatcher([self._sparsematcher, em])
1382 sm = matchmod.unionmatcher([self._sparsematcher, em])
1378 matcher = matchmod.intersectmatchers(matcher, sm)
1383 matcher = matchmod.intersectmatchers(matcher, sm)
1379 # Force Rayon (Rust parallelism library) to respect the number of
1384 # Force Rayon (Rust parallelism library) to respect the number of
1380 # workers. This is a temporary workaround until Rust code knows
1385 # workers. This is a temporary workaround until Rust code knows
1381 # how to read the config file.
1386 # how to read the config file.
1382 numcpus = self._ui.configint(b"worker", b"numcpus")
1387 numcpus = self._ui.configint(b"worker", b"numcpus")
1383 if numcpus is not None:
1388 if numcpus is not None:
1384 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1389 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1385
1390
1386 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1391 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1387 if not workers_enabled:
1392 if not workers_enabled:
1388 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1393 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1389
1394
1390 (
1395 (
1391 lookup,
1396 lookup,
1392 modified,
1397 modified,
1393 added,
1398 added,
1394 removed,
1399 removed,
1395 deleted,
1400 deleted,
1396 clean,
1401 clean,
1397 ignored,
1402 ignored,
1398 unknown,
1403 unknown,
1399 warnings,
1404 warnings,
1400 bad,
1405 bad,
1401 traversed,
1406 traversed,
1402 dirty,
1407 dirty,
1403 ) = rustmod.status(
1408 ) = rustmod.status(
1404 self._map._map,
1409 self._map._map,
1405 matcher,
1410 matcher,
1406 self._rootdir,
1411 self._rootdir,
1407 self._ignorefiles(),
1412 self._ignorefiles(),
1408 self._checkexec,
1413 self._checkexec,
1409 bool(list_clean),
1414 bool(list_clean),
1410 bool(list_ignored),
1415 bool(list_ignored),
1411 bool(list_unknown),
1416 bool(list_unknown),
1412 bool(matcher.traversedir),
1417 bool(matcher.traversedir),
1413 )
1418 )
1414
1419
1415 self._dirty |= dirty
1420 self._dirty |= dirty
1416
1421
1417 if matcher.traversedir:
1422 if matcher.traversedir:
1418 for dir in traversed:
1423 for dir in traversed:
1419 matcher.traversedir(dir)
1424 matcher.traversedir(dir)
1420
1425
1421 if self._ui.warn:
1426 if self._ui.warn:
1422 for item in warnings:
1427 for item in warnings:
1423 if isinstance(item, tuple):
1428 if isinstance(item, tuple):
1424 file_path, syntax = item
1429 file_path, syntax = item
1425 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1430 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1426 file_path,
1431 file_path,
1427 syntax,
1432 syntax,
1428 )
1433 )
1429 self._ui.warn(msg)
1434 self._ui.warn(msg)
1430 else:
1435 else:
1431 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1436 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1432 self._ui.warn(
1437 self._ui.warn(
1433 msg
1438 msg
1434 % (
1439 % (
1435 pathutil.canonpath(
1440 pathutil.canonpath(
1436 self._rootdir, self._rootdir, item
1441 self._rootdir, self._rootdir, item
1437 ),
1442 ),
1438 b"No such file or directory",
1443 b"No such file or directory",
1439 )
1444 )
1440 )
1445 )
1441
1446
1442 for fn, message in bad:
1447 for fn, message in bad:
1443 matcher.bad(fn, encoding.strtolocal(message))
1448 matcher.bad(fn, encoding.strtolocal(message))
1444
1449
1445 status = scmutil.status(
1450 status = scmutil.status(
1446 modified=modified,
1451 modified=modified,
1447 added=added,
1452 added=added,
1448 removed=removed,
1453 removed=removed,
1449 deleted=deleted,
1454 deleted=deleted,
1450 unknown=unknown,
1455 unknown=unknown,
1451 ignored=ignored,
1456 ignored=ignored,
1452 clean=clean,
1457 clean=clean,
1453 )
1458 )
1454 return (lookup, status)
1459 return (lookup, status)
1455
1460
    # XXX since this can make the dirstate dirty (through rust), we should
    # enforce that it is done within an appropriate change-context that
    # scopes the change and ensures it eventually gets written on disk (or
    # rolled back)
    def status(self, match, subrepos, ignored, clean, unknown):
        """Determine the status of the working copy relative to the
        dirstate and return a triple of (unsure, status, mtime_boundary),
        where status is of type scmutil.status and:

          unsure:
            files that might have been modified since the dirstate was
            written, but need to be read to be sure (size is the same
            but mtime differs)
          status.modified:
            files that have definitely been modified since the dirstate
            was written (different size or mode)
          status.clean:
            files that have definitely not been modified since the
            dirstate was written
          mtime_boundary:
            filesystem timestamp taken before the walk (None when it could
            not be read), used by callers to disambiguate files whose mtime
            is in the present or future
        """
        # The ``ignored``/``clean``/``unknown`` parameters are listing flags;
        # rebind them so the same names can hold the result lists below.
        listignored, listclean, listunknown = ignored, clean, unknown
        lookup, modified, added, unknown, ignored = [], [], [], [], []
        removed, deleted, clean = [], [], []

        dmap = self._map
        dmap.preload()

        use_rust = True

        # Matcher classes the Rust status implementation understands.
        allowed_matchers = (
            matchmod.alwaysmatcher,
            matchmod.differencematcher,
            matchmod.exactmatcher,
            matchmod.includematcher,
            matchmod.intersectionmatcher,
            matchmod.nevermatcher,
            matchmod.unionmatcher,
        )

        if rustmod is None:
            use_rust = False
        elif self._checkcase:
            # Case-insensitive filesystems are not handled yet
            use_rust = False
        elif subrepos:
            use_rust = False
        elif not isinstance(match, allowed_matchers):
            # Some matchers have yet to be implemented
            use_rust = False

        # Get the time from the filesystem so we can disambiguate files that
        # appear modified in the present or future.
        try:
            mtime_boundary = timestamp.get_fs_now(self._opener)
        except OSError:
            # In largefiles or readonly context
            mtime_boundary = None

        if use_rust:
            try:
                res = self._rust_status(
                    match, listclean, listignored, listunknown
                )
                return res + (mtime_boundary,)
            except rustmod.FallbackError:
                # Rust could not handle this request; fall through to the
                # pure-Python implementation below.
                pass

        def noop(f):
            # placeholder "append" for categories the caller did not request
            pass

        # Bind hot methods to locals; categories not requested get ``noop``.
        dcontains = dmap.__contains__
        dget = dmap.__getitem__
        ladd = lookup.append  # aka "unsure"
        madd = modified.append
        aadd = added.append
        uadd = unknown.append if listunknown else noop
        iadd = ignored.append if listignored else noop
        radd = removed.append
        dadd = deleted.append
        cadd = clean.append if listclean else noop
        mexact = match.exact
        dirignore = self._dirignore
        checkexec = self._checkexec
        checklink = self._checklink
        copymap = self._map.copymap

        # We need to do full walks when either
        # - we're listing all clean files, or
        # - match.traversedir does something, because match.traversedir should
        #   be called for every dir in the working dir
        full = listclean or match.traversedir is not None
        for fn, st in self.walk(
            match, subrepos, listunknown, listignored, full=full
        ).items():
            if not dcontains(fn):
                # not in the dirstate at all: ignored or unknown
                if (listignored or mexact(fn)) and dirignore(fn):
                    if listignored:
                        iadd(fn)
                else:
                    uadd(fn)
                continue

            t = dget(fn)
            mode = t.mode
            size = t.size

            if not st and t.tracked:
                # tracked but gone from disk
                dadd(fn)
            elif t.p2_info:
                madd(fn)
            elif t.added:
                aadd(fn)
            elif t.removed:
                radd(fn)
            elif t.tracked:
                if not checklink and t.has_fallback_symlink:
                    # If the file system does not support symlink, the mode
                    # might not be correctly stored in the dirstate, so do not
                    # trust it.
                    ladd(fn)
                elif not checkexec and t.has_fallback_exec:
                    # If the file system does not support exec bits, the mode
                    # might not be correctly stored in the dirstate, so do not
                    # trust it.
                    ladd(fn)
                elif (
                    size >= 0
                    and (
                        (size != st.st_size and size != st.st_size & _rangemask)
                        or ((mode ^ st.st_mode) & 0o100 and checkexec)
                    )
                    or fn in copymap
                ):
                    if stat.S_ISLNK(st.st_mode) and size != st.st_size:
                        # issue6456: Size returned may be longer due to
                        # encryption on EXT-4 fscrypt, undecided.
                        ladd(fn)
                    else:
                        madd(fn)
                elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
                    # There might be a change in the future if for example the
                    # internal clock is off, but this is a case where the issues
                    # the user would face would be a lot worse and there is
                    # nothing we can really do.
                    ladd(fn)
                elif listclean:
                    cadd(fn)
        status = scmutil.status(
            modified, added, removed, deleted, unknown, ignored, clean
        )
        return (lookup, status, mtime_boundary)
1606
1611
1607 def matches(self, match):
1612 def matches(self, match):
1608 """
1613 """
1609 return files in the dirstate (in whatever state) filtered by match
1614 return files in the dirstate (in whatever state) filtered by match
1610 """
1615 """
1611 dmap = self._map
1616 dmap = self._map
1612 if rustmod is not None:
1617 if rustmod is not None:
1613 dmap = self._map._map
1618 dmap = self._map._map
1614
1619
1615 if match.always():
1620 if match.always():
1616 return dmap.keys()
1621 return dmap.keys()
1617 files = match.files()
1622 files = match.files()
1618 if match.isexact():
1623 if match.isexact():
1619 # fast path -- filter the other way around, since typically files is
1624 # fast path -- filter the other way around, since typically files is
1620 # much smaller than dmap
1625 # much smaller than dmap
1621 return [f for f in files if f in dmap]
1626 return [f for f in files if f in dmap]
1622 if match.prefix() and all(fn in dmap for fn in files):
1627 if match.prefix() and all(fn in dmap for fn in files):
1623 # fast path -- all the values are known to be files, so just return
1628 # fast path -- all the values are known to be files, so just return
1624 # that
1629 # that
1625 return list(files)
1630 return list(files)
1626 return [f for f in dmap if match(f)]
1631 return [f for f in dmap if match(f)]
1627
1632
1628 def _actualfilename(self, tr):
1633 def _actualfilename(self, tr):
1629 if tr:
1634 if tr:
1630 return self._pendingfilename
1635 return self._pendingfilename
1631 else:
1636 else:
1632 return self._filename
1637 return self._filename
1633
1638
1634 def all_file_names(self):
1639 def all_file_names(self):
1635 """list all filename currently used by this dirstate
1640 """list all filename currently used by this dirstate
1636
1641
1637 This is only used to do `hg rollback` related backup in the transaction
1642 This is only used to do `hg rollback` related backup in the transaction
1638 """
1643 """
1639 if not self._opener.exists(self._filename):
1644 if not self._opener.exists(self._filename):
1640 # no data every written to disk yet
1645 # no data every written to disk yet
1641 return ()
1646 return ()
1642 elif self._use_dirstate_v2:
1647 elif self._use_dirstate_v2:
1643 return (
1648 return (
1644 self._filename,
1649 self._filename,
1645 self._map.docket.data_filename(),
1650 self._map.docket.data_filename(),
1646 )
1651 )
1647 else:
1652 else:
1648 return (self._filename,)
1653 return (self._filename,)
1649
1654
1650 def verify(self, m1, m2, p1, narrow_matcher=None):
1655 def verify(self, m1, m2, p1, narrow_matcher=None):
1651 """
1656 """
1652 check the dirstate contents against the parent manifest and yield errors
1657 check the dirstate contents against the parent manifest and yield errors
1653 """
1658 """
1654 missing_from_p1 = _(
1659 missing_from_p1 = _(
1655 b"%s marked as tracked in p1 (%s) but not in manifest1\n"
1660 b"%s marked as tracked in p1 (%s) but not in manifest1\n"
1656 )
1661 )
1657 unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
1662 unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
1658 missing_from_ps = _(
1663 missing_from_ps = _(
1659 b"%s marked as modified, but not in either manifest\n"
1664 b"%s marked as modified, but not in either manifest\n"
1660 )
1665 )
1661 missing_from_ds = _(
1666 missing_from_ds = _(
1662 b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
1667 b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
1663 )
1668 )
1664 for f, entry in self.items():
1669 for f, entry in self.items():
1665 if entry.p1_tracked:
1670 if entry.p1_tracked:
1666 if entry.modified and f not in m1 and f not in m2:
1671 if entry.modified and f not in m1 and f not in m2:
1667 yield missing_from_ps % f
1672 yield missing_from_ps % f
1668 elif f not in m1:
1673 elif f not in m1:
1669 yield missing_from_p1 % (f, node.short(p1))
1674 yield missing_from_p1 % (f, node.short(p1))
1670 if entry.added and f in m1:
1675 if entry.added and f in m1:
1671 yield unexpected_in_p1 % f
1676 yield unexpected_in_p1 % f
1672 for f in m1:
1677 for f in m1:
1673 if narrow_matcher is not None and not narrow_matcher(f):
1678 if narrow_matcher is not None and not narrow_matcher(f):
1674 continue
1679 continue
1675 entry = self.get_entry(f)
1680 entry = self.get_entry(f)
1676 if not entry.p1_tracked:
1681 if not entry.p1_tracked:
1677 yield missing_from_ds % (f, node.short(p1))
1682 yield missing_from_ds % (f, node.short(p1))
General Comments 0
You need to be logged in to leave comments. Login now