##// END OF EJS Templates
dirstate: introduce a check_invalidated decorator...
marmoute -
r51398:65943224 default
parent child Browse files
Show More
@@ -1,1672 +1,1684 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
import collections
import contextlib
import functools
import os
import stat
import uuid
14
14
15 from .i18n import _
15 from .i18n import _
16 from .pycompat import delattr
16 from .pycompat import delattr
17
17
18 from hgdemandimport import tracing
18 from hgdemandimport import tracing
19
19
20 from . import (
20 from . import (
21 dirstatemap,
21 dirstatemap,
22 encoding,
22 encoding,
23 error,
23 error,
24 match as matchmod,
24 match as matchmod,
25 node,
25 node,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 util,
30 util,
31 )
31 )
32
32
33 from .dirstateutils import (
33 from .dirstateutils import (
34 timestamp,
34 timestamp,
35 )
35 )
36
36
37 from .interfaces import (
37 from .interfaces import (
38 dirstate as intdirstate,
38 dirstate as intdirstate,
39 util as interfaceutil,
39 util as interfaceutil,
40 )
40 )
41
41
42 parsers = policy.importmod('parsers')
42 parsers = policy.importmod('parsers')
43 rustmod = policy.importrust('dirstate')
43 rustmod = policy.importrust('dirstate')
44
44
45 HAS_FAST_DIRSTATE_V2 = rustmod is not None
45 HAS_FAST_DIRSTATE_V2 = rustmod is not None
46
46
47 propertycache = util.propertycache
47 propertycache = util.propertycache
48 filecache = scmutil.filecache
48 filecache = scmutil.filecache
49 _rangemask = dirstatemap.rangemask
49 _rangemask = dirstatemap.rangemask
50
50
51 DirstateItem = dirstatemap.DirstateItem
51 DirstateItem = dirstatemap.DirstateItem
52
52
53
53
class repocache(filecache):
    """filecache for files in .hg/

    Resolves cached filenames against the repository's `.hg` opener
    (``obj._opener``) rather than the working directory root.
    """

    def join(self, obj, fname):
        # `obj` is the dirstate; its opener is rooted at .hg/
        return obj._opener.join(fname)
59
59
60
60
class rootcache(filecache):
    """filecache for files in the repository root

    Resolves cached filenames against the working-directory root via
    ``obj._join``.
    """

    def join(self, obj, fname):
        return obj._join(fname)
66
66
67
67
def check_invalidated(func):
    """check that `func` is called on a non-invalidated dirstate

    The dirstate is in an "invalidated state" after an error occurred during
    its modification and remains so until we exited the top level scope that
    framed such change.

    Raises error.ProgrammingError when the wrapped callable is invoked while
    ``self._invalidated_context`` is set.
    """

    # functools.wraps preserves func's __name__/__doc__ on the wrapper,
    # which keeps tracebacks and introspection meaningful.
    @functools.wraps(func)
    def wrap(self, *args, **kwargs):
        if self._invalidated_context:
            msg = 'calling `%s` after the dirstate was invalidated'
            msg %= func.__name__
            raise error.ProgrammingError(msg)
        return func(self, *args, **kwargs)

    return wrap
84
85
def requires_changing_parents(func):
    """Decorator: only allow `func` inside a `changing_parents` context.

    Also rejects calls on an invalidated dirstate via `check_invalidated`.
    """

    def inner(self, *args, **kwargs):
        if not self.is_changing_parents:
            msg = 'calling `%s` outside of a changing_parents context'
            raise error.ProgrammingError(msg % func.__name__)
        return func(self, *args, **kwargs)

    return check_invalidated(inner)
80
95
81
96
def requires_changing_files(func):
    """Decorator: only allow `func` inside a `changing_files` context.

    Also rejects calls on an invalidated dirstate via `check_invalidated`.
    """

    def inner(self, *args, **kwargs):
        if not self.is_changing_files:
            msg = 'calling `%s` outside of a `changing_files`'
            raise error.ProgrammingError(msg % func.__name__)
        return func(self, *args, **kwargs)

    return check_invalidated(inner)
91
106
92
107
def requires_changing_any(func):
    """Decorator: only allow `func` inside some changing context (any type).

    Also rejects calls on an invalidated dirstate via `check_invalidated`.
    """

    def inner(self, *args, **kwargs):
        if not self.is_changing_any:
            msg = 'calling `%s` outside of a changing context'
            raise error.ProgrammingError(msg % func.__name__)
        return func(self, *args, **kwargs)

    return check_invalidated(inner)
105
117
106
118
def requires_not_changing_parents(func):
    """Decorator: forbid calling `func` inside a `changing_parents` context.

    Also rejects calls on an invalidated dirstate via `check_invalidated`.
    """

    def inner(self, *args, **kwargs):
        if self.is_changing_parents:
            msg = 'calling `%s` inside of a changing_parents context'
            raise error.ProgrammingError(msg % func.__name__)
        return func(self, *args, **kwargs)

    return check_invalidated(inner)
116
128
117
129
118 CHANGE_TYPE_PARENTS = "parents"
130 CHANGE_TYPE_PARENTS = "parents"
119 CHANGE_TYPE_FILES = "files"
131 CHANGE_TYPE_FILES = "files"
120
132
121
133
122 @interfaceutil.implementer(intdirstate.idirstate)
134 @interfaceutil.implementer(intdirstate.idirstate)
123 class dirstate:
135 class dirstate:
    def __init__(
        self,
        opener,
        ui,
        root,
        validate,
        sparsematchfn,
        nodeconstants,
        use_dirstate_v2,
        use_tracked_hint=False,
    ):
        """Create a new dirstate object.

        opener is an open()-like callable that can be used to open the
        dirstate file; root is the root of the directory tracked by
        the dirstate.
        """
        self._use_dirstate_v2 = use_dirstate_v2
        self._use_tracked_hint = use_tracked_hint
        self._nodeconstants = nodeconstants
        self._opener = opener
        self._validate = validate
        self._root = root
        # Either build a sparse-matcher or None if sparse is disabled
        self._sparsematchfn = sparsematchfn
        # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
        # UNC path pointing to root share (issue4557)
        self._rootdir = pathutil.normasprefix(root)
        # True if any internal state may be different
        self._dirty = False
        # True if the set of tracked files may be different
        self._dirty_tracked_set = False
        self._ui = ui
        self._filecache = {}
        # nesting level of `changing_parents` context
        self._changing_level = 0
        # the change currently underway
        self._change_type = None
        # True if the current dirstate changing operations have been
        # invalidated (used to make sure all nested contexts have been exited)
        self._invalidated_context = False
        self._filename = b'dirstate'
        self._filename_th = b'dirstate-tracked-hint'
        self._pendingfilename = b'%s.pending' % self._filename
        self._plchangecallbacks = {}
        self._origpl = None
        self._mapcls = dirstatemap.dirstatemap
        # Access and cache cwd early, so we don't access it for the first time
        # after a working-copy update caused it to not exist (accessing it then
        # raises an exception).
        self._cwd
175
187
    def prefetch_parents(self):
        """make sure the parents are loaded

        Used to avoid a race condition.
        """
        # touching self._pl forces the dirstate map to read the parents now
        self._pl
182
194
    @contextlib.contextmanager
    def _changing(self, repo, change_type):
        # Context manager framing a dirstate modification of kind
        # `change_type`.  Requires the wlock, allows nesting of the SAME
        # change type only, and on exit of the outermost level either writes
        # the dirstate or completes a pending invalidation.
        # NOTE(review): indentation of this method was reconstructed from a
        # diff rendering — confirm the `if has_tr != ...` block sits at the
        # `finally:` level against upstream.
        if repo.currentwlock() is None:
            msg = b"trying to change the dirstate without holding the wlock"
            raise error.ProgrammingError(msg)
        if self._invalidated_context:
            msg = "trying to use an invalidated dirstate before it has reset"
            raise error.ProgrammingError(msg)

        has_tr = repo.currenttransaction() is not None
        if not has_tr and self._changing_level == 0 and self._dirty:
            msg = "entering a changing context, but dirstate is already dirty"
            raise error.ProgrammingError(msg)

        # different type of change are mutually exclusive
        if self._change_type is None:
            assert self._changing_level == 0
            self._change_type = change_type
        elif self._change_type != change_type:
            msg = (
                'trying to open "%s" dirstate-changing context while a "%s" is'
                ' already open'
            )
            msg %= (change_type, self._change_type)
            raise error.ProgrammingError(msg)
        self._changing_level += 1
        try:
            yield
        except:  # re-raises
            self.invalidate()
            raise
        finally:
            tr = repo.currenttransaction()
            if self._changing_level > 0:
                if self._invalidated_context:
                    # make sure we invalidate anything an upper context might
                    # have changed.
                    self.invalidate()
                self._changing_level -= 1
                # The invalidation is complete once we exit the final context
                # manager
                if self._changing_level <= 0:
                    self._change_type = None
                    assert self._changing_level == 0
                    if self._invalidated_context:
                        self._invalidated_context = False
                    else:
                        # When an exception occurred, `_invalidated_context`
                        # would have been set to True by the `invalidate`
                        # call earlier.
                        #
                        # We don't have more straightforward code, because the
                        # Exception catching (and the associated `invalidate`
                        # calling) might have been called by a nested context
                        # instead of the top level one.
                        self.write(tr)
            if has_tr != (tr is not None):
                if has_tr:
                    m = "transaction vanished while changing dirstate"
                else:
                    m = "transaction appeared while changing dirstate"
                raise error.ProgrammingError(m)
245
257
    @contextlib.contextmanager
    def changing_parents(self, repo):
        """Context manager framing a change of the dirstate parents."""
        with self._changing(repo, CHANGE_TYPE_PARENTS) as c:
            yield c
250
262
    @contextlib.contextmanager
    def changing_files(self, repo):
        """Context manager framing a change of the set of tracked files."""
        with self._changing(repo, CHANGE_TYPE_FILES) as c:
            yield c
255
267
256 # here to help migration to the new code
268 # here to help migration to the new code
257 def parentchange(self):
269 def parentchange(self):
258 msg = (
270 msg = (
259 "Mercurial 6.4 and later requires call to "
271 "Mercurial 6.4 and later requires call to "
260 "`dirstate.changing_parents(repo)`"
272 "`dirstate.changing_parents(repo)`"
261 )
273 )
262 raise error.ProgrammingError(msg)
274 raise error.ProgrammingError(msg)
263
275
    @property
    def is_changing_any(self):
        """Returns true if the dirstate is in the middle of a set of changes.

        This returns True for any kind of change.
        """
        # any open `_changing` context bumps `_changing_level` above zero
        return self._changing_level > 0
271
283
    def pendingparentchange(self):
        # legacy alias; delegates to the deprecated is_changing_parent()
        return self.is_changing_parent()
274
286
    def is_changing_parent(self):
        """Returns true if the dirstate is in the middle of a set of changes
        that modify the dirstate parent.

        Deprecated since 6.5; use the `is_changing_parents` property instead.
        """
        self._ui.deprecwarn(b"dirstate.is_changing_parents", b"6.5")
        return self.is_changing_parents
281
293
    @property
    def is_changing_parents(self):
        """Returns true if the dirstate is in the middle of a set of changes
        that modify the dirstate parent.
        """
        if self._changing_level <= 0:
            return False
        return self._change_type == CHANGE_TYPE_PARENTS
290
302
    @property
    def is_changing_files(self):
        """Returns true if the dirstate is in the middle of a set of changes
        that modify the files tracked or their sources.
        """
        if self._changing_level <= 0:
            return False
        return self._change_type == CHANGE_TYPE_FILES
299
311
    @propertycache
    def _map(self):
        """Return the dirstate contents (see documentation for dirstatemap)."""
        # assigning to self._map replaces this propertycache entry, so the
        # map is only constructed once
        self._map = self._mapcls(
            self._ui,
            self._opener,
            self._root,
            self._nodeconstants,
            self._use_dirstate_v2,
        )
        return self._map
311
323
    @property
    def _sparsematcher(self):
        """The matcher for the sparse checkout.

        The working directory may not include every file from a manifest. The
        matcher obtained by this property will match a path if it is to be
        included in the working directory.

        When sparse is disabled, return None.
        """
        if self._sparsematchfn is None:
            return None
        # TODO there is potential to cache this property. For now, the matcher
        # is resolved on every access. (But the called function does use a
        # cache to keep the lookup fast.)
        return self._sparsematchfn()
328
340
    @repocache(b'branch')
    def _branch(self):
        """Current branch name read from .hg/branch, b"default" if unset."""
        try:
            return self._opener.read(b"branch").strip() or b"default"
        except FileNotFoundError:
            # no branch file means the default branch
            return b"default"
335
347
    @property
    def _pl(self):
        # raw (p1, p2) parents as stored in the dirstate map
        return self._map.parents()
339
351
    def hasdir(self, d):
        """Delegate to the map: whether `d` is a directory with tracked entries."""
        return self._map.hastrackeddir(d)
342
354
    @rootcache(b'.hgignore')
    def _ignore(self):
        """Matcher built from all ignore files; never-matching when none exist."""
        files = self._ignorefiles()
        if not files:
            return matchmod.never()

        # each ignore file becomes an include: pattern of the combined matcher
        pats = [b'include:%s' % f for f in files]
        return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
351
363
    @propertycache
    def _slash(self):
        # True when paths should be displayed with '/' despite a different
        # native separator (ui.slash option)
        return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
355
367
    @propertycache
    def _checklink(self):
        # cached: does the filesystem under root support symlinks?
        return util.checklink(self._root)
359
371
    @propertycache
    def _checkexec(self):
        # cached: does the filesystem under root support the exec bit?
        return bool(util.checkexec(self._root))
363
375
    @propertycache
    def _checkcase(self):
        # cached: True when the filesystem is case-insensitive (probed on .hg)
        return not util.fscasesensitive(self._join(b'.hg'))
367
379
    def _join(self, f):
        """Join relative path `f` under the working-directory root."""
        # much faster than os.path.join()
        # it's safe because f is always a relative path
        return self._rootdir + f
372
384
    def flagfunc(self, buildfallback):
        """build a callable that returns flags associated with a filename

        The information is extracted from three possible layers:
        1. the file system if it supports the information
        2. the "fallback" information stored in the dirstate if any
        3. a more expensive mechanism inferring the flags from the parents.

        The returned callable maps a filename to b'l', b'x' or b'' (symlink,
        executable, or neither).
        """

        # small hack to cache the result of buildfallback()
        fallback_func = []

        def get_flags(x):
            entry = None
            fallback_value = None
            try:
                st = os.lstat(self._join(x))
            except OSError:
                # file is gone from the working copy: no flags
                return b''

            if self._checklink:
                # layer 1: trust the filesystem
                if util.statislink(st):
                    return b'l'
            else:
                # layer 2: fallback symlink bit stored in the dirstate entry
                entry = self.get_entry(x)
                if entry.has_fallback_symlink:
                    if entry.fallback_symlink:
                        return b'l'
                else:
                    # layer 3: expensive parent-based inference (built lazily)
                    if not fallback_func:
                        fallback_func.append(buildfallback())
                    fallback_value = fallback_func[0](x)
                    if b'l' in fallback_value:
                        return b'l'

            if self._checkexec:
                if util.statisexec(st):
                    return b'x'
            else:
                if entry is None:
                    entry = self.get_entry(x)
                if entry.has_fallback_exec:
                    if entry.fallback_exec:
                        return b'x'
                else:
                    # reuse the fallback value computed for the symlink check
                    # when available
                    if fallback_value is None:
                        if not fallback_func:
                            fallback_func.append(buildfallback())
                        fallback_value = fallback_func[0](x)
                    if b'x' in fallback_value:
                        return b'x'
            return b''

        return get_flags
427
439
    @propertycache
    def _cwd(self):
        """Cached current working directory (honoring ui.forcecwd)."""
        # internal config: ui.forcecwd
        forcecwd = self._ui.config(b'ui', b'forcecwd')
        if forcecwd:
            return forcecwd
        return encoding.getcwd()
435
447
436 def getcwd(self):
448 def getcwd(self):
437 """Return the path from which a canonical path is calculated.
449 """Return the path from which a canonical path is calculated.
438
450
439 This path should be used to resolve file patterns or to convert
451 This path should be used to resolve file patterns or to convert
440 canonical paths back to file paths for display. It shouldn't be
452 canonical paths back to file paths for display. It shouldn't be
441 used to get real file paths. Use vfs functions instead.
453 used to get real file paths. Use vfs functions instead.
442 """
454 """
443 cwd = self._cwd
455 cwd = self._cwd
444 if cwd == self._root:
456 if cwd == self._root:
445 return b''
457 return b''
446 # self._root ends with a path separator if self._root is '/' or 'C:\'
458 # self._root ends with a path separator if self._root is '/' or 'C:\'
447 rootsep = self._root
459 rootsep = self._root
448 if not util.endswithsep(rootsep):
460 if not util.endswithsep(rootsep):
449 rootsep += pycompat.ossep
461 rootsep += pycompat.ossep
450 if cwd.startswith(rootsep):
462 if cwd.startswith(rootsep):
451 return cwd[len(rootsep) :]
463 return cwd[len(rootsep) :]
452 else:
464 else:
453 # we're outside the repo. return an absolute path.
465 # we're outside the repo. return an absolute path.
454 return cwd
466 return cwd
455
467
456 def pathto(self, f, cwd=None):
468 def pathto(self, f, cwd=None):
457 if cwd is None:
469 if cwd is None:
458 cwd = self.getcwd()
470 cwd = self.getcwd()
459 path = util.pathto(self._root, cwd, f)
471 path = util.pathto(self._root, cwd, f)
460 if self._slash:
472 if self._slash:
461 return util.pconvert(path)
473 return util.pconvert(path)
462 return path
474 return path
463
475
464 def get_entry(self, path):
476 def get_entry(self, path):
465 """return a DirstateItem for the associated path"""
477 """return a DirstateItem for the associated path"""
466 entry = self._map.get(path)
478 entry = self._map.get(path)
467 if entry is None:
479 if entry is None:
468 return DirstateItem()
480 return DirstateItem()
469 return entry
481 return entry
470
482
    def __contains__(self, key):
        """True when `key` has an entry in the dirstate map."""
        return key in self._map
473
485
    def __iter__(self):
        """Iterate over dirstate entries in sorted (filename) order."""
        return iter(sorted(self._map))
476
488
    def items(self):
        """Iterate over (filename, entry) pairs from the dirstate map."""
        return self._map.items()

    # historical alias kept for callers using the old name
    iteritems = items
481
493
    def parents(self):
        """Return both parents, each passed through self._validate."""
        return [self._validate(p) for p in self._pl]
484
496
    def p1(self):
        """Return the validated first parent."""
        return self._validate(self._pl[0])
487
499
    def p2(self):
        """Return the validated second parent."""
        return self._validate(self._pl[1])
490
502
    @property
    def in_merge(self):
        """True if a merge is in progress"""
        # a non-null second parent is what defines an in-progress merge
        return self._pl[1] != self._nodeconstants.nullid
495
507
    def branch(self):
        """Return the current branch name converted to local encoding."""
        return encoding.tolocal(self._branch)
498
510
    @requires_changing_parents
    def setparents(self, p1, p2=None):
        """Set dirstate parents to p1 and p2.

        When moving from two parents to one, "merged" entries are
        adjusted to normal and previous copy records discarded and
        returned by the call.

        See localrepo.setparents()
        """
        if p2 is None:
            p2 = self._nodeconstants.nullid
        if self._changing_level == 0:
            raise ValueError(
                b"cannot set dirstate parent outside of "
                b"dirstate.changing_parents context manager"
            )

        self._dirty = True
        oldp2 = self._pl[1]
        if self._origpl is None:
            # remember the pre-change parents for later restoration
            self._origpl = self._pl
        nullid = self._nodeconstants.nullid
        # True if we need to fold p2 related state back to a linear case
        fold_p2 = oldp2 != nullid and p2 == nullid
        return self._map.setparents(p1, p2, fold_p2=fold_p2)
525
537
    def setbranch(self, branch):
        """Record `branch` (local encoding) as current and persist it to the
        `branch` file atomically."""
        self.__class__._branch.set(self, encoding.fromlocal(branch))
        f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
        try:
            f.write(self._branch + b'\n')
            f.close()

            # make sure filecache has the correct stat info for _branch after
            # replacing the underlying file
            ce = self._filecache[b'_branch']
            if ce:
                ce.refresh()
        except:  # re-raises
            # drop the partially written temp file before propagating
            f.discard()
            raise
541
553
    def invalidate(self):
        """Causes the next access to reread the dirstate.

        This is different from localrepo.invalidatedirstate() because it always
        rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
        check whether the dirstate has changed before rereading it."""

        # drop the cached views so they get rebuilt on next access
        for a in ("_map", "_branch", "_ignore"):
            if a in self.__dict__:
                delattr(self, a)
        self._dirty = False
        self._dirty_tracked_set = False
        # when called from inside a changing context, flag the dirstate as
        # invalidated until the outermost context exits
        self._invalidated_context = self._changing_level > 0
        self._origpl = None
556
568
    @requires_changing_any
    def copy(self, source, dest):
        """Mark dest as a copy of source. Unmark dest if source is None."""
        if source == dest:
            return
        self._dirty = True
        if source is not None:
            # refuse to record a copy whose source is outside the sparse
            # checkout
            self._check_sparse(source)
            self._map.copymap[dest] = source
        else:
            self._map.copymap.pop(dest, None)
568
580
    def copied(self, file):
        """Return the recorded copy source of ``file``, or None."""
        return self._map.copymap.get(file, None)
571
583
    def copies(self):
        """Return the full copy map (destination -> source)."""
        return self._map.copymap
574
586
    @requires_changing_files
    def set_tracked(self, filename, reset_copy=False):
        """a "public" method for generic code to mark a file as tracked

        This function is to be called outside of "update/merge" case. For
        example by a command like `hg add X`.

        if reset_copy is set, any existing copy information will be dropped.

        return True the file was previously untracked, False otherwise.
        """
        self._dirty = True
        entry = self._map.get(filename)
        if entry is None or not entry.tracked:
            # becoming tracked: validate the name (clashes with tracked
            # files/directories, sparse profile) before recording it
            self._check_new_tracked_filename(filename)
        pre_tracked = self._map.set_tracked(filename)
        if reset_copy:
            self._map.copymap.pop(filename, None)
        if pre_tracked:
            # the set of tracked files changed, so the tracked-hint file
            # needs rewriting (see `write`)
            self._dirty_tracked_set = True
        return pre_tracked
596
608
    @requires_changing_files
    def set_untracked(self, filename):
        """a "public" method for generic code to mark a file as untracked

        This function is to be called outside of "update/merge" case. For
        example by a command like `hg remove X`.

        return True the file was previously tracked, False otherwise.
        """
        ret = self._map.set_untracked(filename)
        if ret:
            # the tracked set changed, so raise both dirty flags
            self._dirty = True
            self._dirty_tracked_set = True
        return ret
611
623
    @requires_not_changing_parents
    def set_clean(self, filename, parentfiledata):
        """record that the current state of the file on disk is known to be clean"""
        self._dirty = True
        if not self._map[filename].tracked:
            self._check_new_tracked_filename(filename)
        # parentfiledata is the cached stat information for the file
        (mode, size, mtime) = parentfiledata
        self._map.set_clean(filename, mode, size, mtime)
620
632
    @requires_not_changing_parents
    def set_possibly_dirty(self, filename):
        """record that the current state of the file on disk is unknown"""
        self._dirty = True
        self._map.set_possibly_dirty(filename)
626
638
    @requires_changing_parents
    def update_file_p1(
        self,
        filename,
        p1_tracked,
    ):
        """Set a file as tracked in the parent (or not)

        This is to be called when adjust the dirstate to a new parent after an history
        rewriting operation.

        It should not be called during a merge (p2 != nullid) and only within
        a `with dirstate.changing_parents(repo):` context.
        """
        if self.in_merge:
            # NOTE(review): the message refers to "update_file_reference",
            # which looks like this method's former name — confirm and update
            msg = b'update_file_reference should not be called when merging'
            raise error.ProgrammingError(msg)
        entry = self._map.get(filename)
        if entry is None:
            wc_tracked = False
        else:
            wc_tracked = entry.tracked
        if not (p1_tracked or wc_tracked):
            # the file is no longer relevant to anyone
            if self._map.get(filename) is not None:
                self._map.reset_state(filename)
                self._dirty = True
        elif (not p1_tracked) and wc_tracked:
            if entry is not None and entry.added:
                return  # avoid dropping copy information (maybe?)

        self._map.reset_state(
            filename,
            wc_tracked,
            p1_tracked,
            # the underlying reference might have changed, we will have to
            # check it.
            has_meaningful_mtime=False,
        )
666
678
    @requires_changing_parents
    def update_file(
        self,
        filename,
        wc_tracked,
        p1_tracked,
        p2_info=False,
        possibly_dirty=False,
        parentfiledata=None,
    ):
        """update the information about a file in the dirstate

        This is to be called when the direstates parent changes to keep track
        of what is the file situation in regards to the working copy and its parent.

        This function must be called within a `dirstate.changing_parents` context.

        note: the API is at an early stage and we might need to adjust it
        depending of what information ends up being relevant and useful to
        other processing.
        """
        # delegate to the shared implementation
        self._update_file(
            filename=filename,
            wc_tracked=wc_tracked,
            p1_tracked=p1_tracked,
            p2_info=p2_info,
            possibly_dirty=possibly_dirty,
            parentfiledata=parentfiledata,
        )
696
708
    # XXX since this make the dirstate dirty, we should enforce that it is done
    # withing an appropriate change-context that scope the change and ensure it
    # eventually get written on disk (or rolled back)
    def hacky_extension_update_file(self, *args, **kwargs):
        """NEVER USE THIS, YOU DO NOT NEED IT

        This function is a variant of "update_file" to be called by a small set
        of extensions, it also adjust the internal state of file, but can be
        called outside an `changing_parents` context.

        A very small number of extension meddle with the working copy content
        in a way that requires to adjust the dirstate accordingly. At the time
        this command is written they are :
        - keyword,
        - largefile,
        PLEASE DO NOT GROW THIS LIST ANY FURTHER.

        This function could probably be replaced by more semantic one (like
        "adjust expected size" or "always revalidate file content", etc)
        however at the time where this is writen, this is too much of a detour
        to be considered.
        """
        # delegate to the shared implementation, without the context check
        self._update_file(
            *args,
            **kwargs,
        )
723
735
    def _update_file(
        self,
        filename,
        wc_tracked,
        p1_tracked,
        p2_info=False,
        possibly_dirty=False,
        parentfiledata=None,
    ):
        """Shared implementation behind `update_file` and
        `hacky_extension_update_file`: reset the dirstate entry for
        ``filename`` to the given tracking information."""

        # note: I do not think we need to double check name clash here since we
        # are in a update/merge case that should already have taken care of
        # this. The test agrees

        self._dirty = True
        old_entry = self._map.get(filename)
        if old_entry is None:
            prev_tracked = False
        else:
            prev_tracked = old_entry.tracked
        if prev_tracked != wc_tracked:
            # the tracked set changed, so the tracked-hint file needs
            # rewriting (see `write`)
            self._dirty_tracked_set = True

        self._map.reset_state(
            filename,
            wc_tracked,
            p1_tracked,
            p2_info=p2_info,
            has_meaningful_mtime=not possibly_dirty,
            parentfiledata=parentfiledata,
        )
755
767
    def _check_new_tracked_filename(self, filename):
        """Abort if ``filename`` cannot become a tracked file.

        Rejects invalid names, names already tracked as a directory, names
        whose path components clash with an existing (non-removed) tracked
        file, and names outside the sparse checkout."""
        scmutil.checkfilename(filename)
        if self._map.hastrackeddir(filename):
            msg = _(b'directory %r already in dirstate')
            msg %= pycompat.bytestr(filename)
            raise error.Abort(msg)
        # shadows
        for d in pathutil.finddirs(filename):
            if self._map.hastrackeddir(d):
                break
            entry = self._map.get(d)
            if entry is not None and not entry.removed:
                msg = _(b'file %r in dirstate clashes with %r')
                msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
                raise error.Abort(msg)
        self._check_sparse(filename)
772
784
    def _check_sparse(self, filename):
        """Check that a filename is inside the sparse profile"""
        sparsematch = self._sparsematcher
        # an absent or always-matching matcher means no sparse restriction
        if sparsematch is not None and not sparsematch.always():
            if not sparsematch(filename):
                msg = _(b"cannot add '%s' - it is outside the sparse checkout")
                hint = _(
                    b'include file with `hg debugsparse --include <pattern>` or use '
                    b'`hg add -s <file>` to include file directory while adding'
                )
                raise error.Abort(msg % filename, hint=hint)
784
796
    def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
        """Discover the canonical case folding of ``path``.

        ``normed`` is the case-normalized form of ``path``; a discovered
        folding for an existing path is cached in ``storemap`` keyed by
        ``normed``.  Returns the folded path.
        """
        if exists is None:
            exists = os.path.lexists(os.path.join(self._root, path))
        if not exists:
            # Maybe a path component exists
            if not ignoremissing and b'/' in path:
                d, f = path.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, None)
                folded = d + b"/" + f
            else:
                # No path components, preserve original case
                folded = path
        else:
            # recursively normalize leading directory components
            # against dirstate
            if b'/' in normed:
                d, f = normed.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, True)
                r = self._root + b"/" + d
                folded = d + b"/" + util.fspath(f, r)
            else:
                folded = util.fspath(normed, self._root)
            # only existing paths are cached
            storemap[normed] = folded

        return folded
810
822
811 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
823 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
812 normed = util.normcase(path)
824 normed = util.normcase(path)
813 folded = self._map.filefoldmap.get(normed, None)
825 folded = self._map.filefoldmap.get(normed, None)
814 if folded is None:
826 if folded is None:
815 if isknown:
827 if isknown:
816 folded = path
828 folded = path
817 else:
829 else:
818 folded = self._discoverpath(
830 folded = self._discoverpath(
819 path, normed, ignoremissing, exists, self._map.filefoldmap
831 path, normed, ignoremissing, exists, self._map.filefoldmap
820 )
832 )
821 return folded
833 return folded
822
834
823 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
835 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
824 normed = util.normcase(path)
836 normed = util.normcase(path)
825 folded = self._map.filefoldmap.get(normed, None)
837 folded = self._map.filefoldmap.get(normed, None)
826 if folded is None:
838 if folded is None:
827 folded = self._map.dirfoldmap.get(normed, None)
839 folded = self._map.dirfoldmap.get(normed, None)
828 if folded is None:
840 if folded is None:
829 if isknown:
841 if isknown:
830 folded = path
842 folded = path
831 else:
843 else:
832 # store discovered result in dirfoldmap so that future
844 # store discovered result in dirfoldmap so that future
833 # normalizefile calls don't start matching directories
845 # normalizefile calls don't start matching directories
834 folded = self._discoverpath(
846 folded = self._discoverpath(
835 path, normed, ignoremissing, exists, self._map.dirfoldmap
847 path, normed, ignoremissing, exists, self._map.dirfoldmap
836 )
848 )
837 return folded
849 return folded
838
850
839 def normalize(self, path, isknown=False, ignoremissing=False):
851 def normalize(self, path, isknown=False, ignoremissing=False):
840 """
852 """
841 normalize the case of a pathname when on a casefolding filesystem
853 normalize the case of a pathname when on a casefolding filesystem
842
854
843 isknown specifies whether the filename came from walking the
855 isknown specifies whether the filename came from walking the
844 disk, to avoid extra filesystem access.
856 disk, to avoid extra filesystem access.
845
857
846 If ignoremissing is True, missing path are returned
858 If ignoremissing is True, missing path are returned
847 unchanged. Otherwise, we try harder to normalize possibly
859 unchanged. Otherwise, we try harder to normalize possibly
848 existing path components.
860 existing path components.
849
861
850 The normalized case is determined based on the following precedence:
862 The normalized case is determined based on the following precedence:
851
863
852 - version of name already stored in the dirstate
864 - version of name already stored in the dirstate
853 - version of name stored on disk
865 - version of name stored on disk
854 - version provided via command arguments
866 - version provided via command arguments
855 """
867 """
856
868
857 if self._checkcase:
869 if self._checkcase:
858 return self._normalize(path, isknown, ignoremissing)
870 return self._normalize(path, isknown, ignoremissing)
859 return path
871 return path
860
872
    # XXX this method is barely used, as a result:
    # - its semantic is unclear
    # - do we really needs it ?
    @requires_changing_parents
    def clear(self):
        """Drop every entry from the dirstate map and mark it dirty."""
        self._map.clear()
        self._dirty = True
868
880
    @requires_changing_parents
    def rebuild(self, parent, allfiles, changedfiles=None):
        """Rebuild the dirstate so ``parent`` becomes its single parent.

        ``allfiles`` is the complete file list of the new parent.  When
        ``changedfiles`` is given, only those files are touched; everything
        else keeps its current entry.  Files filtered out by the sparse
        matcher are excluded (and dropped from the dirstate)."""
        matcher = self._sparsematcher
        if matcher is not None and not matcher.always():
            # should not add non-matching files
            allfiles = [f for f in allfiles if matcher(f)]
            if changedfiles:
                changedfiles = [f for f in changedfiles if matcher(f)]

            if changedfiles is not None:
                # these files will be deleted from the dirstate when they are
                # not found to be in allfiles
                dirstatefilestoremove = {f for f in self if not matcher(f)}
                changedfiles = dirstatefilestoremove.union(changedfiles)

        if changedfiles is None:
            # Rebuild entire dirstate
            to_lookup = allfiles
            to_drop = []
            self.clear()
        elif len(changedfiles) < 10:
            # Avoid turning allfiles into a set, which can be expensive if it's
            # large.
            to_lookup = []
            to_drop = []
            for f in changedfiles:
                if f in allfiles:
                    to_lookup.append(f)
                else:
                    to_drop.append(f)
        else:
            changedfilesset = set(changedfiles)
            to_lookup = changedfilesset & set(allfiles)
            to_drop = changedfilesset - to_lookup

        if self._origpl is None:
            # remember the original parents so `_writedirstate` can fire the
            # parent-change callbacks
            self._origpl = self._pl
        self._map.setparents(parent, self._nodeconstants.nullid)

        for f in to_lookup:
            if self.in_merge:
                self.set_tracked(f)
            else:
                self._map.reset_state(
                    f,
                    wc_tracked=True,
                    p1_tracked=True,
                )
        for f in to_drop:
            self._map.reset_state(f)

        self._dirty = True
921
933
    def identity(self):
        """Return identity of dirstate itself to detect changing in storage

        If identity of previous dirstate is equal to this, writing
        changes based on the former dirstate out can keep consistency.
        """
        # NOTE(review): the identity is computed by the map at read time —
        # presumably from the on-disk file's stat; confirm in dirstatemap
        return self._map.identity
929
941
930 def write(self, tr):
942 def write(self, tr):
931 if not self._dirty:
943 if not self._dirty:
932 return
944 return
933
945
934 write_key = self._use_tracked_hint and self._dirty_tracked_set
946 write_key = self._use_tracked_hint and self._dirty_tracked_set
935 if tr:
947 if tr:
936 # make sure we invalidate the current change on abort
948 # make sure we invalidate the current change on abort
937 if tr is not None:
949 if tr is not None:
938 tr.addabort(
950 tr.addabort(
939 b'dirstate-invalidate',
951 b'dirstate-invalidate',
940 lambda tr: self.invalidate(),
952 lambda tr: self.invalidate(),
941 )
953 )
942 # delay writing in-memory changes out
954 # delay writing in-memory changes out
943 tr.addfilegenerator(
955 tr.addfilegenerator(
944 b'dirstate-1-main',
956 b'dirstate-1-main',
945 (self._filename,),
957 (self._filename,),
946 lambda f: self._writedirstate(tr, f),
958 lambda f: self._writedirstate(tr, f),
947 location=b'plain',
959 location=b'plain',
948 post_finalize=True,
960 post_finalize=True,
949 )
961 )
950 if write_key:
962 if write_key:
951 tr.addfilegenerator(
963 tr.addfilegenerator(
952 b'dirstate-2-key-post',
964 b'dirstate-2-key-post',
953 (self._filename_th,),
965 (self._filename_th,),
954 lambda f: self._write_tracked_hint(tr, f),
966 lambda f: self._write_tracked_hint(tr, f),
955 location=b'plain',
967 location=b'plain',
956 post_finalize=True,
968 post_finalize=True,
957 )
969 )
958 return
970 return
959
971
960 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
972 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
961 with file(self._filename) as f:
973 with file(self._filename) as f:
962 self._writedirstate(tr, f)
974 self._writedirstate(tr, f)
963 if write_key:
975 if write_key:
964 # we update the key-file after writing to make sure reader have a
976 # we update the key-file after writing to make sure reader have a
965 # key that match the newly written content
977 # key that match the newly written content
966 with file(self._filename_th) as f:
978 with file(self._filename_th) as f:
967 self._write_tracked_hint(tr, f)
979 self._write_tracked_hint(tr, f)
968
980
969 def delete_tracked_hint(self):
981 def delete_tracked_hint(self):
970 """remove the tracked_hint file
982 """remove the tracked_hint file
971
983
972 To be used by format downgrades operation"""
984 To be used by format downgrades operation"""
973 self._opener.unlink(self._filename_th)
985 self._opener.unlink(self._filename_th)
974 self._use_tracked_hint = False
986 self._use_tracked_hint = False
975
987
    def addparentchangecallback(self, category, callback):
        """add a callback to be called when the wd parents are changed

        Callback will be called with the following arguments:
            dirstate, (oldp1, oldp2), (newp1, newp2)

        Category is a unique identifier to allow overwriting an old callback
        with a newer callback.
        """
        # callbacks are fired (sorted by category) from `_writedirstate`
        self._plchangecallbacks[category] = callback
986
998
    def _writedirstate(self, tr, st):
        """Serialize the dirstate map into the open file ``st``.

        Fires the registered parent-change callbacks first when the working
        directory parents changed since the dirstate was loaded, then
        clears the dirty flags."""
        # notify callbacks about parents change
        if self._origpl is not None and self._origpl != self._pl:
            for c, callback in sorted(self._plchangecallbacks.items()):
                callback(self, self._origpl, self._pl)
            self._origpl = None
        self._map.write(tr, st)
        self._dirty = False
        self._dirty_tracked_set = False
996
1008
    def _write_tracked_hint(self, tr, f):
        """Write a fresh random key to the tracked-hint file ``f``.

        A new key signals to readers that the set of tracked files may have
        changed since they last looked."""
        key = node.hex(uuid.uuid4().bytes)
        f.write(b"1\n%s\n" % key)  # 1 is the format version
1000
1012
1001 def _dirignore(self, f):
1013 def _dirignore(self, f):
1002 if self._ignore(f):
1014 if self._ignore(f):
1003 return True
1015 return True
1004 for p in pathutil.finddirs(f):
1016 for p in pathutil.finddirs(f):
1005 if self._ignore(p):
1017 if self._ignore(p):
1006 return True
1018 return True
1007 return False
1019 return False
1008
1020
1009 def _ignorefiles(self):
1021 def _ignorefiles(self):
1010 files = []
1022 files = []
1011 if os.path.exists(self._join(b'.hgignore')):
1023 if os.path.exists(self._join(b'.hgignore')):
1012 files.append(self._join(b'.hgignore'))
1024 files.append(self._join(b'.hgignore'))
1013 for name, path in self._ui.configitems(b"ui"):
1025 for name, path in self._ui.configitems(b"ui"):
1014 if name == b'ignore' or name.startswith(b'ignore.'):
1026 if name == b'ignore' or name.startswith(b'ignore.'):
1015 # we need to use os.path.join here rather than self._join
1027 # we need to use os.path.join here rather than self._join
1016 # because path is arbitrary and user-specified
1028 # because path is arbitrary and user-specified
1017 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1029 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1018 return files
1030 return files
1019
1031
    def _ignorefileandline(self, f):
        """Return ``(patternfile, lineno, line)`` for the first ignore
        pattern matching ``f``.

        Walks the ignore files breadth-first, queueing ``subinclude``
        references as additional files to scan.  Returns
        ``(None, -1, b"")`` when nothing matches."""
        files = collections.deque(self._ignorefiles())
        visited = set()
        while files:
            i = files.popleft()
            patterns = matchmod.readpatternfile(
                i, self._ui.warn, sourceinfo=True
            )
            for pattern, lineno, line in patterns:
                kind, p = matchmod._patsplit(pattern, b'glob')
                if kind == b"subinclude":
                    # queue the referenced pattern file instead of matching
                    if p not in visited:
                        files.append(p)
                    continue
                m = matchmod.match(
                    self._root, b'', [], [pattern], warn=self._ui.warn
                )
                if m(f):
                    return (i, lineno, line)
            visited.add(i)
        return (None, -1, b"")
1041
1053
1042 def _walkexplicit(self, match, subrepos):
1054 def _walkexplicit(self, match, subrepos):
1043 """Get stat data about the files explicitly specified by match.
1055 """Get stat data about the files explicitly specified by match.
1044
1056
1045 Return a triple (results, dirsfound, dirsnotfound).
1057 Return a triple (results, dirsfound, dirsnotfound).
1046 - results is a mapping from filename to stat result. It also contains
1058 - results is a mapping from filename to stat result. It also contains
1047 listings mapping subrepos and .hg to None.
1059 listings mapping subrepos and .hg to None.
1048 - dirsfound is a list of files found to be directories.
1060 - dirsfound is a list of files found to be directories.
1049 - dirsnotfound is a list of files that the dirstate thinks are
1061 - dirsnotfound is a list of files that the dirstate thinks are
1050 directories and that were not found."""
1062 directories and that were not found."""
1051
1063
1052 def badtype(mode):
1064 def badtype(mode):
1053 kind = _(b'unknown')
1065 kind = _(b'unknown')
1054 if stat.S_ISCHR(mode):
1066 if stat.S_ISCHR(mode):
1055 kind = _(b'character device')
1067 kind = _(b'character device')
1056 elif stat.S_ISBLK(mode):
1068 elif stat.S_ISBLK(mode):
1057 kind = _(b'block device')
1069 kind = _(b'block device')
1058 elif stat.S_ISFIFO(mode):
1070 elif stat.S_ISFIFO(mode):
1059 kind = _(b'fifo')
1071 kind = _(b'fifo')
1060 elif stat.S_ISSOCK(mode):
1072 elif stat.S_ISSOCK(mode):
1061 kind = _(b'socket')
1073 kind = _(b'socket')
1062 elif stat.S_ISDIR(mode):
1074 elif stat.S_ISDIR(mode):
1063 kind = _(b'directory')
1075 kind = _(b'directory')
1064 return _(b'unsupported file type (type is %s)') % kind
1076 return _(b'unsupported file type (type is %s)') % kind
1065
1077
1066 badfn = match.bad
1078 badfn = match.bad
1067 dmap = self._map
1079 dmap = self._map
1068 lstat = os.lstat
1080 lstat = os.lstat
1069 getkind = stat.S_IFMT
1081 getkind = stat.S_IFMT
1070 dirkind = stat.S_IFDIR
1082 dirkind = stat.S_IFDIR
1071 regkind = stat.S_IFREG
1083 regkind = stat.S_IFREG
1072 lnkkind = stat.S_IFLNK
1084 lnkkind = stat.S_IFLNK
1073 join = self._join
1085 join = self._join
1074 dirsfound = []
1086 dirsfound = []
1075 foundadd = dirsfound.append
1087 foundadd = dirsfound.append
1076 dirsnotfound = []
1088 dirsnotfound = []
1077 notfoundadd = dirsnotfound.append
1089 notfoundadd = dirsnotfound.append
1078
1090
1079 if not match.isexact() and self._checkcase:
1091 if not match.isexact() and self._checkcase:
1080 normalize = self._normalize
1092 normalize = self._normalize
1081 else:
1093 else:
1082 normalize = None
1094 normalize = None
1083
1095
1084 files = sorted(match.files())
1096 files = sorted(match.files())
1085 subrepos.sort()
1097 subrepos.sort()
1086 i, j = 0, 0
1098 i, j = 0, 0
1087 while i < len(files) and j < len(subrepos):
1099 while i < len(files) and j < len(subrepos):
1088 subpath = subrepos[j] + b"/"
1100 subpath = subrepos[j] + b"/"
1089 if files[i] < subpath:
1101 if files[i] < subpath:
1090 i += 1
1102 i += 1
1091 continue
1103 continue
1092 while i < len(files) and files[i].startswith(subpath):
1104 while i < len(files) and files[i].startswith(subpath):
1093 del files[i]
1105 del files[i]
1094 j += 1
1106 j += 1
1095
1107
1096 if not files or b'' in files:
1108 if not files or b'' in files:
1097 files = [b'']
1109 files = [b'']
1098 # constructing the foldmap is expensive, so don't do it for the
1110 # constructing the foldmap is expensive, so don't do it for the
1099 # common case where files is ['']
1111 # common case where files is ['']
1100 normalize = None
1112 normalize = None
1101 results = dict.fromkeys(subrepos)
1113 results = dict.fromkeys(subrepos)
1102 results[b'.hg'] = None
1114 results[b'.hg'] = None
1103
1115
1104 for ff in files:
1116 for ff in files:
1105 if normalize:
1117 if normalize:
1106 nf = normalize(ff, False, True)
1118 nf = normalize(ff, False, True)
1107 else:
1119 else:
1108 nf = ff
1120 nf = ff
1109 if nf in results:
1121 if nf in results:
1110 continue
1122 continue
1111
1123
1112 try:
1124 try:
1113 st = lstat(join(nf))
1125 st = lstat(join(nf))
1114 kind = getkind(st.st_mode)
1126 kind = getkind(st.st_mode)
1115 if kind == dirkind:
1127 if kind == dirkind:
1116 if nf in dmap:
1128 if nf in dmap:
1117 # file replaced by dir on disk but still in dirstate
1129 # file replaced by dir on disk but still in dirstate
1118 results[nf] = None
1130 results[nf] = None
1119 foundadd((nf, ff))
1131 foundadd((nf, ff))
1120 elif kind == regkind or kind == lnkkind:
1132 elif kind == regkind or kind == lnkkind:
1121 results[nf] = st
1133 results[nf] = st
1122 else:
1134 else:
1123 badfn(ff, badtype(kind))
1135 badfn(ff, badtype(kind))
1124 if nf in dmap:
1136 if nf in dmap:
1125 results[nf] = None
1137 results[nf] = None
1126 except (OSError) as inst:
1138 except (OSError) as inst:
1127 # nf not found on disk - it is dirstate only
1139 # nf not found on disk - it is dirstate only
1128 if nf in dmap: # does it exactly match a missing file?
1140 if nf in dmap: # does it exactly match a missing file?
1129 results[nf] = None
1141 results[nf] = None
1130 else: # does it match a missing directory?
1142 else: # does it match a missing directory?
1131 if self._map.hasdir(nf):
1143 if self._map.hasdir(nf):
1132 notfoundadd(nf)
1144 notfoundadd(nf)
1133 else:
1145 else:
1134 badfn(ff, encoding.strtolocal(inst.strerror))
1146 badfn(ff, encoding.strtolocal(inst.strerror))
1135
1147
1136 # match.files() may contain explicitly-specified paths that shouldn't
1148 # match.files() may contain explicitly-specified paths that shouldn't
1137 # be taken; drop them from the list of files found. dirsfound/notfound
1149 # be taken; drop them from the list of files found. dirsfound/notfound
1138 # aren't filtered here because they will be tested later.
1150 # aren't filtered here because they will be tested later.
1139 if match.anypats():
1151 if match.anypats():
1140 for f in list(results):
1152 for f in list(results):
1141 if f == b'.hg' or f in subrepos:
1153 if f == b'.hg' or f in subrepos:
1142 # keep sentinel to disable further out-of-repo walks
1154 # keep sentinel to disable further out-of-repo walks
1143 continue
1155 continue
1144 if not match(f):
1156 if not match(f):
1145 del results[f]
1157 del results[f]
1146
1158
1147 # Case insensitive filesystems cannot rely on lstat() failing to detect
1159 # Case insensitive filesystems cannot rely on lstat() failing to detect
1148 # a case-only rename. Prune the stat object for any file that does not
1160 # a case-only rename. Prune the stat object for any file that does not
1149 # match the case in the filesystem, if there are multiple files that
1161 # match the case in the filesystem, if there are multiple files that
1150 # normalize to the same path.
1162 # normalize to the same path.
1151 if match.isexact() and self._checkcase:
1163 if match.isexact() and self._checkcase:
1152 normed = {}
1164 normed = {}
1153
1165
1154 for f, st in results.items():
1166 for f, st in results.items():
1155 if st is None:
1167 if st is None:
1156 continue
1168 continue
1157
1169
1158 nc = util.normcase(f)
1170 nc = util.normcase(f)
1159 paths = normed.get(nc)
1171 paths = normed.get(nc)
1160
1172
1161 if paths is None:
1173 if paths is None:
1162 paths = set()
1174 paths = set()
1163 normed[nc] = paths
1175 normed[nc] = paths
1164
1176
1165 paths.add(f)
1177 paths.add(f)
1166
1178
1167 for norm, paths in normed.items():
1179 for norm, paths in normed.items():
1168 if len(paths) > 1:
1180 if len(paths) > 1:
1169 for path in paths:
1181 for path in paths:
1170 folded = self._discoverpath(
1182 folded = self._discoverpath(
1171 path, norm, True, None, self._map.dirfoldmap
1183 path, norm, True, None, self._map.dirfoldmap
1172 )
1184 )
1173 if path != folded:
1185 if path != folded:
1174 results[path] = None
1186 results[path] = None
1175
1187
1176 return results, dirsfound, dirsnotfound
1188 return results, dirsfound, dirsnotfound
1177
1189
1178 def walk(self, match, subrepos, unknown, ignored, full=True):
1190 def walk(self, match, subrepos, unknown, ignored, full=True):
1179 """
1191 """
1180 Walk recursively through the directory tree, finding all files
1192 Walk recursively through the directory tree, finding all files
1181 matched by match.
1193 matched by match.
1182
1194
1183 If full is False, maybe skip some known-clean files.
1195 If full is False, maybe skip some known-clean files.
1184
1196
1185 Return a dict mapping filename to stat-like object (either
1197 Return a dict mapping filename to stat-like object (either
1186 mercurial.osutil.stat instance or return value of os.stat()).
1198 mercurial.osutil.stat instance or return value of os.stat()).
1187
1199
1188 """
1200 """
1189 # full is a flag that extensions that hook into walk can use -- this
1201 # full is a flag that extensions that hook into walk can use -- this
1190 # implementation doesn't use it at all. This satisfies the contract
1202 # implementation doesn't use it at all. This satisfies the contract
1191 # because we only guarantee a "maybe".
1203 # because we only guarantee a "maybe".
1192
1204
1193 if ignored:
1205 if ignored:
1194 ignore = util.never
1206 ignore = util.never
1195 dirignore = util.never
1207 dirignore = util.never
1196 elif unknown:
1208 elif unknown:
1197 ignore = self._ignore
1209 ignore = self._ignore
1198 dirignore = self._dirignore
1210 dirignore = self._dirignore
1199 else:
1211 else:
1200 # if not unknown and not ignored, drop dir recursion and step 2
1212 # if not unknown and not ignored, drop dir recursion and step 2
1201 ignore = util.always
1213 ignore = util.always
1202 dirignore = util.always
1214 dirignore = util.always
1203
1215
1204 if self._sparsematchfn is not None:
1216 if self._sparsematchfn is not None:
1205 em = matchmod.exact(match.files())
1217 em = matchmod.exact(match.files())
1206 sm = matchmod.unionmatcher([self._sparsematcher, em])
1218 sm = matchmod.unionmatcher([self._sparsematcher, em])
1207 match = matchmod.intersectmatchers(match, sm)
1219 match = matchmod.intersectmatchers(match, sm)
1208
1220
1209 matchfn = match.matchfn
1221 matchfn = match.matchfn
1210 matchalways = match.always()
1222 matchalways = match.always()
1211 matchtdir = match.traversedir
1223 matchtdir = match.traversedir
1212 dmap = self._map
1224 dmap = self._map
1213 listdir = util.listdir
1225 listdir = util.listdir
1214 lstat = os.lstat
1226 lstat = os.lstat
1215 dirkind = stat.S_IFDIR
1227 dirkind = stat.S_IFDIR
1216 regkind = stat.S_IFREG
1228 regkind = stat.S_IFREG
1217 lnkkind = stat.S_IFLNK
1229 lnkkind = stat.S_IFLNK
1218 join = self._join
1230 join = self._join
1219
1231
1220 exact = skipstep3 = False
1232 exact = skipstep3 = False
1221 if match.isexact(): # match.exact
1233 if match.isexact(): # match.exact
1222 exact = True
1234 exact = True
1223 dirignore = util.always # skip step 2
1235 dirignore = util.always # skip step 2
1224 elif match.prefix(): # match.match, no patterns
1236 elif match.prefix(): # match.match, no patterns
1225 skipstep3 = True
1237 skipstep3 = True
1226
1238
1227 if not exact and self._checkcase:
1239 if not exact and self._checkcase:
1228 normalize = self._normalize
1240 normalize = self._normalize
1229 normalizefile = self._normalizefile
1241 normalizefile = self._normalizefile
1230 skipstep3 = False
1242 skipstep3 = False
1231 else:
1243 else:
1232 normalize = self._normalize
1244 normalize = self._normalize
1233 normalizefile = None
1245 normalizefile = None
1234
1246
1235 # step 1: find all explicit files
1247 # step 1: find all explicit files
1236 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1248 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1237 if matchtdir:
1249 if matchtdir:
1238 for d in work:
1250 for d in work:
1239 matchtdir(d[0])
1251 matchtdir(d[0])
1240 for d in dirsnotfound:
1252 for d in dirsnotfound:
1241 matchtdir(d)
1253 matchtdir(d)
1242
1254
1243 skipstep3 = skipstep3 and not (work or dirsnotfound)
1255 skipstep3 = skipstep3 and not (work or dirsnotfound)
1244 work = [d for d in work if not dirignore(d[0])]
1256 work = [d for d in work if not dirignore(d[0])]
1245
1257
1246 # step 2: visit subdirectories
1258 # step 2: visit subdirectories
1247 def traverse(work, alreadynormed):
1259 def traverse(work, alreadynormed):
1248 wadd = work.append
1260 wadd = work.append
1249 while work:
1261 while work:
1250 tracing.counter('dirstate.walk work', len(work))
1262 tracing.counter('dirstate.walk work', len(work))
1251 nd = work.pop()
1263 nd = work.pop()
1252 visitentries = match.visitchildrenset(nd)
1264 visitentries = match.visitchildrenset(nd)
1253 if not visitentries:
1265 if not visitentries:
1254 continue
1266 continue
1255 if visitentries == b'this' or visitentries == b'all':
1267 if visitentries == b'this' or visitentries == b'all':
1256 visitentries = None
1268 visitentries = None
1257 skip = None
1269 skip = None
1258 if nd != b'':
1270 if nd != b'':
1259 skip = b'.hg'
1271 skip = b'.hg'
1260 try:
1272 try:
1261 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1273 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1262 entries = listdir(join(nd), stat=True, skip=skip)
1274 entries = listdir(join(nd), stat=True, skip=skip)
1263 except (PermissionError, FileNotFoundError) as inst:
1275 except (PermissionError, FileNotFoundError) as inst:
1264 match.bad(
1276 match.bad(
1265 self.pathto(nd), encoding.strtolocal(inst.strerror)
1277 self.pathto(nd), encoding.strtolocal(inst.strerror)
1266 )
1278 )
1267 continue
1279 continue
1268 for f, kind, st in entries:
1280 for f, kind, st in entries:
1269 # Some matchers may return files in the visitentries set,
1281 # Some matchers may return files in the visitentries set,
1270 # instead of 'this', if the matcher explicitly mentions them
1282 # instead of 'this', if the matcher explicitly mentions them
1271 # and is not an exactmatcher. This is acceptable; we do not
1283 # and is not an exactmatcher. This is acceptable; we do not
1272 # make any hard assumptions about file-or-directory below
1284 # make any hard assumptions about file-or-directory below
1273 # based on the presence of `f` in visitentries. If
1285 # based on the presence of `f` in visitentries. If
1274 # visitchildrenset returned a set, we can always skip the
1286 # visitchildrenset returned a set, we can always skip the
1275 # entries *not* in the set it provided regardless of whether
1287 # entries *not* in the set it provided regardless of whether
1276 # they're actually a file or a directory.
1288 # they're actually a file or a directory.
1277 if visitentries and f not in visitentries:
1289 if visitentries and f not in visitentries:
1278 continue
1290 continue
1279 if normalizefile:
1291 if normalizefile:
1280 # even though f might be a directory, we're only
1292 # even though f might be a directory, we're only
1281 # interested in comparing it to files currently in the
1293 # interested in comparing it to files currently in the
1282 # dmap -- therefore normalizefile is enough
1294 # dmap -- therefore normalizefile is enough
1283 nf = normalizefile(
1295 nf = normalizefile(
1284 nd and (nd + b"/" + f) or f, True, True
1296 nd and (nd + b"/" + f) or f, True, True
1285 )
1297 )
1286 else:
1298 else:
1287 nf = nd and (nd + b"/" + f) or f
1299 nf = nd and (nd + b"/" + f) or f
1288 if nf not in results:
1300 if nf not in results:
1289 if kind == dirkind:
1301 if kind == dirkind:
1290 if not ignore(nf):
1302 if not ignore(nf):
1291 if matchtdir:
1303 if matchtdir:
1292 matchtdir(nf)
1304 matchtdir(nf)
1293 wadd(nf)
1305 wadd(nf)
1294 if nf in dmap and (matchalways or matchfn(nf)):
1306 if nf in dmap and (matchalways or matchfn(nf)):
1295 results[nf] = None
1307 results[nf] = None
1296 elif kind == regkind or kind == lnkkind:
1308 elif kind == regkind or kind == lnkkind:
1297 if nf in dmap:
1309 if nf in dmap:
1298 if matchalways or matchfn(nf):
1310 if matchalways or matchfn(nf):
1299 results[nf] = st
1311 results[nf] = st
1300 elif (matchalways or matchfn(nf)) and not ignore(
1312 elif (matchalways or matchfn(nf)) and not ignore(
1301 nf
1313 nf
1302 ):
1314 ):
1303 # unknown file -- normalize if necessary
1315 # unknown file -- normalize if necessary
1304 if not alreadynormed:
1316 if not alreadynormed:
1305 nf = normalize(nf, False, True)
1317 nf = normalize(nf, False, True)
1306 results[nf] = st
1318 results[nf] = st
1307 elif nf in dmap and (matchalways or matchfn(nf)):
1319 elif nf in dmap and (matchalways or matchfn(nf)):
1308 results[nf] = None
1320 results[nf] = None
1309
1321
1310 for nd, d in work:
1322 for nd, d in work:
1311 # alreadynormed means that processwork doesn't have to do any
1323 # alreadynormed means that processwork doesn't have to do any
1312 # expensive directory normalization
1324 # expensive directory normalization
1313 alreadynormed = not normalize or nd == d
1325 alreadynormed = not normalize or nd == d
1314 traverse([d], alreadynormed)
1326 traverse([d], alreadynormed)
1315
1327
1316 for s in subrepos:
1328 for s in subrepos:
1317 del results[s]
1329 del results[s]
1318 del results[b'.hg']
1330 del results[b'.hg']
1319
1331
1320 # step 3: visit remaining files from dmap
1332 # step 3: visit remaining files from dmap
1321 if not skipstep3 and not exact:
1333 if not skipstep3 and not exact:
1322 # If a dmap file is not in results yet, it was either
1334 # If a dmap file is not in results yet, it was either
1323 # a) not matching matchfn b) ignored, c) missing, or d) under a
1335 # a) not matching matchfn b) ignored, c) missing, or d) under a
1324 # symlink directory.
1336 # symlink directory.
1325 if not results and matchalways:
1337 if not results and matchalways:
1326 visit = [f for f in dmap]
1338 visit = [f for f in dmap]
1327 else:
1339 else:
1328 visit = [f for f in dmap if f not in results and matchfn(f)]
1340 visit = [f for f in dmap if f not in results and matchfn(f)]
1329 visit.sort()
1341 visit.sort()
1330
1342
1331 if unknown:
1343 if unknown:
1332 # unknown == True means we walked all dirs under the roots
1344 # unknown == True means we walked all dirs under the roots
1333 # that wasn't ignored, and everything that matched was stat'ed
1345 # that wasn't ignored, and everything that matched was stat'ed
1334 # and is already in results.
1346 # and is already in results.
1335 # The rest must thus be ignored or under a symlink.
1347 # The rest must thus be ignored or under a symlink.
1336 audit_path = pathutil.pathauditor(self._root, cached=True)
1348 audit_path = pathutil.pathauditor(self._root, cached=True)
1337
1349
1338 for nf in iter(visit):
1350 for nf in iter(visit):
1339 # If a stat for the same file was already added with a
1351 # If a stat for the same file was already added with a
1340 # different case, don't add one for this, since that would
1352 # different case, don't add one for this, since that would
1341 # make it appear as if the file exists under both names
1353 # make it appear as if the file exists under both names
1342 # on disk.
1354 # on disk.
1343 if (
1355 if (
1344 normalizefile
1356 normalizefile
1345 and normalizefile(nf, True, True) in results
1357 and normalizefile(nf, True, True) in results
1346 ):
1358 ):
1347 results[nf] = None
1359 results[nf] = None
1348 # Report ignored items in the dmap as long as they are not
1360 # Report ignored items in the dmap as long as they are not
1349 # under a symlink directory.
1361 # under a symlink directory.
1350 elif audit_path.check(nf):
1362 elif audit_path.check(nf):
1351 try:
1363 try:
1352 results[nf] = lstat(join(nf))
1364 results[nf] = lstat(join(nf))
1353 # file was just ignored, no links, and exists
1365 # file was just ignored, no links, and exists
1354 except OSError:
1366 except OSError:
1355 # file doesn't exist
1367 # file doesn't exist
1356 results[nf] = None
1368 results[nf] = None
1357 else:
1369 else:
1358 # It's either missing or under a symlink directory
1370 # It's either missing or under a symlink directory
1359 # which we in this case report as missing
1371 # which we in this case report as missing
1360 results[nf] = None
1372 results[nf] = None
1361 else:
1373 else:
1362 # We may not have walked the full directory tree above,
1374 # We may not have walked the full directory tree above,
1363 # so stat and check everything we missed.
1375 # so stat and check everything we missed.
1364 iv = iter(visit)
1376 iv = iter(visit)
1365 for st in util.statfiles([join(i) for i in visit]):
1377 for st in util.statfiles([join(i) for i in visit]):
1366 results[next(iv)] = st
1378 results[next(iv)] = st
1367 return results
1379 return results
1368
1380
1369 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1381 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1370 if self._sparsematchfn is not None:
1382 if self._sparsematchfn is not None:
1371 em = matchmod.exact(matcher.files())
1383 em = matchmod.exact(matcher.files())
1372 sm = matchmod.unionmatcher([self._sparsematcher, em])
1384 sm = matchmod.unionmatcher([self._sparsematcher, em])
1373 matcher = matchmod.intersectmatchers(matcher, sm)
1385 matcher = matchmod.intersectmatchers(matcher, sm)
1374 # Force Rayon (Rust parallelism library) to respect the number of
1386 # Force Rayon (Rust parallelism library) to respect the number of
1375 # workers. This is a temporary workaround until Rust code knows
1387 # workers. This is a temporary workaround until Rust code knows
1376 # how to read the config file.
1388 # how to read the config file.
1377 numcpus = self._ui.configint(b"worker", b"numcpus")
1389 numcpus = self._ui.configint(b"worker", b"numcpus")
1378 if numcpus is not None:
1390 if numcpus is not None:
1379 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1391 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1380
1392
1381 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1393 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1382 if not workers_enabled:
1394 if not workers_enabled:
1383 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1395 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1384
1396
1385 (
1397 (
1386 lookup,
1398 lookup,
1387 modified,
1399 modified,
1388 added,
1400 added,
1389 removed,
1401 removed,
1390 deleted,
1402 deleted,
1391 clean,
1403 clean,
1392 ignored,
1404 ignored,
1393 unknown,
1405 unknown,
1394 warnings,
1406 warnings,
1395 bad,
1407 bad,
1396 traversed,
1408 traversed,
1397 dirty,
1409 dirty,
1398 ) = rustmod.status(
1410 ) = rustmod.status(
1399 self._map._map,
1411 self._map._map,
1400 matcher,
1412 matcher,
1401 self._rootdir,
1413 self._rootdir,
1402 self._ignorefiles(),
1414 self._ignorefiles(),
1403 self._checkexec,
1415 self._checkexec,
1404 bool(list_clean),
1416 bool(list_clean),
1405 bool(list_ignored),
1417 bool(list_ignored),
1406 bool(list_unknown),
1418 bool(list_unknown),
1407 bool(matcher.traversedir),
1419 bool(matcher.traversedir),
1408 )
1420 )
1409
1421
1410 self._dirty |= dirty
1422 self._dirty |= dirty
1411
1423
1412 if matcher.traversedir:
1424 if matcher.traversedir:
1413 for dir in traversed:
1425 for dir in traversed:
1414 matcher.traversedir(dir)
1426 matcher.traversedir(dir)
1415
1427
1416 if self._ui.warn:
1428 if self._ui.warn:
1417 for item in warnings:
1429 for item in warnings:
1418 if isinstance(item, tuple):
1430 if isinstance(item, tuple):
1419 file_path, syntax = item
1431 file_path, syntax = item
1420 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1432 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1421 file_path,
1433 file_path,
1422 syntax,
1434 syntax,
1423 )
1435 )
1424 self._ui.warn(msg)
1436 self._ui.warn(msg)
1425 else:
1437 else:
1426 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1438 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1427 self._ui.warn(
1439 self._ui.warn(
1428 msg
1440 msg
1429 % (
1441 % (
1430 pathutil.canonpath(
1442 pathutil.canonpath(
1431 self._rootdir, self._rootdir, item
1443 self._rootdir, self._rootdir, item
1432 ),
1444 ),
1433 b"No such file or directory",
1445 b"No such file or directory",
1434 )
1446 )
1435 )
1447 )
1436
1448
1437 for fn, message in bad:
1449 for fn, message in bad:
1438 matcher.bad(fn, encoding.strtolocal(message))
1450 matcher.bad(fn, encoding.strtolocal(message))
1439
1451
1440 status = scmutil.status(
1452 status = scmutil.status(
1441 modified=modified,
1453 modified=modified,
1442 added=added,
1454 added=added,
1443 removed=removed,
1455 removed=removed,
1444 deleted=deleted,
1456 deleted=deleted,
1445 unknown=unknown,
1457 unknown=unknown,
1446 ignored=ignored,
1458 ignored=ignored,
1447 clean=clean,
1459 clean=clean,
1448 )
1460 )
1449 return (lookup, status)
1461 return (lookup, status)
1450
1462
1451 # XXX since this can make the dirstate dirty (through rust), we should
1463 # XXX since this can make the dirstate dirty (through rust), we should
1452 # enforce that it is done withing an appropriate change-context that scope
1464 # enforce that it is done withing an appropriate change-context that scope
1453 # the change and ensure it eventually get written on disk (or rolled back)
1465 # the change and ensure it eventually get written on disk (or rolled back)
1454 def status(self, match, subrepos, ignored, clean, unknown):
1466 def status(self, match, subrepos, ignored, clean, unknown):
1455 """Determine the status of the working copy relative to the
1467 """Determine the status of the working copy relative to the
1456 dirstate and return a pair of (unsure, status), where status is of type
1468 dirstate and return a pair of (unsure, status), where status is of type
1457 scmutil.status and:
1469 scmutil.status and:
1458
1470
1459 unsure:
1471 unsure:
1460 files that might have been modified since the dirstate was
1472 files that might have been modified since the dirstate was
1461 written, but need to be read to be sure (size is the same
1473 written, but need to be read to be sure (size is the same
1462 but mtime differs)
1474 but mtime differs)
1463 status.modified:
1475 status.modified:
1464 files that have definitely been modified since the dirstate
1476 files that have definitely been modified since the dirstate
1465 was written (different size or mode)
1477 was written (different size or mode)
1466 status.clean:
1478 status.clean:
1467 files that have definitely not been modified since the
1479 files that have definitely not been modified since the
1468 dirstate was written
1480 dirstate was written
1469 """
1481 """
1470 listignored, listclean, listunknown = ignored, clean, unknown
1482 listignored, listclean, listunknown = ignored, clean, unknown
1471 lookup, modified, added, unknown, ignored = [], [], [], [], []
1483 lookup, modified, added, unknown, ignored = [], [], [], [], []
1472 removed, deleted, clean = [], [], []
1484 removed, deleted, clean = [], [], []
1473
1485
1474 dmap = self._map
1486 dmap = self._map
1475 dmap.preload()
1487 dmap.preload()
1476
1488
1477 use_rust = True
1489 use_rust = True
1478
1490
1479 allowed_matchers = (
1491 allowed_matchers = (
1480 matchmod.alwaysmatcher,
1492 matchmod.alwaysmatcher,
1481 matchmod.differencematcher,
1493 matchmod.differencematcher,
1482 matchmod.exactmatcher,
1494 matchmod.exactmatcher,
1483 matchmod.includematcher,
1495 matchmod.includematcher,
1484 matchmod.intersectionmatcher,
1496 matchmod.intersectionmatcher,
1485 matchmod.nevermatcher,
1497 matchmod.nevermatcher,
1486 matchmod.unionmatcher,
1498 matchmod.unionmatcher,
1487 )
1499 )
1488
1500
1489 if rustmod is None:
1501 if rustmod is None:
1490 use_rust = False
1502 use_rust = False
1491 elif self._checkcase:
1503 elif self._checkcase:
1492 # Case-insensitive filesystems are not handled yet
1504 # Case-insensitive filesystems are not handled yet
1493 use_rust = False
1505 use_rust = False
1494 elif subrepos:
1506 elif subrepos:
1495 use_rust = False
1507 use_rust = False
1496 elif not isinstance(match, allowed_matchers):
1508 elif not isinstance(match, allowed_matchers):
1497 # Some matchers have yet to be implemented
1509 # Some matchers have yet to be implemented
1498 use_rust = False
1510 use_rust = False
1499
1511
1500 # Get the time from the filesystem so we can disambiguate files that
1512 # Get the time from the filesystem so we can disambiguate files that
1501 # appear modified in the present or future.
1513 # appear modified in the present or future.
1502 try:
1514 try:
1503 mtime_boundary = timestamp.get_fs_now(self._opener)
1515 mtime_boundary = timestamp.get_fs_now(self._opener)
1504 except OSError:
1516 except OSError:
1505 # In largefiles or readonly context
1517 # In largefiles or readonly context
1506 mtime_boundary = None
1518 mtime_boundary = None
1507
1519
1508 if use_rust:
1520 if use_rust:
1509 try:
1521 try:
1510 res = self._rust_status(
1522 res = self._rust_status(
1511 match, listclean, listignored, listunknown
1523 match, listclean, listignored, listunknown
1512 )
1524 )
1513 return res + (mtime_boundary,)
1525 return res + (mtime_boundary,)
1514 except rustmod.FallbackError:
1526 except rustmod.FallbackError:
1515 pass
1527 pass
1516
1528
1517 def noop(f):
1529 def noop(f):
1518 pass
1530 pass
1519
1531
1520 dcontains = dmap.__contains__
1532 dcontains = dmap.__contains__
1521 dget = dmap.__getitem__
1533 dget = dmap.__getitem__
1522 ladd = lookup.append # aka "unsure"
1534 ladd = lookup.append # aka "unsure"
1523 madd = modified.append
1535 madd = modified.append
1524 aadd = added.append
1536 aadd = added.append
1525 uadd = unknown.append if listunknown else noop
1537 uadd = unknown.append if listunknown else noop
1526 iadd = ignored.append if listignored else noop
1538 iadd = ignored.append if listignored else noop
1527 radd = removed.append
1539 radd = removed.append
1528 dadd = deleted.append
1540 dadd = deleted.append
1529 cadd = clean.append if listclean else noop
1541 cadd = clean.append if listclean else noop
1530 mexact = match.exact
1542 mexact = match.exact
1531 dirignore = self._dirignore
1543 dirignore = self._dirignore
1532 checkexec = self._checkexec
1544 checkexec = self._checkexec
1533 checklink = self._checklink
1545 checklink = self._checklink
1534 copymap = self._map.copymap
1546 copymap = self._map.copymap
1535
1547
1536 # We need to do full walks when either
1548 # We need to do full walks when either
1537 # - we're listing all clean files, or
1549 # - we're listing all clean files, or
1538 # - match.traversedir does something, because match.traversedir should
1550 # - match.traversedir does something, because match.traversedir should
1539 # be called for every dir in the working dir
1551 # be called for every dir in the working dir
1540 full = listclean or match.traversedir is not None
1552 full = listclean or match.traversedir is not None
1541 for fn, st in self.walk(
1553 for fn, st in self.walk(
1542 match, subrepos, listunknown, listignored, full=full
1554 match, subrepos, listunknown, listignored, full=full
1543 ).items():
1555 ).items():
1544 if not dcontains(fn):
1556 if not dcontains(fn):
1545 if (listignored or mexact(fn)) and dirignore(fn):
1557 if (listignored or mexact(fn)) and dirignore(fn):
1546 if listignored:
1558 if listignored:
1547 iadd(fn)
1559 iadd(fn)
1548 else:
1560 else:
1549 uadd(fn)
1561 uadd(fn)
1550 continue
1562 continue
1551
1563
1552 t = dget(fn)
1564 t = dget(fn)
1553 mode = t.mode
1565 mode = t.mode
1554 size = t.size
1566 size = t.size
1555
1567
1556 if not st and t.tracked:
1568 if not st and t.tracked:
1557 dadd(fn)
1569 dadd(fn)
1558 elif t.p2_info:
1570 elif t.p2_info:
1559 madd(fn)
1571 madd(fn)
1560 elif t.added:
1572 elif t.added:
1561 aadd(fn)
1573 aadd(fn)
1562 elif t.removed:
1574 elif t.removed:
1563 radd(fn)
1575 radd(fn)
1564 elif t.tracked:
1576 elif t.tracked:
1565 if not checklink and t.has_fallback_symlink:
1577 if not checklink and t.has_fallback_symlink:
1566 # If the file system does not support symlink, the mode
1578 # If the file system does not support symlink, the mode
1567 # might not be correctly stored in the dirstate, so do not
1579 # might not be correctly stored in the dirstate, so do not
1568 # trust it.
1580 # trust it.
1569 ladd(fn)
1581 ladd(fn)
1570 elif not checkexec and t.has_fallback_exec:
1582 elif not checkexec and t.has_fallback_exec:
1571 # If the file system does not support exec bits, the mode
1583 # If the file system does not support exec bits, the mode
1572 # might not be correctly stored in the dirstate, so do not
1584 # might not be correctly stored in the dirstate, so do not
1573 # trust it.
1585 # trust it.
1574 ladd(fn)
1586 ladd(fn)
1575 elif (
1587 elif (
1576 size >= 0
1588 size >= 0
1577 and (
1589 and (
1578 (size != st.st_size and size != st.st_size & _rangemask)
1590 (size != st.st_size and size != st.st_size & _rangemask)
1579 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1591 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1580 )
1592 )
1581 or fn in copymap
1593 or fn in copymap
1582 ):
1594 ):
1583 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1595 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1584 # issue6456: Size returned may be longer due to
1596 # issue6456: Size returned may be longer due to
1585 # encryption on EXT-4 fscrypt, undecided.
1597 # encryption on EXT-4 fscrypt, undecided.
1586 ladd(fn)
1598 ladd(fn)
1587 else:
1599 else:
1588 madd(fn)
1600 madd(fn)
1589 elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
1601 elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
1590 # There might be a change in the future if for example the
1602 # There might be a change in the future if for example the
1591 # internal clock is off, but this is a case where the issues
1603 # internal clock is off, but this is a case where the issues
1592 # the user would face would be a lot worse and there is
1604 # the user would face would be a lot worse and there is
1593 # nothing we can really do.
1605 # nothing we can really do.
1594 ladd(fn)
1606 ladd(fn)
1595 elif listclean:
1607 elif listclean:
1596 cadd(fn)
1608 cadd(fn)
1597 status = scmutil.status(
1609 status = scmutil.status(
1598 modified, added, removed, deleted, unknown, ignored, clean
1610 modified, added, removed, deleted, unknown, ignored, clean
1599 )
1611 )
1600 return (lookup, status, mtime_boundary)
1612 return (lookup, status, mtime_boundary)
1601
1613
1602 def matches(self, match):
1614 def matches(self, match):
1603 """
1615 """
1604 return files in the dirstate (in whatever state) filtered by match
1616 return files in the dirstate (in whatever state) filtered by match
1605 """
1617 """
1606 dmap = self._map
1618 dmap = self._map
1607 if rustmod is not None:
1619 if rustmod is not None:
1608 dmap = self._map._map
1620 dmap = self._map._map
1609
1621
1610 if match.always():
1622 if match.always():
1611 return dmap.keys()
1623 return dmap.keys()
1612 files = match.files()
1624 files = match.files()
1613 if match.isexact():
1625 if match.isexact():
1614 # fast path -- filter the other way around, since typically files is
1626 # fast path -- filter the other way around, since typically files is
1615 # much smaller than dmap
1627 # much smaller than dmap
1616 return [f for f in files if f in dmap]
1628 return [f for f in files if f in dmap]
1617 if match.prefix() and all(fn in dmap for fn in files):
1629 if match.prefix() and all(fn in dmap for fn in files):
1618 # fast path -- all the values are known to be files, so just return
1630 # fast path -- all the values are known to be files, so just return
1619 # that
1631 # that
1620 return list(files)
1632 return list(files)
1621 return [f for f in dmap if match(f)]
1633 return [f for f in dmap if match(f)]
1622
1634
1623 def _actualfilename(self, tr):
1635 def _actualfilename(self, tr):
1624 if tr:
1636 if tr:
1625 return self._pendingfilename
1637 return self._pendingfilename
1626 else:
1638 else:
1627 return self._filename
1639 return self._filename
1628
1640
1629 def all_file_names(self):
1641 def all_file_names(self):
1630 """list all filename currently used by this dirstate
1642 """list all filename currently used by this dirstate
1631
1643
1632 This is only used to do `hg rollback` related backup in the transaction
1644 This is only used to do `hg rollback` related backup in the transaction
1633 """
1645 """
1634 if not self._opener.exists(self._filename):
1646 if not self._opener.exists(self._filename):
1635 # no data every written to disk yet
1647 # no data every written to disk yet
1636 return ()
1648 return ()
1637 elif self._use_dirstate_v2:
1649 elif self._use_dirstate_v2:
1638 return (
1650 return (
1639 self._filename,
1651 self._filename,
1640 self._map.docket.data_filename(),
1652 self._map.docket.data_filename(),
1641 )
1653 )
1642 else:
1654 else:
1643 return (self._filename,)
1655 return (self._filename,)
1644
1656
    def verify(self, m1, m2, p1, narrow_matcher=None):
        """
        check the dirstate contents against the parent manifest and yield errors

        ``m1`` and ``m2`` are the manifests of the two working-copy parents,
        ``p1`` the node of the first parent (used only in error messages).
        ``narrow_matcher``, when given, restricts which manifest files are
        checked in the second pass.  Errors are yielded as formatted byte
        strings; an empty iteration means no inconsistency was found.
        """
        # pre-translated error templates, filled in with %-formatting below
        missing_from_p1 = _(
            b"%s marked as tracked in p1 (%s) but not in manifest1\n"
        )
        unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
        missing_from_ps = _(
            b"%s marked as modified, but not in either manifest\n"
        )
        missing_from_ds = _(
            b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
        )
        # first pass: every dirstate entry must be consistent with the
        # manifests
        for f, entry in self.items():
            if entry.p1_tracked:
                if entry.modified and f not in m1 and f not in m2:
                    # modified files should appear in at least one parent
                    yield missing_from_ps % f
                elif f not in m1:
                    yield missing_from_p1 % (f, node.short(p1))
            if entry.added and f in m1:
                # "added" means new in the working copy, so it must not
                # already exist in p1's manifest
                yield unexpected_in_p1 % f
        # second pass: every file in p1's manifest must be tracked in the
        # dirstate (unless excluded by the narrow matcher)
        for f in m1:
            if narrow_matcher is not None and not narrow_matcher(f):
                continue
            entry = self.get_entry(f)
            if not entry.p1_tracked:
                yield missing_from_ds % (f, node.short(p1))
General Comments 0
You need to be logged in to leave comments. Login now