##// END OF EJS Templates
dirstate: use the new `check_invalidated` decorator for `_changing`...
marmoute -
r51014:de42ba9d default
parent child Browse files
Show More
@@ -1,1684 +1,1682 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import collections
9 import collections
10 import contextlib
10 import contextlib
11 import os
11 import os
12 import stat
12 import stat
13 import uuid
13 import uuid
14
14
15 from .i18n import _
15 from .i18n import _
16 from .pycompat import delattr
16 from .pycompat import delattr
17
17
18 from hgdemandimport import tracing
18 from hgdemandimport import tracing
19
19
20 from . import (
20 from . import (
21 dirstatemap,
21 dirstatemap,
22 encoding,
22 encoding,
23 error,
23 error,
24 match as matchmod,
24 match as matchmod,
25 node,
25 node,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 util,
30 util,
31 )
31 )
32
32
33 from .dirstateutils import (
33 from .dirstateutils import (
34 timestamp,
34 timestamp,
35 )
35 )
36
36
37 from .interfaces import (
37 from .interfaces import (
38 dirstate as intdirstate,
38 dirstate as intdirstate,
39 util as interfaceutil,
39 util as interfaceutil,
40 )
40 )
41
41
42 parsers = policy.importmod('parsers')
42 parsers = policy.importmod('parsers')
43 rustmod = policy.importrust('dirstate')
43 rustmod = policy.importrust('dirstate')
44
44
45 HAS_FAST_DIRSTATE_V2 = rustmod is not None
45 HAS_FAST_DIRSTATE_V2 = rustmod is not None
46
46
47 propertycache = util.propertycache
47 propertycache = util.propertycache
48 filecache = scmutil.filecache
48 filecache = scmutil.filecache
49 _rangemask = dirstatemap.rangemask
49 _rangemask = dirstatemap.rangemask
50
50
51 DirstateItem = dirstatemap.DirstateItem
51 DirstateItem = dirstatemap.DirstateItem
52
52
53
53
54 class repocache(filecache):
54 class repocache(filecache):
55 """filecache for files in .hg/"""
55 """filecache for files in .hg/"""
56
56
57 def join(self, obj, fname):
57 def join(self, obj, fname):
58 return obj._opener.join(fname)
58 return obj._opener.join(fname)
59
59
60
60
61 class rootcache(filecache):
61 class rootcache(filecache):
62 """filecache for files in the repository root"""
62 """filecache for files in the repository root"""
63
63
64 def join(self, obj, fname):
64 def join(self, obj, fname):
65 return obj._join(fname)
65 return obj._join(fname)
66
66
67
67
68 def check_invalidated(func):
68 def check_invalidated(func):
69 """check we func is called a non-invalidated dirstate
69 """check we func is called a non-invalidated dirstate
70
70
71 The dirstate is in an "invalidated state" after an error occured during its
71 The dirstate is in an "invalidated state" after an error occured during its
72 modification and remains so until we exited the top level scope that framed
72 modification and remains so until we exited the top level scope that framed
73 such change.
73 such change.
74 """
74 """
75
75
76 def wrap(self, *args, **kwargs):
76 def wrap(self, *args, **kwargs):
77 if self._invalidated_context:
77 if self._invalidated_context:
78 msg = 'calling `%s` after the dirstate was invalidated'
78 msg = 'calling `%s` after the dirstate was invalidated'
79 msg %= func.__name__
79 msg %= func.__name__
80 raise error.ProgrammingError(msg)
80 raise error.ProgrammingError(msg)
81 return func(self, *args, **kwargs)
81 return func(self, *args, **kwargs)
82
82
83 return wrap
83 return wrap
84
84
85
85
86 def requires_changing_parents(func):
86 def requires_changing_parents(func):
87 def wrap(self, *args, **kwargs):
87 def wrap(self, *args, **kwargs):
88 if not self.is_changing_parents:
88 if not self.is_changing_parents:
89 msg = 'calling `%s` outside of a changing_parents context'
89 msg = 'calling `%s` outside of a changing_parents context'
90 msg %= func.__name__
90 msg %= func.__name__
91 raise error.ProgrammingError(msg)
91 raise error.ProgrammingError(msg)
92 return func(self, *args, **kwargs)
92 return func(self, *args, **kwargs)
93
93
94 return check_invalidated(wrap)
94 return check_invalidated(wrap)
95
95
96
96
97 def requires_changing_files(func):
97 def requires_changing_files(func):
98 def wrap(self, *args, **kwargs):
98 def wrap(self, *args, **kwargs):
99 if not self.is_changing_files:
99 if not self.is_changing_files:
100 msg = 'calling `%s` outside of a `changing_files`'
100 msg = 'calling `%s` outside of a `changing_files`'
101 msg %= func.__name__
101 msg %= func.__name__
102 raise error.ProgrammingError(msg)
102 raise error.ProgrammingError(msg)
103 return func(self, *args, **kwargs)
103 return func(self, *args, **kwargs)
104
104
105 return check_invalidated(wrap)
105 return check_invalidated(wrap)
106
106
107
107
108 def requires_changing_any(func):
108 def requires_changing_any(func):
109 def wrap(self, *args, **kwargs):
109 def wrap(self, *args, **kwargs):
110 if not self.is_changing_any:
110 if not self.is_changing_any:
111 msg = 'calling `%s` outside of a changing context'
111 msg = 'calling `%s` outside of a changing context'
112 msg %= func.__name__
112 msg %= func.__name__
113 raise error.ProgrammingError(msg)
113 raise error.ProgrammingError(msg)
114 return func(self, *args, **kwargs)
114 return func(self, *args, **kwargs)
115
115
116 return check_invalidated(wrap)
116 return check_invalidated(wrap)
117
117
118
118
119 def requires_not_changing_parents(func):
119 def requires_not_changing_parents(func):
120 def wrap(self, *args, **kwargs):
120 def wrap(self, *args, **kwargs):
121 if self.is_changing_parents:
121 if self.is_changing_parents:
122 msg = 'calling `%s` inside of a changing_parents context'
122 msg = 'calling `%s` inside of a changing_parents context'
123 msg %= func.__name__
123 msg %= func.__name__
124 raise error.ProgrammingError(msg)
124 raise error.ProgrammingError(msg)
125 return func(self, *args, **kwargs)
125 return func(self, *args, **kwargs)
126
126
127 return check_invalidated(wrap)
127 return check_invalidated(wrap)
128
128
129
129
130 CHANGE_TYPE_PARENTS = "parents"
130 CHANGE_TYPE_PARENTS = "parents"
131 CHANGE_TYPE_FILES = "files"
131 CHANGE_TYPE_FILES = "files"
132
132
133
133
134 @interfaceutil.implementer(intdirstate.idirstate)
134 @interfaceutil.implementer(intdirstate.idirstate)
135 class dirstate:
135 class dirstate:
136 def __init__(
136 def __init__(
137 self,
137 self,
138 opener,
138 opener,
139 ui,
139 ui,
140 root,
140 root,
141 validate,
141 validate,
142 sparsematchfn,
142 sparsematchfn,
143 nodeconstants,
143 nodeconstants,
144 use_dirstate_v2,
144 use_dirstate_v2,
145 use_tracked_hint=False,
145 use_tracked_hint=False,
146 ):
146 ):
147 """Create a new dirstate object.
147 """Create a new dirstate object.
148
148
149 opener is an open()-like callable that can be used to open the
149 opener is an open()-like callable that can be used to open the
150 dirstate file; root is the root of the directory tracked by
150 dirstate file; root is the root of the directory tracked by
151 the dirstate.
151 the dirstate.
152 """
152 """
153 self._use_dirstate_v2 = use_dirstate_v2
153 self._use_dirstate_v2 = use_dirstate_v2
154 self._use_tracked_hint = use_tracked_hint
154 self._use_tracked_hint = use_tracked_hint
155 self._nodeconstants = nodeconstants
155 self._nodeconstants = nodeconstants
156 self._opener = opener
156 self._opener = opener
157 self._validate = validate
157 self._validate = validate
158 self._root = root
158 self._root = root
159 # Either build a sparse-matcher or None if sparse is disabled
159 # Either build a sparse-matcher or None if sparse is disabled
160 self._sparsematchfn = sparsematchfn
160 self._sparsematchfn = sparsematchfn
161 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
161 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
162 # UNC path pointing to root share (issue4557)
162 # UNC path pointing to root share (issue4557)
163 self._rootdir = pathutil.normasprefix(root)
163 self._rootdir = pathutil.normasprefix(root)
164 # True is any internal state may be different
164 # True is any internal state may be different
165 self._dirty = False
165 self._dirty = False
166 # True if the set of tracked file may be different
166 # True if the set of tracked file may be different
167 self._dirty_tracked_set = False
167 self._dirty_tracked_set = False
168 self._ui = ui
168 self._ui = ui
169 self._filecache = {}
169 self._filecache = {}
170 # nesting level of `changing_parents` context
170 # nesting level of `changing_parents` context
171 self._changing_level = 0
171 self._changing_level = 0
172 # the change currently underway
172 # the change currently underway
173 self._change_type = None
173 self._change_type = None
174 # True if the current dirstate changing operations have been
174 # True if the current dirstate changing operations have been
175 # invalidated (used to make sure all nested contexts have been exited)
175 # invalidated (used to make sure all nested contexts have been exited)
176 self._invalidated_context = False
176 self._invalidated_context = False
177 self._filename = b'dirstate'
177 self._filename = b'dirstate'
178 self._filename_th = b'dirstate-tracked-hint'
178 self._filename_th = b'dirstate-tracked-hint'
179 self._pendingfilename = b'%s.pending' % self._filename
179 self._pendingfilename = b'%s.pending' % self._filename
180 self._plchangecallbacks = {}
180 self._plchangecallbacks = {}
181 self._origpl = None
181 self._origpl = None
182 self._mapcls = dirstatemap.dirstatemap
182 self._mapcls = dirstatemap.dirstatemap
183 # Access and cache cwd early, so we don't access it for the first time
183 # Access and cache cwd early, so we don't access it for the first time
184 # after a working-copy update caused it to not exist (accessing it then
184 # after a working-copy update caused it to not exist (accessing it then
185 # raises an exception).
185 # raises an exception).
186 self._cwd
186 self._cwd
187
187
188 def prefetch_parents(self):
188 def prefetch_parents(self):
189 """make sure the parents are loaded
189 """make sure the parents are loaded
190
190
191 Used to avoid a race condition.
191 Used to avoid a race condition.
192 """
192 """
193 self._pl
193 self._pl
194
194
195 @contextlib.contextmanager
195 @contextlib.contextmanager
196 @check_invalidated
196 def _changing(self, repo, change_type):
197 def _changing(self, repo, change_type):
197 if repo.currentwlock() is None:
198 if repo.currentwlock() is None:
198 msg = b"trying to change the dirstate without holding the wlock"
199 msg = b"trying to change the dirstate without holding the wlock"
199 raise error.ProgrammingError(msg)
200 raise error.ProgrammingError(msg)
200 if self._invalidated_context:
201 msg = "trying to use an invalidated dirstate before it has reset"
202 raise error.ProgrammingError(msg)
203
201
204 has_tr = repo.currenttransaction() is not None
202 has_tr = repo.currenttransaction() is not None
205 if not has_tr and self._changing_level == 0 and self._dirty:
203 if not has_tr and self._changing_level == 0 and self._dirty:
206 msg = "entering a changing context, but dirstate is already dirty"
204 msg = "entering a changing context, but dirstate is already dirty"
207 raise error.ProgrammingError(msg)
205 raise error.ProgrammingError(msg)
208
206
209 # different type of change are mutually exclusive
207 # different type of change are mutually exclusive
210 if self._change_type is None:
208 if self._change_type is None:
211 assert self._changing_level == 0
209 assert self._changing_level == 0
212 self._change_type = change_type
210 self._change_type = change_type
213 elif self._change_type != change_type:
211 elif self._change_type != change_type:
214 msg = (
212 msg = (
215 'trying to open "%s" dirstate-changing context while a "%s" is'
213 'trying to open "%s" dirstate-changing context while a "%s" is'
216 ' already open'
214 ' already open'
217 )
215 )
218 msg %= (change_type, self._change_type)
216 msg %= (change_type, self._change_type)
219 raise error.ProgrammingError(msg)
217 raise error.ProgrammingError(msg)
220 self._changing_level += 1
218 self._changing_level += 1
221 try:
219 try:
222 yield
220 yield
223 except: # re-raises
221 except: # re-raises
224 self.invalidate()
222 self.invalidate()
225 raise
223 raise
226 finally:
224 finally:
227 tr = repo.currenttransaction()
225 tr = repo.currenttransaction()
228 if self._changing_level > 0:
226 if self._changing_level > 0:
229 if self._invalidated_context:
227 if self._invalidated_context:
230 # make sure we invalidate anything an upper context might
228 # make sure we invalidate anything an upper context might
231 # have changed.
229 # have changed.
232 self.invalidate()
230 self.invalidate()
233 self._changing_level -= 1
231 self._changing_level -= 1
234 # The invalidation is complete once we exit the final context
232 # The invalidation is complete once we exit the final context
235 # manager
233 # manager
236 if self._changing_level <= 0:
234 if self._changing_level <= 0:
237 self._change_type = None
235 self._change_type = None
238 assert self._changing_level == 0
236 assert self._changing_level == 0
239 if self._invalidated_context:
237 if self._invalidated_context:
240 self._invalidated_context = False
238 self._invalidated_context = False
241 else:
239 else:
242 # When an exception occured, `_invalidated_context`
240 # When an exception occured, `_invalidated_context`
243 # would have been set to True by the `invalidate`
241 # would have been set to True by the `invalidate`
244 # call earlier.
242 # call earlier.
245 #
243 #
246 # We don't have more straightforward code, because the
244 # We don't have more straightforward code, because the
247 # Exception catching (and the associated `invalidate`
245 # Exception catching (and the associated `invalidate`
248 # calling) might have been called by a nested context
246 # calling) might have been called by a nested context
249 # instead of the top level one.
247 # instead of the top level one.
250 self.write(tr)
248 self.write(tr)
251 if has_tr != (tr is not None):
249 if has_tr != (tr is not None):
252 if has_tr:
250 if has_tr:
253 m = "transaction vanished while changing dirstate"
251 m = "transaction vanished while changing dirstate"
254 else:
252 else:
255 m = "transaction appeared while changing dirstate"
253 m = "transaction appeared while changing dirstate"
256 raise error.ProgrammingError(m)
254 raise error.ProgrammingError(m)
257
255
258 @contextlib.contextmanager
256 @contextlib.contextmanager
259 def changing_parents(self, repo):
257 def changing_parents(self, repo):
260 with self._changing(repo, CHANGE_TYPE_PARENTS) as c:
258 with self._changing(repo, CHANGE_TYPE_PARENTS) as c:
261 yield c
259 yield c
262
260
263 @contextlib.contextmanager
261 @contextlib.contextmanager
264 def changing_files(self, repo):
262 def changing_files(self, repo):
265 with self._changing(repo, CHANGE_TYPE_FILES) as c:
263 with self._changing(repo, CHANGE_TYPE_FILES) as c:
266 yield c
264 yield c
267
265
268 # here to help migration to the new code
266 # here to help migration to the new code
269 def parentchange(self):
267 def parentchange(self):
270 msg = (
268 msg = (
271 "Mercurial 6.4 and later requires call to "
269 "Mercurial 6.4 and later requires call to "
272 "`dirstate.changing_parents(repo)`"
270 "`dirstate.changing_parents(repo)`"
273 )
271 )
274 raise error.ProgrammingError(msg)
272 raise error.ProgrammingError(msg)
275
273
276 @property
274 @property
277 def is_changing_any(self):
275 def is_changing_any(self):
278 """Returns true if the dirstate is in the middle of a set of changes.
276 """Returns true if the dirstate is in the middle of a set of changes.
279
277
280 This returns True for any kind of change.
278 This returns True for any kind of change.
281 """
279 """
282 return self._changing_level > 0
280 return self._changing_level > 0
283
281
284 def pendingparentchange(self):
282 def pendingparentchange(self):
285 return self.is_changing_parent()
283 return self.is_changing_parent()
286
284
287 def is_changing_parent(self):
285 def is_changing_parent(self):
288 """Returns true if the dirstate is in the middle of a set of changes
286 """Returns true if the dirstate is in the middle of a set of changes
289 that modify the dirstate parent.
287 that modify the dirstate parent.
290 """
288 """
291 self._ui.deprecwarn(b"dirstate.is_changing_parents", b"6.5")
289 self._ui.deprecwarn(b"dirstate.is_changing_parents", b"6.5")
292 return self.is_changing_parents
290 return self.is_changing_parents
293
291
294 @property
292 @property
295 def is_changing_parents(self):
293 def is_changing_parents(self):
296 """Returns true if the dirstate is in the middle of a set of changes
294 """Returns true if the dirstate is in the middle of a set of changes
297 that modify the dirstate parent.
295 that modify the dirstate parent.
298 """
296 """
299 if self._changing_level <= 0:
297 if self._changing_level <= 0:
300 return False
298 return False
301 return self._change_type == CHANGE_TYPE_PARENTS
299 return self._change_type == CHANGE_TYPE_PARENTS
302
300
303 @property
301 @property
304 def is_changing_files(self):
302 def is_changing_files(self):
305 """Returns true if the dirstate is in the middle of a set of changes
303 """Returns true if the dirstate is in the middle of a set of changes
306 that modify the files tracked or their sources.
304 that modify the files tracked or their sources.
307 """
305 """
308 if self._changing_level <= 0:
306 if self._changing_level <= 0:
309 return False
307 return False
310 return self._change_type == CHANGE_TYPE_FILES
308 return self._change_type == CHANGE_TYPE_FILES
311
309
312 @propertycache
310 @propertycache
313 def _map(self):
311 def _map(self):
314 """Return the dirstate contents (see documentation for dirstatemap)."""
312 """Return the dirstate contents (see documentation for dirstatemap)."""
315 self._map = self._mapcls(
313 self._map = self._mapcls(
316 self._ui,
314 self._ui,
317 self._opener,
315 self._opener,
318 self._root,
316 self._root,
319 self._nodeconstants,
317 self._nodeconstants,
320 self._use_dirstate_v2,
318 self._use_dirstate_v2,
321 )
319 )
322 return self._map
320 return self._map
323
321
324 @property
322 @property
325 def _sparsematcher(self):
323 def _sparsematcher(self):
326 """The matcher for the sparse checkout.
324 """The matcher for the sparse checkout.
327
325
328 The working directory may not include every file from a manifest. The
326 The working directory may not include every file from a manifest. The
329 matcher obtained by this property will match a path if it is to be
327 matcher obtained by this property will match a path if it is to be
330 included in the working directory.
328 included in the working directory.
331
329
332 When sparse if disabled, return None.
330 When sparse if disabled, return None.
333 """
331 """
334 if self._sparsematchfn is None:
332 if self._sparsematchfn is None:
335 return None
333 return None
336 # TODO there is potential to cache this property. For now, the matcher
334 # TODO there is potential to cache this property. For now, the matcher
337 # is resolved on every access. (But the called function does use a
335 # is resolved on every access. (But the called function does use a
338 # cache to keep the lookup fast.)
336 # cache to keep the lookup fast.)
339 return self._sparsematchfn()
337 return self._sparsematchfn()
340
338
341 @repocache(b'branch')
339 @repocache(b'branch')
342 def _branch(self):
340 def _branch(self):
343 try:
341 try:
344 return self._opener.read(b"branch").strip() or b"default"
342 return self._opener.read(b"branch").strip() or b"default"
345 except FileNotFoundError:
343 except FileNotFoundError:
346 return b"default"
344 return b"default"
347
345
348 @property
346 @property
349 def _pl(self):
347 def _pl(self):
350 return self._map.parents()
348 return self._map.parents()
351
349
352 def hasdir(self, d):
350 def hasdir(self, d):
353 return self._map.hastrackeddir(d)
351 return self._map.hastrackeddir(d)
354
352
355 @rootcache(b'.hgignore')
353 @rootcache(b'.hgignore')
356 def _ignore(self):
354 def _ignore(self):
357 files = self._ignorefiles()
355 files = self._ignorefiles()
358 if not files:
356 if not files:
359 return matchmod.never()
357 return matchmod.never()
360
358
361 pats = [b'include:%s' % f for f in files]
359 pats = [b'include:%s' % f for f in files]
362 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
360 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
363
361
364 @propertycache
362 @propertycache
365 def _slash(self):
363 def _slash(self):
366 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
364 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
367
365
368 @propertycache
366 @propertycache
369 def _checklink(self):
367 def _checklink(self):
370 return util.checklink(self._root)
368 return util.checklink(self._root)
371
369
372 @propertycache
370 @propertycache
373 def _checkexec(self):
371 def _checkexec(self):
374 return bool(util.checkexec(self._root))
372 return bool(util.checkexec(self._root))
375
373
376 @propertycache
374 @propertycache
377 def _checkcase(self):
375 def _checkcase(self):
378 return not util.fscasesensitive(self._join(b'.hg'))
376 return not util.fscasesensitive(self._join(b'.hg'))
379
377
380 def _join(self, f):
378 def _join(self, f):
381 # much faster than os.path.join()
379 # much faster than os.path.join()
382 # it's safe because f is always a relative path
380 # it's safe because f is always a relative path
383 return self._rootdir + f
381 return self._rootdir + f
384
382
385 def flagfunc(self, buildfallback):
383 def flagfunc(self, buildfallback):
386 """build a callable that returns flags associated with a filename
384 """build a callable that returns flags associated with a filename
387
385
388 The information is extracted from three possible layers:
386 The information is extracted from three possible layers:
389 1. the file system if it supports the information
387 1. the file system if it supports the information
390 2. the "fallback" information stored in the dirstate if any
388 2. the "fallback" information stored in the dirstate if any
391 3. a more expensive mechanism inferring the flags from the parents.
389 3. a more expensive mechanism inferring the flags from the parents.
392 """
390 """
393
391
394 # small hack to cache the result of buildfallback()
392 # small hack to cache the result of buildfallback()
395 fallback_func = []
393 fallback_func = []
396
394
397 def get_flags(x):
395 def get_flags(x):
398 entry = None
396 entry = None
399 fallback_value = None
397 fallback_value = None
400 try:
398 try:
401 st = os.lstat(self._join(x))
399 st = os.lstat(self._join(x))
402 except OSError:
400 except OSError:
403 return b''
401 return b''
404
402
405 if self._checklink:
403 if self._checklink:
406 if util.statislink(st):
404 if util.statislink(st):
407 return b'l'
405 return b'l'
408 else:
406 else:
409 entry = self.get_entry(x)
407 entry = self.get_entry(x)
410 if entry.has_fallback_symlink:
408 if entry.has_fallback_symlink:
411 if entry.fallback_symlink:
409 if entry.fallback_symlink:
412 return b'l'
410 return b'l'
413 else:
411 else:
414 if not fallback_func:
412 if not fallback_func:
415 fallback_func.append(buildfallback())
413 fallback_func.append(buildfallback())
416 fallback_value = fallback_func[0](x)
414 fallback_value = fallback_func[0](x)
417 if b'l' in fallback_value:
415 if b'l' in fallback_value:
418 return b'l'
416 return b'l'
419
417
420 if self._checkexec:
418 if self._checkexec:
421 if util.statisexec(st):
419 if util.statisexec(st):
422 return b'x'
420 return b'x'
423 else:
421 else:
424 if entry is None:
422 if entry is None:
425 entry = self.get_entry(x)
423 entry = self.get_entry(x)
426 if entry.has_fallback_exec:
424 if entry.has_fallback_exec:
427 if entry.fallback_exec:
425 if entry.fallback_exec:
428 return b'x'
426 return b'x'
429 else:
427 else:
430 if fallback_value is None:
428 if fallback_value is None:
431 if not fallback_func:
429 if not fallback_func:
432 fallback_func.append(buildfallback())
430 fallback_func.append(buildfallback())
433 fallback_value = fallback_func[0](x)
431 fallback_value = fallback_func[0](x)
434 if b'x' in fallback_value:
432 if b'x' in fallback_value:
435 return b'x'
433 return b'x'
436 return b''
434 return b''
437
435
438 return get_flags
436 return get_flags
439
437
440 @propertycache
438 @propertycache
441 def _cwd(self):
439 def _cwd(self):
442 # internal config: ui.forcecwd
440 # internal config: ui.forcecwd
443 forcecwd = self._ui.config(b'ui', b'forcecwd')
441 forcecwd = self._ui.config(b'ui', b'forcecwd')
444 if forcecwd:
442 if forcecwd:
445 return forcecwd
443 return forcecwd
446 return encoding.getcwd()
444 return encoding.getcwd()
447
445
448 def getcwd(self):
446 def getcwd(self):
449 """Return the path from which a canonical path is calculated.
447 """Return the path from which a canonical path is calculated.
450
448
451 This path should be used to resolve file patterns or to convert
449 This path should be used to resolve file patterns or to convert
452 canonical paths back to file paths for display. It shouldn't be
450 canonical paths back to file paths for display. It shouldn't be
453 used to get real file paths. Use vfs functions instead.
451 used to get real file paths. Use vfs functions instead.
454 """
452 """
455 cwd = self._cwd
453 cwd = self._cwd
456 if cwd == self._root:
454 if cwd == self._root:
457 return b''
455 return b''
458 # self._root ends with a path separator if self._root is '/' or 'C:\'
456 # self._root ends with a path separator if self._root is '/' or 'C:\'
459 rootsep = self._root
457 rootsep = self._root
460 if not util.endswithsep(rootsep):
458 if not util.endswithsep(rootsep):
461 rootsep += pycompat.ossep
459 rootsep += pycompat.ossep
462 if cwd.startswith(rootsep):
460 if cwd.startswith(rootsep):
463 return cwd[len(rootsep) :]
461 return cwd[len(rootsep) :]
464 else:
462 else:
465 # we're outside the repo. return an absolute path.
463 # we're outside the repo. return an absolute path.
466 return cwd
464 return cwd
467
465
468 def pathto(self, f, cwd=None):
466 def pathto(self, f, cwd=None):
469 if cwd is None:
467 if cwd is None:
470 cwd = self.getcwd()
468 cwd = self.getcwd()
471 path = util.pathto(self._root, cwd, f)
469 path = util.pathto(self._root, cwd, f)
472 if self._slash:
470 if self._slash:
473 return util.pconvert(path)
471 return util.pconvert(path)
474 return path
472 return path
475
473
476 def get_entry(self, path):
474 def get_entry(self, path):
477 """return a DirstateItem for the associated path"""
475 """return a DirstateItem for the associated path"""
478 entry = self._map.get(path)
476 entry = self._map.get(path)
479 if entry is None:
477 if entry is None:
480 return DirstateItem()
478 return DirstateItem()
481 return entry
479 return entry
482
480
483 def __contains__(self, key):
481 def __contains__(self, key):
484 return key in self._map
482 return key in self._map
485
483
486 def __iter__(self):
484 def __iter__(self):
487 return iter(sorted(self._map))
485 return iter(sorted(self._map))
488
486
489 def items(self):
487 def items(self):
490 return self._map.items()
488 return self._map.items()
491
489
492 iteritems = items
490 iteritems = items
493
491
494 def parents(self):
492 def parents(self):
495 return [self._validate(p) for p in self._pl]
493 return [self._validate(p) for p in self._pl]
496
494
497 def p1(self):
495 def p1(self):
498 return self._validate(self._pl[0])
496 return self._validate(self._pl[0])
499
497
500 def p2(self):
498 def p2(self):
501 return self._validate(self._pl[1])
499 return self._validate(self._pl[1])
502
500
503 @property
501 @property
504 def in_merge(self):
502 def in_merge(self):
505 """True if a merge is in progress"""
503 """True if a merge is in progress"""
506 return self._pl[1] != self._nodeconstants.nullid
504 return self._pl[1] != self._nodeconstants.nullid
507
505
508 def branch(self):
506 def branch(self):
509 return encoding.tolocal(self._branch)
507 return encoding.tolocal(self._branch)
510
508
511 @requires_changing_parents
509 @requires_changing_parents
512 def setparents(self, p1, p2=None):
510 def setparents(self, p1, p2=None):
513 """Set dirstate parents to p1 and p2.
511 """Set dirstate parents to p1 and p2.
514
512
515 When moving from two parents to one, "merged" entries a
513 When moving from two parents to one, "merged" entries a
516 adjusted to normal and previous copy records discarded and
514 adjusted to normal and previous copy records discarded and
517 returned by the call.
515 returned by the call.
518
516
519 See localrepo.setparents()
517 See localrepo.setparents()
520 """
518 """
521 if p2 is None:
519 if p2 is None:
522 p2 = self._nodeconstants.nullid
520 p2 = self._nodeconstants.nullid
523 if self._changing_level == 0:
521 if self._changing_level == 0:
524 raise ValueError(
522 raise ValueError(
525 b"cannot set dirstate parent outside of "
523 b"cannot set dirstate parent outside of "
526 b"dirstate.changing_parents context manager"
524 b"dirstate.changing_parents context manager"
527 )
525 )
528
526
529 self._dirty = True
527 self._dirty = True
530 oldp2 = self._pl[1]
528 oldp2 = self._pl[1]
531 if self._origpl is None:
529 if self._origpl is None:
532 self._origpl = self._pl
530 self._origpl = self._pl
533 nullid = self._nodeconstants.nullid
531 nullid = self._nodeconstants.nullid
534 # True if we need to fold p2 related state back to a linear case
532 # True if we need to fold p2 related state back to a linear case
535 fold_p2 = oldp2 != nullid and p2 == nullid
533 fold_p2 = oldp2 != nullid and p2 == nullid
536 return self._map.setparents(p1, p2, fold_p2=fold_p2)
534 return self._map.setparents(p1, p2, fold_p2=fold_p2)
537
535
538 def setbranch(self, branch):
536 def setbranch(self, branch):
539 self.__class__._branch.set(self, encoding.fromlocal(branch))
537 self.__class__._branch.set(self, encoding.fromlocal(branch))
540 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
538 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
541 try:
539 try:
542 f.write(self._branch + b'\n')
540 f.write(self._branch + b'\n')
543 f.close()
541 f.close()
544
542
545 # make sure filecache has the correct stat info for _branch after
543 # make sure filecache has the correct stat info for _branch after
546 # replacing the underlying file
544 # replacing the underlying file
547 ce = self._filecache[b'_branch']
545 ce = self._filecache[b'_branch']
548 if ce:
546 if ce:
549 ce.refresh()
547 ce.refresh()
550 except: # re-raises
548 except: # re-raises
551 f.discard()
549 f.discard()
552 raise
550 raise
553
551
554 def invalidate(self):
552 def invalidate(self):
555 """Causes the next access to reread the dirstate.
553 """Causes the next access to reread the dirstate.
556
554
557 This is different from localrepo.invalidatedirstate() because it always
555 This is different from localrepo.invalidatedirstate() because it always
558 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
556 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
559 check whether the dirstate has changed before rereading it."""
557 check whether the dirstate has changed before rereading it."""
560
558
561 for a in ("_map", "_branch", "_ignore"):
559 for a in ("_map", "_branch", "_ignore"):
562 if a in self.__dict__:
560 if a in self.__dict__:
563 delattr(self, a)
561 delattr(self, a)
564 self._dirty = False
562 self._dirty = False
565 self._dirty_tracked_set = False
563 self._dirty_tracked_set = False
566 self._invalidated_context = self._changing_level > 0
564 self._invalidated_context = self._changing_level > 0
567 self._origpl = None
565 self._origpl = None
568
566
569 @requires_changing_any
567 @requires_changing_any
570 def copy(self, source, dest):
568 def copy(self, source, dest):
571 """Mark dest as a copy of source. Unmark dest if source is None."""
569 """Mark dest as a copy of source. Unmark dest if source is None."""
572 if source == dest:
570 if source == dest:
573 return
571 return
574 self._dirty = True
572 self._dirty = True
575 if source is not None:
573 if source is not None:
576 self._check_sparse(source)
574 self._check_sparse(source)
577 self._map.copymap[dest] = source
575 self._map.copymap[dest] = source
578 else:
576 else:
579 self._map.copymap.pop(dest, None)
577 self._map.copymap.pop(dest, None)
580
578
581 def copied(self, file):
579 def copied(self, file):
582 return self._map.copymap.get(file, None)
580 return self._map.copymap.get(file, None)
583
581
584 def copies(self):
582 def copies(self):
585 return self._map.copymap
583 return self._map.copymap
586
584
587 @requires_changing_files
585 @requires_changing_files
588 def set_tracked(self, filename, reset_copy=False):
586 def set_tracked(self, filename, reset_copy=False):
589 """a "public" method for generic code to mark a file as tracked
587 """a "public" method for generic code to mark a file as tracked
590
588
591 This function is to be called outside of "update/merge" case. For
589 This function is to be called outside of "update/merge" case. For
592 example by a command like `hg add X`.
590 example by a command like `hg add X`.
593
591
594 if reset_copy is set, any existing copy information will be dropped.
592 if reset_copy is set, any existing copy information will be dropped.
595
593
596 return True the file was previously untracked, False otherwise.
594 return True the file was previously untracked, False otherwise.
597 """
595 """
598 self._dirty = True
596 self._dirty = True
599 entry = self._map.get(filename)
597 entry = self._map.get(filename)
600 if entry is None or not entry.tracked:
598 if entry is None or not entry.tracked:
601 self._check_new_tracked_filename(filename)
599 self._check_new_tracked_filename(filename)
602 pre_tracked = self._map.set_tracked(filename)
600 pre_tracked = self._map.set_tracked(filename)
603 if reset_copy:
601 if reset_copy:
604 self._map.copymap.pop(filename, None)
602 self._map.copymap.pop(filename, None)
605 if pre_tracked:
603 if pre_tracked:
606 self._dirty_tracked_set = True
604 self._dirty_tracked_set = True
607 return pre_tracked
605 return pre_tracked
608
606
609 @requires_changing_files
607 @requires_changing_files
610 def set_untracked(self, filename):
608 def set_untracked(self, filename):
611 """a "public" method for generic code to mark a file as untracked
609 """a "public" method for generic code to mark a file as untracked
612
610
613 This function is to be called outside of "update/merge" case. For
611 This function is to be called outside of "update/merge" case. For
614 example by a command like `hg remove X`.
612 example by a command like `hg remove X`.
615
613
616 return True the file was previously tracked, False otherwise.
614 return True the file was previously tracked, False otherwise.
617 """
615 """
618 ret = self._map.set_untracked(filename)
616 ret = self._map.set_untracked(filename)
619 if ret:
617 if ret:
620 self._dirty = True
618 self._dirty = True
621 self._dirty_tracked_set = True
619 self._dirty_tracked_set = True
622 return ret
620 return ret
623
621
624 @requires_not_changing_parents
622 @requires_not_changing_parents
625 def set_clean(self, filename, parentfiledata):
623 def set_clean(self, filename, parentfiledata):
626 """record that the current state of the file on disk is known to be clean"""
624 """record that the current state of the file on disk is known to be clean"""
627 self._dirty = True
625 self._dirty = True
628 if not self._map[filename].tracked:
626 if not self._map[filename].tracked:
629 self._check_new_tracked_filename(filename)
627 self._check_new_tracked_filename(filename)
630 (mode, size, mtime) = parentfiledata
628 (mode, size, mtime) = parentfiledata
631 self._map.set_clean(filename, mode, size, mtime)
629 self._map.set_clean(filename, mode, size, mtime)
632
630
633 @requires_not_changing_parents
631 @requires_not_changing_parents
634 def set_possibly_dirty(self, filename):
632 def set_possibly_dirty(self, filename):
635 """record that the current state of the file on disk is unknown"""
633 """record that the current state of the file on disk is unknown"""
636 self._dirty = True
634 self._dirty = True
637 self._map.set_possibly_dirty(filename)
635 self._map.set_possibly_dirty(filename)
638
636
639 @requires_changing_parents
637 @requires_changing_parents
640 def update_file_p1(
638 def update_file_p1(
641 self,
639 self,
642 filename,
640 filename,
643 p1_tracked,
641 p1_tracked,
644 ):
642 ):
645 """Set a file as tracked in the parent (or not)
643 """Set a file as tracked in the parent (or not)
646
644
647 This is to be called when adjust the dirstate to a new parent after an history
645 This is to be called when adjust the dirstate to a new parent after an history
648 rewriting operation.
646 rewriting operation.
649
647
650 It should not be called during a merge (p2 != nullid) and only within
648 It should not be called during a merge (p2 != nullid) and only within
651 a `with dirstate.changing_parents(repo):` context.
649 a `with dirstate.changing_parents(repo):` context.
652 """
650 """
653 if self.in_merge:
651 if self.in_merge:
654 msg = b'update_file_reference should not be called when merging'
652 msg = b'update_file_reference should not be called when merging'
655 raise error.ProgrammingError(msg)
653 raise error.ProgrammingError(msg)
656 entry = self._map.get(filename)
654 entry = self._map.get(filename)
657 if entry is None:
655 if entry is None:
658 wc_tracked = False
656 wc_tracked = False
659 else:
657 else:
660 wc_tracked = entry.tracked
658 wc_tracked = entry.tracked
661 if not (p1_tracked or wc_tracked):
659 if not (p1_tracked or wc_tracked):
662 # the file is no longer relevant to anyone
660 # the file is no longer relevant to anyone
663 if self._map.get(filename) is not None:
661 if self._map.get(filename) is not None:
664 self._map.reset_state(filename)
662 self._map.reset_state(filename)
665 self._dirty = True
663 self._dirty = True
666 elif (not p1_tracked) and wc_tracked:
664 elif (not p1_tracked) and wc_tracked:
667 if entry is not None and entry.added:
665 if entry is not None and entry.added:
668 return # avoid dropping copy information (maybe?)
666 return # avoid dropping copy information (maybe?)
669
667
670 self._map.reset_state(
668 self._map.reset_state(
671 filename,
669 filename,
672 wc_tracked,
670 wc_tracked,
673 p1_tracked,
671 p1_tracked,
674 # the underlying reference might have changed, we will have to
672 # the underlying reference might have changed, we will have to
675 # check it.
673 # check it.
676 has_meaningful_mtime=False,
674 has_meaningful_mtime=False,
677 )
675 )
678
676
679 @requires_changing_parents
677 @requires_changing_parents
680 def update_file(
678 def update_file(
681 self,
679 self,
682 filename,
680 filename,
683 wc_tracked,
681 wc_tracked,
684 p1_tracked,
682 p1_tracked,
685 p2_info=False,
683 p2_info=False,
686 possibly_dirty=False,
684 possibly_dirty=False,
687 parentfiledata=None,
685 parentfiledata=None,
688 ):
686 ):
689 """update the information about a file in the dirstate
687 """update the information about a file in the dirstate
690
688
691 This is to be called when the direstates parent changes to keep track
689 This is to be called when the direstates parent changes to keep track
692 of what is the file situation in regards to the working copy and its parent.
690 of what is the file situation in regards to the working copy and its parent.
693
691
694 This function must be called within a `dirstate.changing_parents` context.
692 This function must be called within a `dirstate.changing_parents` context.
695
693
696 note: the API is at an early stage and we might need to adjust it
694 note: the API is at an early stage and we might need to adjust it
697 depending of what information ends up being relevant and useful to
695 depending of what information ends up being relevant and useful to
698 other processing.
696 other processing.
699 """
697 """
700 self._update_file(
698 self._update_file(
701 filename=filename,
699 filename=filename,
702 wc_tracked=wc_tracked,
700 wc_tracked=wc_tracked,
703 p1_tracked=p1_tracked,
701 p1_tracked=p1_tracked,
704 p2_info=p2_info,
702 p2_info=p2_info,
705 possibly_dirty=possibly_dirty,
703 possibly_dirty=possibly_dirty,
706 parentfiledata=parentfiledata,
704 parentfiledata=parentfiledata,
707 )
705 )
708
706
709 # XXX since this make the dirstate dirty, we should enforce that it is done
707 # XXX since this make the dirstate dirty, we should enforce that it is done
710 # withing an appropriate change-context that scope the change and ensure it
708 # withing an appropriate change-context that scope the change and ensure it
711 # eventually get written on disk (or rolled back)
709 # eventually get written on disk (or rolled back)
712 def hacky_extension_update_file(self, *args, **kwargs):
710 def hacky_extension_update_file(self, *args, **kwargs):
713 """NEVER USE THIS, YOU DO NOT NEED IT
711 """NEVER USE THIS, YOU DO NOT NEED IT
714
712
715 This function is a variant of "update_file" to be called by a small set
713 This function is a variant of "update_file" to be called by a small set
716 of extensions, it also adjust the internal state of file, but can be
714 of extensions, it also adjust the internal state of file, but can be
717 called outside an `changing_parents` context.
715 called outside an `changing_parents` context.
718
716
719 A very small number of extension meddle with the working copy content
717 A very small number of extension meddle with the working copy content
720 in a way that requires to adjust the dirstate accordingly. At the time
718 in a way that requires to adjust the dirstate accordingly. At the time
721 this command is written they are :
719 this command is written they are :
722 - keyword,
720 - keyword,
723 - largefile,
721 - largefile,
724 PLEASE DO NOT GROW THIS LIST ANY FURTHER.
722 PLEASE DO NOT GROW THIS LIST ANY FURTHER.
725
723
726 This function could probably be replaced by more semantic one (like
724 This function could probably be replaced by more semantic one (like
727 "adjust expected size" or "always revalidate file content", etc)
725 "adjust expected size" or "always revalidate file content", etc)
728 however at the time where this is writen, this is too much of a detour
726 however at the time where this is writen, this is too much of a detour
729 to be considered.
727 to be considered.
730 """
728 """
731 self._update_file(
729 self._update_file(
732 *args,
730 *args,
733 **kwargs,
731 **kwargs,
734 )
732 )
735
733
736 def _update_file(
734 def _update_file(
737 self,
735 self,
738 filename,
736 filename,
739 wc_tracked,
737 wc_tracked,
740 p1_tracked,
738 p1_tracked,
741 p2_info=False,
739 p2_info=False,
742 possibly_dirty=False,
740 possibly_dirty=False,
743 parentfiledata=None,
741 parentfiledata=None,
744 ):
742 ):
745
743
746 # note: I do not think we need to double check name clash here since we
744 # note: I do not think we need to double check name clash here since we
747 # are in a update/merge case that should already have taken care of
745 # are in a update/merge case that should already have taken care of
748 # this. The test agrees
746 # this. The test agrees
749
747
750 self._dirty = True
748 self._dirty = True
751 old_entry = self._map.get(filename)
749 old_entry = self._map.get(filename)
752 if old_entry is None:
750 if old_entry is None:
753 prev_tracked = False
751 prev_tracked = False
754 else:
752 else:
755 prev_tracked = old_entry.tracked
753 prev_tracked = old_entry.tracked
756 if prev_tracked != wc_tracked:
754 if prev_tracked != wc_tracked:
757 self._dirty_tracked_set = True
755 self._dirty_tracked_set = True
758
756
759 self._map.reset_state(
757 self._map.reset_state(
760 filename,
758 filename,
761 wc_tracked,
759 wc_tracked,
762 p1_tracked,
760 p1_tracked,
763 p2_info=p2_info,
761 p2_info=p2_info,
764 has_meaningful_mtime=not possibly_dirty,
762 has_meaningful_mtime=not possibly_dirty,
765 parentfiledata=parentfiledata,
763 parentfiledata=parentfiledata,
766 )
764 )
767
765
768 def _check_new_tracked_filename(self, filename):
766 def _check_new_tracked_filename(self, filename):
769 scmutil.checkfilename(filename)
767 scmutil.checkfilename(filename)
770 if self._map.hastrackeddir(filename):
768 if self._map.hastrackeddir(filename):
771 msg = _(b'directory %r already in dirstate')
769 msg = _(b'directory %r already in dirstate')
772 msg %= pycompat.bytestr(filename)
770 msg %= pycompat.bytestr(filename)
773 raise error.Abort(msg)
771 raise error.Abort(msg)
774 # shadows
772 # shadows
775 for d in pathutil.finddirs(filename):
773 for d in pathutil.finddirs(filename):
776 if self._map.hastrackeddir(d):
774 if self._map.hastrackeddir(d):
777 break
775 break
778 entry = self._map.get(d)
776 entry = self._map.get(d)
779 if entry is not None and not entry.removed:
777 if entry is not None and not entry.removed:
780 msg = _(b'file %r in dirstate clashes with %r')
778 msg = _(b'file %r in dirstate clashes with %r')
781 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
779 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
782 raise error.Abort(msg)
780 raise error.Abort(msg)
783 self._check_sparse(filename)
781 self._check_sparse(filename)
784
782
785 def _check_sparse(self, filename):
783 def _check_sparse(self, filename):
786 """Check that a filename is inside the sparse profile"""
784 """Check that a filename is inside the sparse profile"""
787 sparsematch = self._sparsematcher
785 sparsematch = self._sparsematcher
788 if sparsematch is not None and not sparsematch.always():
786 if sparsematch is not None and not sparsematch.always():
789 if not sparsematch(filename):
787 if not sparsematch(filename):
790 msg = _(b"cannot add '%s' - it is outside the sparse checkout")
788 msg = _(b"cannot add '%s' - it is outside the sparse checkout")
791 hint = _(
789 hint = _(
792 b'include file with `hg debugsparse --include <pattern>` or use '
790 b'include file with `hg debugsparse --include <pattern>` or use '
793 b'`hg add -s <file>` to include file directory while adding'
791 b'`hg add -s <file>` to include file directory while adding'
794 )
792 )
795 raise error.Abort(msg % filename, hint=hint)
793 raise error.Abort(msg % filename, hint=hint)
796
794
797 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
795 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
798 if exists is None:
796 if exists is None:
799 exists = os.path.lexists(os.path.join(self._root, path))
797 exists = os.path.lexists(os.path.join(self._root, path))
800 if not exists:
798 if not exists:
801 # Maybe a path component exists
799 # Maybe a path component exists
802 if not ignoremissing and b'/' in path:
800 if not ignoremissing and b'/' in path:
803 d, f = path.rsplit(b'/', 1)
801 d, f = path.rsplit(b'/', 1)
804 d = self._normalize(d, False, ignoremissing, None)
802 d = self._normalize(d, False, ignoremissing, None)
805 folded = d + b"/" + f
803 folded = d + b"/" + f
806 else:
804 else:
807 # No path components, preserve original case
805 # No path components, preserve original case
808 folded = path
806 folded = path
809 else:
807 else:
810 # recursively normalize leading directory components
808 # recursively normalize leading directory components
811 # against dirstate
809 # against dirstate
812 if b'/' in normed:
810 if b'/' in normed:
813 d, f = normed.rsplit(b'/', 1)
811 d, f = normed.rsplit(b'/', 1)
814 d = self._normalize(d, False, ignoremissing, True)
812 d = self._normalize(d, False, ignoremissing, True)
815 r = self._root + b"/" + d
813 r = self._root + b"/" + d
816 folded = d + b"/" + util.fspath(f, r)
814 folded = d + b"/" + util.fspath(f, r)
817 else:
815 else:
818 folded = util.fspath(normed, self._root)
816 folded = util.fspath(normed, self._root)
819 storemap[normed] = folded
817 storemap[normed] = folded
820
818
821 return folded
819 return folded
822
820
823 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
821 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
824 normed = util.normcase(path)
822 normed = util.normcase(path)
825 folded = self._map.filefoldmap.get(normed, None)
823 folded = self._map.filefoldmap.get(normed, None)
826 if folded is None:
824 if folded is None:
827 if isknown:
825 if isknown:
828 folded = path
826 folded = path
829 else:
827 else:
830 folded = self._discoverpath(
828 folded = self._discoverpath(
831 path, normed, ignoremissing, exists, self._map.filefoldmap
829 path, normed, ignoremissing, exists, self._map.filefoldmap
832 )
830 )
833 return folded
831 return folded
834
832
835 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
833 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
836 normed = util.normcase(path)
834 normed = util.normcase(path)
837 folded = self._map.filefoldmap.get(normed, None)
835 folded = self._map.filefoldmap.get(normed, None)
838 if folded is None:
836 if folded is None:
839 folded = self._map.dirfoldmap.get(normed, None)
837 folded = self._map.dirfoldmap.get(normed, None)
840 if folded is None:
838 if folded is None:
841 if isknown:
839 if isknown:
842 folded = path
840 folded = path
843 else:
841 else:
844 # store discovered result in dirfoldmap so that future
842 # store discovered result in dirfoldmap so that future
845 # normalizefile calls don't start matching directories
843 # normalizefile calls don't start matching directories
846 folded = self._discoverpath(
844 folded = self._discoverpath(
847 path, normed, ignoremissing, exists, self._map.dirfoldmap
845 path, normed, ignoremissing, exists, self._map.dirfoldmap
848 )
846 )
849 return folded
847 return folded
850
848
851 def normalize(self, path, isknown=False, ignoremissing=False):
849 def normalize(self, path, isknown=False, ignoremissing=False):
852 """
850 """
853 normalize the case of a pathname when on a casefolding filesystem
851 normalize the case of a pathname when on a casefolding filesystem
854
852
855 isknown specifies whether the filename came from walking the
853 isknown specifies whether the filename came from walking the
856 disk, to avoid extra filesystem access.
854 disk, to avoid extra filesystem access.
857
855
858 If ignoremissing is True, missing path are returned
856 If ignoremissing is True, missing path are returned
859 unchanged. Otherwise, we try harder to normalize possibly
857 unchanged. Otherwise, we try harder to normalize possibly
860 existing path components.
858 existing path components.
861
859
862 The normalized case is determined based on the following precedence:
860 The normalized case is determined based on the following precedence:
863
861
864 - version of name already stored in the dirstate
862 - version of name already stored in the dirstate
865 - version of name stored on disk
863 - version of name stored on disk
866 - version provided via command arguments
864 - version provided via command arguments
867 """
865 """
868
866
869 if self._checkcase:
867 if self._checkcase:
870 return self._normalize(path, isknown, ignoremissing)
868 return self._normalize(path, isknown, ignoremissing)
871 return path
869 return path
872
870
873 # XXX this method is barely used, as a result:
871 # XXX this method is barely used, as a result:
874 # - its semantic is unclear
872 # - its semantic is unclear
875 # - do we really needs it ?
873 # - do we really needs it ?
876 @requires_changing_parents
874 @requires_changing_parents
877 def clear(self):
875 def clear(self):
878 self._map.clear()
876 self._map.clear()
879 self._dirty = True
877 self._dirty = True
880
878
881 @requires_changing_parents
879 @requires_changing_parents
882 def rebuild(self, parent, allfiles, changedfiles=None):
880 def rebuild(self, parent, allfiles, changedfiles=None):
883 matcher = self._sparsematcher
881 matcher = self._sparsematcher
884 if matcher is not None and not matcher.always():
882 if matcher is not None and not matcher.always():
885 # should not add non-matching files
883 # should not add non-matching files
886 allfiles = [f for f in allfiles if matcher(f)]
884 allfiles = [f for f in allfiles if matcher(f)]
887 if changedfiles:
885 if changedfiles:
888 changedfiles = [f for f in changedfiles if matcher(f)]
886 changedfiles = [f for f in changedfiles if matcher(f)]
889
887
890 if changedfiles is not None:
888 if changedfiles is not None:
891 # these files will be deleted from the dirstate when they are
889 # these files will be deleted from the dirstate when they are
892 # not found to be in allfiles
890 # not found to be in allfiles
893 dirstatefilestoremove = {f for f in self if not matcher(f)}
891 dirstatefilestoremove = {f for f in self if not matcher(f)}
894 changedfiles = dirstatefilestoremove.union(changedfiles)
892 changedfiles = dirstatefilestoremove.union(changedfiles)
895
893
896 if changedfiles is None:
894 if changedfiles is None:
897 # Rebuild entire dirstate
895 # Rebuild entire dirstate
898 to_lookup = allfiles
896 to_lookup = allfiles
899 to_drop = []
897 to_drop = []
900 self.clear()
898 self.clear()
901 elif len(changedfiles) < 10:
899 elif len(changedfiles) < 10:
902 # Avoid turning allfiles into a set, which can be expensive if it's
900 # Avoid turning allfiles into a set, which can be expensive if it's
903 # large.
901 # large.
904 to_lookup = []
902 to_lookup = []
905 to_drop = []
903 to_drop = []
906 for f in changedfiles:
904 for f in changedfiles:
907 if f in allfiles:
905 if f in allfiles:
908 to_lookup.append(f)
906 to_lookup.append(f)
909 else:
907 else:
910 to_drop.append(f)
908 to_drop.append(f)
911 else:
909 else:
912 changedfilesset = set(changedfiles)
910 changedfilesset = set(changedfiles)
913 to_lookup = changedfilesset & set(allfiles)
911 to_lookup = changedfilesset & set(allfiles)
914 to_drop = changedfilesset - to_lookup
912 to_drop = changedfilesset - to_lookup
915
913
916 if self._origpl is None:
914 if self._origpl is None:
917 self._origpl = self._pl
915 self._origpl = self._pl
918 self._map.setparents(parent, self._nodeconstants.nullid)
916 self._map.setparents(parent, self._nodeconstants.nullid)
919
917
920 for f in to_lookup:
918 for f in to_lookup:
921 if self.in_merge:
919 if self.in_merge:
922 self.set_tracked(f)
920 self.set_tracked(f)
923 else:
921 else:
924 self._map.reset_state(
922 self._map.reset_state(
925 f,
923 f,
926 wc_tracked=True,
924 wc_tracked=True,
927 p1_tracked=True,
925 p1_tracked=True,
928 )
926 )
929 for f in to_drop:
927 for f in to_drop:
930 self._map.reset_state(f)
928 self._map.reset_state(f)
931
929
932 self._dirty = True
930 self._dirty = True
933
931
934 def identity(self):
932 def identity(self):
935 """Return identity of dirstate itself to detect changing in storage
933 """Return identity of dirstate itself to detect changing in storage
936
934
937 If identity of previous dirstate is equal to this, writing
935 If identity of previous dirstate is equal to this, writing
938 changes based on the former dirstate out can keep consistency.
936 changes based on the former dirstate out can keep consistency.
939 """
937 """
940 return self._map.identity
938 return self._map.identity
941
939
942 def write(self, tr):
940 def write(self, tr):
943 if not self._dirty:
941 if not self._dirty:
944 return
942 return
945
943
946 write_key = self._use_tracked_hint and self._dirty_tracked_set
944 write_key = self._use_tracked_hint and self._dirty_tracked_set
947 if tr:
945 if tr:
948 # make sure we invalidate the current change on abort
946 # make sure we invalidate the current change on abort
949 if tr is not None:
947 if tr is not None:
950 tr.addabort(
948 tr.addabort(
951 b'dirstate-invalidate',
949 b'dirstate-invalidate',
952 lambda tr: self.invalidate(),
950 lambda tr: self.invalidate(),
953 )
951 )
954 # delay writing in-memory changes out
952 # delay writing in-memory changes out
955 tr.addfilegenerator(
953 tr.addfilegenerator(
956 b'dirstate-1-main',
954 b'dirstate-1-main',
957 (self._filename,),
955 (self._filename,),
958 lambda f: self._writedirstate(tr, f),
956 lambda f: self._writedirstate(tr, f),
959 location=b'plain',
957 location=b'plain',
960 post_finalize=True,
958 post_finalize=True,
961 )
959 )
962 if write_key:
960 if write_key:
963 tr.addfilegenerator(
961 tr.addfilegenerator(
964 b'dirstate-2-key-post',
962 b'dirstate-2-key-post',
965 (self._filename_th,),
963 (self._filename_th,),
966 lambda f: self._write_tracked_hint(tr, f),
964 lambda f: self._write_tracked_hint(tr, f),
967 location=b'plain',
965 location=b'plain',
968 post_finalize=True,
966 post_finalize=True,
969 )
967 )
970 return
968 return
971
969
972 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
970 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
973 with file(self._filename) as f:
971 with file(self._filename) as f:
974 self._writedirstate(tr, f)
972 self._writedirstate(tr, f)
975 if write_key:
973 if write_key:
976 # we update the key-file after writing to make sure reader have a
974 # we update the key-file after writing to make sure reader have a
977 # key that match the newly written content
975 # key that match the newly written content
978 with file(self._filename_th) as f:
976 with file(self._filename_th) as f:
979 self._write_tracked_hint(tr, f)
977 self._write_tracked_hint(tr, f)
980
978
981 def delete_tracked_hint(self):
979 def delete_tracked_hint(self):
982 """remove the tracked_hint file
980 """remove the tracked_hint file
983
981
984 To be used by format downgrades operation"""
982 To be used by format downgrades operation"""
985 self._opener.unlink(self._filename_th)
983 self._opener.unlink(self._filename_th)
986 self._use_tracked_hint = False
984 self._use_tracked_hint = False
987
985
988 def addparentchangecallback(self, category, callback):
986 def addparentchangecallback(self, category, callback):
989 """add a callback to be called when the wd parents are changed
987 """add a callback to be called when the wd parents are changed
990
988
991 Callback will be called with the following arguments:
989 Callback will be called with the following arguments:
992 dirstate, (oldp1, oldp2), (newp1, newp2)
990 dirstate, (oldp1, oldp2), (newp1, newp2)
993
991
994 Category is a unique identifier to allow overwriting an old callback
992 Category is a unique identifier to allow overwriting an old callback
995 with a newer callback.
993 with a newer callback.
996 """
994 """
997 self._plchangecallbacks[category] = callback
995 self._plchangecallbacks[category] = callback
998
996
999 def _writedirstate(self, tr, st):
997 def _writedirstate(self, tr, st):
1000 # notify callbacks about parents change
998 # notify callbacks about parents change
1001 if self._origpl is not None and self._origpl != self._pl:
999 if self._origpl is not None and self._origpl != self._pl:
1002 for c, callback in sorted(self._plchangecallbacks.items()):
1000 for c, callback in sorted(self._plchangecallbacks.items()):
1003 callback(self, self._origpl, self._pl)
1001 callback(self, self._origpl, self._pl)
1004 self._origpl = None
1002 self._origpl = None
1005 self._map.write(tr, st)
1003 self._map.write(tr, st)
1006 self._dirty = False
1004 self._dirty = False
1007 self._dirty_tracked_set = False
1005 self._dirty_tracked_set = False
1008
1006
1009 def _write_tracked_hint(self, tr, f):
1007 def _write_tracked_hint(self, tr, f):
1010 key = node.hex(uuid.uuid4().bytes)
1008 key = node.hex(uuid.uuid4().bytes)
1011 f.write(b"1\n%s\n" % key) # 1 is the format version
1009 f.write(b"1\n%s\n" % key) # 1 is the format version
1012
1010
1013 def _dirignore(self, f):
1011 def _dirignore(self, f):
1014 if self._ignore(f):
1012 if self._ignore(f):
1015 return True
1013 return True
1016 for p in pathutil.finddirs(f):
1014 for p in pathutil.finddirs(f):
1017 if self._ignore(p):
1015 if self._ignore(p):
1018 return True
1016 return True
1019 return False
1017 return False
1020
1018
1021 def _ignorefiles(self):
1019 def _ignorefiles(self):
1022 files = []
1020 files = []
1023 if os.path.exists(self._join(b'.hgignore')):
1021 if os.path.exists(self._join(b'.hgignore')):
1024 files.append(self._join(b'.hgignore'))
1022 files.append(self._join(b'.hgignore'))
1025 for name, path in self._ui.configitems(b"ui"):
1023 for name, path in self._ui.configitems(b"ui"):
1026 if name == b'ignore' or name.startswith(b'ignore.'):
1024 if name == b'ignore' or name.startswith(b'ignore.'):
1027 # we need to use os.path.join here rather than self._join
1025 # we need to use os.path.join here rather than self._join
1028 # because path is arbitrary and user-specified
1026 # because path is arbitrary and user-specified
1029 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1027 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1030 return files
1028 return files
1031
1029
1032 def _ignorefileandline(self, f):
1030 def _ignorefileandline(self, f):
1033 files = collections.deque(self._ignorefiles())
1031 files = collections.deque(self._ignorefiles())
1034 visited = set()
1032 visited = set()
1035 while files:
1033 while files:
1036 i = files.popleft()
1034 i = files.popleft()
1037 patterns = matchmod.readpatternfile(
1035 patterns = matchmod.readpatternfile(
1038 i, self._ui.warn, sourceinfo=True
1036 i, self._ui.warn, sourceinfo=True
1039 )
1037 )
1040 for pattern, lineno, line in patterns:
1038 for pattern, lineno, line in patterns:
1041 kind, p = matchmod._patsplit(pattern, b'glob')
1039 kind, p = matchmod._patsplit(pattern, b'glob')
1042 if kind == b"subinclude":
1040 if kind == b"subinclude":
1043 if p not in visited:
1041 if p not in visited:
1044 files.append(p)
1042 files.append(p)
1045 continue
1043 continue
1046 m = matchmod.match(
1044 m = matchmod.match(
1047 self._root, b'', [], [pattern], warn=self._ui.warn
1045 self._root, b'', [], [pattern], warn=self._ui.warn
1048 )
1046 )
1049 if m(f):
1047 if m(f):
1050 return (i, lineno, line)
1048 return (i, lineno, line)
1051 visited.add(i)
1049 visited.add(i)
1052 return (None, -1, b"")
1050 return (None, -1, b"")
1053
1051
    def _walkexplicit(self, match, subrepos):
        """Get stat data about the files explicitly specified by match.

        Return a triple (results, dirsfound, dirsnotfound).
        - results is a mapping from filename to stat result. It also contains
          listings mapping subrepos and .hg to None.
        - dirsfound is a list of files found to be directories.
        - dirsnotfound is a list of files that the dirstate thinks are
          directories and that were not found."""

        def badtype(mode):
            # Translate an unsupported st_mode file type into a message for
            # match.bad().
            kind = _(b'unknown')
            if stat.S_ISCHR(mode):
                kind = _(b'character device')
            elif stat.S_ISBLK(mode):
                kind = _(b'block device')
            elif stat.S_ISFIFO(mode):
                kind = _(b'fifo')
            elif stat.S_ISSOCK(mode):
                kind = _(b'socket')
            elif stat.S_ISDIR(mode):
                kind = _(b'directory')
            return _(b'unsupported file type (type is %s)') % kind

        # Bind frequently-used attributes/functions to locals: this method can
        # run once per explicitly-listed file, so the cheaper LOAD_FAST lookups
        # matter.
        badfn = match.bad
        dmap = self._map
        lstat = os.lstat
        getkind = stat.S_IFMT
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join
        dirsfound = []
        foundadd = dirsfound.append
        dirsnotfound = []
        notfoundadd = dirsnotfound.append

        if not match.isexact() and self._checkcase:
            normalize = self._normalize
        else:
            normalize = None

        # Drop explicit files that live inside a subrepo: both lists are
        # sorted, so a single merge-style pass suffices.
        files = sorted(match.files())
        subrepos.sort()
        i, j = 0, 0
        while i < len(files) and j < len(subrepos):
            subpath = subrepos[j] + b"/"
            if files[i] < subpath:
                i += 1
                continue
            while i < len(files) and files[i].startswith(subpath):
                del files[i]
            j += 1

        if not files or b'' in files:
            files = [b'']
            # constructing the foldmap is expensive, so don't do it for the
            # common case where files is ['']
            normalize = None
        # Sentinels: subrepos and .hg map to None so the walk never descends
        # into them.
        results = dict.fromkeys(subrepos)
        results[b'.hg'] = None

        for ff in files:
            if normalize:
                nf = normalize(ff, False, True)
            else:
                nf = ff
            if nf in results:
                continue

            try:
                st = lstat(join(nf))
                kind = getkind(st.st_mode)
                if kind == dirkind:
                    if nf in dmap:
                        # file replaced by dir on disk but still in dirstate
                        results[nf] = None
                    foundadd((nf, ff))
                elif kind == regkind or kind == lnkkind:
                    results[nf] = st
                else:
                    badfn(ff, badtype(kind))
                    if nf in dmap:
                        results[nf] = None
            except (OSError) as inst:
                # nf not found on disk - it is dirstate only
                if nf in dmap:  # does it exactly match a missing file?
                    results[nf] = None
                else:  # does it match a missing directory?
                    if self._map.hasdir(nf):
                        notfoundadd(nf)
                    else:
                        badfn(ff, encoding.strtolocal(inst.strerror))

        # match.files() may contain explicitly-specified paths that shouldn't
        # be taken; drop them from the list of files found. dirsfound/notfound
        # aren't filtered here because they will be tested later.
        if match.anypats():
            for f in list(results):
                if f == b'.hg' or f in subrepos:
                    # keep sentinel to disable further out-of-repo walks
                    continue
                if not match(f):
                    del results[f]

        # Case insensitive filesystems cannot rely on lstat() failing to detect
        # a case-only rename.  Prune the stat object for any file that does not
        # match the case in the filesystem, if there are multiple files that
        # normalize to the same path.
        if match.isexact() and self._checkcase:
            normed = {}

            # Group the stat'ed paths by their case-normalized form.
            for f, st in results.items():
                if st is None:
                    continue

                nc = util.normcase(f)
                paths = normed.get(nc)

                if paths is None:
                    paths = set()
                    normed[nc] = paths

                paths.add(f)

            # For any collision group, keep a stat result only for the path
            # whose case matches what is actually on disk.
            for norm, paths in normed.items():
                if len(paths) > 1:
                    for path in paths:
                        folded = self._discoverpath(
                            path, norm, True, None, self._map.dirfoldmap
                        )
                        if path != folded:
                            results[path] = None

        return results, dirsfound, dirsnotfound
1189
1187
    def walk(self, match, subrepos, unknown, ignored, full=True):
        """
        Walk recursively through the directory tree, finding all files
        matched by match.

        If full is False, maybe skip some known-clean files.

        Return a dict mapping filename to stat-like object (either
        mercurial.osutil.stat instance or return value of os.stat()).

        """
        # full is a flag that extensions that hook into walk can use -- this
        # implementation doesn't use it at all. This satisfies the contract
        # because we only guarantee a "maybe".

        # Pick the ignore predicates according to what the caller wants
        # listed; util.always/util.never let later code stay branch-free.
        if ignored:
            ignore = util.never
            dirignore = util.never
        elif unknown:
            ignore = self._ignore
            dirignore = self._dirignore
        else:
            # if not unknown and not ignored, drop dir recursion and step 2
            ignore = util.always
            dirignore = util.always

        if self._sparsematchfn is not None:
            # Restrict the walk to the sparse profile, but still allow the
            # explicitly named files through via an exact matcher.
            em = matchmod.exact(match.files())
            sm = matchmod.unionmatcher([self._sparsematcher, em])
            match = matchmod.intersectmatchers(match, sm)

        # Hoist attribute lookups into locals for the hot traversal loops.
        matchfn = match.matchfn
        matchalways = match.always()
        matchtdir = match.traversedir
        dmap = self._map
        listdir = util.listdir
        lstat = os.lstat
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join

        exact = skipstep3 = False
        if match.isexact():  # match.exact
            exact = True
            dirignore = util.always  # skip step 2
        elif match.prefix():  # match.match, no patterns
            skipstep3 = True

        if not exact and self._checkcase:
            normalize = self._normalize
            normalizefile = self._normalizefile
            skipstep3 = False
        else:
            normalize = self._normalize
            normalizefile = None

        # step 1: find all explicit files
        results, work, dirsnotfound = self._walkexplicit(match, subrepos)
        if matchtdir:
            for d in work:
                matchtdir(d[0])
            for d in dirsnotfound:
                matchtdir(d)

        skipstep3 = skipstep3 and not (work or dirsnotfound)
        work = [d for d in work if not dirignore(d[0])]

        # step 2: visit subdirectories
        def traverse(work, alreadynormed):
            # Depth-first traversal using an explicit stack (`work`).
            wadd = work.append
            while work:
                tracing.counter('dirstate.walk work', len(work))
                nd = work.pop()
                visitentries = match.visitchildrenset(nd)
                if not visitentries:
                    continue
                if visitentries == b'this' or visitentries == b'all':
                    visitentries = None
                skip = None
                if nd != b'':
                    skip = b'.hg'
                try:
                    with tracing.log('dirstate.walk.traverse listdir %s', nd):
                        entries = listdir(join(nd), stat=True, skip=skip)
                except (PermissionError, FileNotFoundError) as inst:
                    match.bad(
                        self.pathto(nd), encoding.strtolocal(inst.strerror)
                    )
                    continue
                for f, kind, st in entries:
                    # Some matchers may return files in the visitentries set,
                    # instead of 'this', if the matcher explicitly mentions them
                    # and is not an exactmatcher. This is acceptable; we do not
                    # make any hard assumptions about file-or-directory below
                    # based on the presence of `f` in visitentries. If
                    # visitchildrenset returned a set, we can always skip the
                    # entries *not* in the set it provided regardless of whether
                    # they're actually a file or a directory.
                    if visitentries and f not in visitentries:
                        continue
                    if normalizefile:
                        # even though f might be a directory, we're only
                        # interested in comparing it to files currently in the
                        # dmap -- therefore normalizefile is enough
                        nf = normalizefile(
                            nd and (nd + b"/" + f) or f, True, True
                        )
                    else:
                        nf = nd and (nd + b"/" + f) or f
                    if nf not in results:
                        if kind == dirkind:
                            if not ignore(nf):
                                if matchtdir:
                                    matchtdir(nf)
                                wadd(nf)
                            if nf in dmap and (matchalways or matchfn(nf)):
                                results[nf] = None
                        elif kind == regkind or kind == lnkkind:
                            if nf in dmap:
                                if matchalways or matchfn(nf):
                                    results[nf] = st
                            elif (matchalways or matchfn(nf)) and not ignore(
                                nf
                            ):
                                # unknown file -- normalize if necessary
                                if not alreadynormed:
                                    nf = normalize(nf, False, True)
                                results[nf] = st
                        elif nf in dmap and (matchalways or matchfn(nf)):
                            results[nf] = None

        for nd, d in work:
            # alreadynormed means that processwork doesn't have to do any
            # expensive directory normalization
            alreadynormed = not normalize or nd == d
            traverse([d], alreadynormed)

        # Drop the sentinels inserted by _walkexplicit before returning.
        for s in subrepos:
            del results[s]
        del results[b'.hg']

        # step 3: visit remaining files from dmap
        if not skipstep3 and not exact:
            # If a dmap file is not in results yet, it was either
            # a) not matching matchfn b) ignored, c) missing, or d) under a
            # symlink directory.
            if not results and matchalways:
                visit = [f for f in dmap]
            else:
                visit = [f for f in dmap if f not in results and matchfn(f)]
            visit.sort()

            if unknown:
                # unknown == True means we walked all dirs under the roots
                # that wasn't ignored, and everything that matched was stat'ed
                # and is already in results.
                # The rest must thus be ignored or under a symlink.
                audit_path = pathutil.pathauditor(self._root, cached=True)

                for nf in iter(visit):
                    # If a stat for the same file was already added with a
                    # different case, don't add one for this, since that would
                    # make it appear as if the file exists under both names
                    # on disk.
                    if (
                        normalizefile
                        and normalizefile(nf, True, True) in results
                    ):
                        results[nf] = None
                    # Report ignored items in the dmap as long as they are not
                    # under a symlink directory.
                    elif audit_path.check(nf):
                        try:
                            results[nf] = lstat(join(nf))
                            # file was just ignored, no links, and exists
                        except OSError:
                            # file doesn't exist
                            results[nf] = None
                    else:
                        # It's either missing or under a symlink directory
                        # which we in this case report as missing
                        results[nf] = None
            else:
                # We may not have walked the full directory tree above,
                # so stat and check everything we missed.
                iv = iter(visit)
                for st in util.statfiles([join(i) for i in visit]):
                    results[next(iv)] = st
        return results
1380
1378
    def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
        """Compute working-copy status through the Rust extension.

        Returns a pair ``(lookup, status)`` where ``status`` is a
        ``scmutil.status`` object; ``lookup`` is the first element of the
        tuple returned by ``rustmod.status`` (files needing content
        comparison -- presumably the "unsure" set; confirm against the Rust
        side). Emits pattern-file warnings via ``self._ui.warn`` and reports
        bad files via ``matcher.bad``. May propagate
        ``rustmod.FallbackError``, which callers (see ``status()``) use to
        fall back to the Python implementation. As a side effect this may
        mark the dirstate dirty (``self._dirty``).
        """
        if self._sparsematchfn is not None:
            # Restrict to the sparse profile while still letting explicitly
            # named files through (same trick as in walk()).
            em = matchmod.exact(matcher.files())
            sm = matchmod.unionmatcher([self._sparsematcher, em])
            matcher = matchmod.intersectmatchers(matcher, sm)
        # Force Rayon (Rust parallelism library) to respect the number of
        # workers. This is a temporary workaround until Rust code knows
        # how to read the config file.
        numcpus = self._ui.configint(b"worker", b"numcpus")
        if numcpus is not None:
            encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)

        workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
        if not workers_enabled:
            encoding.environ[b"RAYON_NUM_THREADS"] = b"1"

        (
            lookup,
            modified,
            added,
            removed,
            deleted,
            clean,
            ignored,
            unknown,
            warnings,
            bad,
            traversed,
            dirty,
        ) = rustmod.status(
            self._map._map,
            matcher,
            self._rootdir,
            self._ignorefiles(),
            self._checkexec,
            bool(list_clean),
            bool(list_ignored),
            bool(list_unknown),
            bool(matcher.traversedir),
        )

        # The Rust walk may itself have updated the dirstate map.
        self._dirty |= dirty

        if matcher.traversedir:
            for dir in traversed:
                matcher.traversedir(dir)

        if self._ui.warn:
            for item in warnings:
                if isinstance(item, tuple):
                    # (file_path, syntax) pair: invalid syntax declaration in
                    # an ignore/pattern file.
                    file_path, syntax = item
                    msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
                        file_path,
                        syntax,
                    )
                    self._ui.warn(msg)
                else:
                    # bare path: the pattern file could not be read at all
                    msg = _(b"skipping unreadable pattern file '%s': %s\n")
                    self._ui.warn(
                        msg
                        % (
                            pathutil.canonpath(
                                self._rootdir, self._rootdir, item
                            ),
                            b"No such file or directory",
                        )
                    )

        for fn, message in bad:
            matcher.bad(fn, encoding.strtolocal(message))

        status = scmutil.status(
            modified=modified,
            added=added,
            removed=removed,
            deleted=deleted,
            unknown=unknown,
            ignored=ignored,
            clean=clean,
        )
        return (lookup, status)
1462
1460
    # XXX since this can make the dirstate dirty (through rust), we should
    # enforce that it is done within an appropriate change-context that scopes
    # the change and ensures it eventually gets written to disk (or rolled
    # back)
    def status(self, match, subrepos, ignored, clean, unknown):
        """Determine the status of the working copy relative to the
        dirstate and return a pair of (unsure, status), where status is of type
        scmutil.status and:

          unsure:
            files that might have been modified since the dirstate was
            written, but need to be read to be sure (size is the same
            but mtime differs)
          status.modified:
            files that have definitely been modified since the dirstate
            was written (different size or mode)
          status.clean:
            files that have definitely not been modified since the
            dirstate was written
        """
        # Stash the list-* flags before the names are reused below as the
        # result accumulators.
        listignored, listclean, listunknown = ignored, clean, unknown
        lookup, modified, added, unknown, ignored = [], [], [], [], []
        removed, deleted, clean = [], [], []

        dmap = self._map
        dmap.preload()

        use_rust = True

        allowed_matchers = (
            matchmod.alwaysmatcher,
            matchmod.differencematcher,
            matchmod.exactmatcher,
            matchmod.includematcher,
            matchmod.intersectionmatcher,
            matchmod.nevermatcher,
            matchmod.unionmatcher,
        )

        if rustmod is None:
            use_rust = False
        elif self._checkcase:
            # Case-insensitive filesystems are not handled yet
            use_rust = False
        elif subrepos:
            use_rust = False
        elif not isinstance(match, allowed_matchers):
            # Some matchers have yet to be implemented
            use_rust = False

        # Get the time from the filesystem so we can disambiguate files that
        # appear modified in the present or future.
        try:
            mtime_boundary = timestamp.get_fs_now(self._opener)
        except OSError:
            # In largefiles or readonly context
            mtime_boundary = None

        if use_rust:
            try:
                res = self._rust_status(
                    match, listclean, listignored, listunknown
                )
                return res + (mtime_boundary,)
            except rustmod.FallbackError:
                # the Rust implementation could not handle this request; fall
                # through to the Python implementation below
                pass

        def noop(f):
            # placeholder "append" used when a category isn't being listed
            pass

        # Bind the per-category append methods (and noop for unlisted
        # categories) plus common attributes to locals: the classification
        # loop below runs once per walked file.
        dcontains = dmap.__contains__
        dget = dmap.__getitem__
        ladd = lookup.append  # aka "unsure"
        madd = modified.append
        aadd = added.append
        uadd = unknown.append if listunknown else noop
        iadd = ignored.append if listignored else noop
        radd = removed.append
        dadd = deleted.append
        cadd = clean.append if listclean else noop
        mexact = match.exact
        dirignore = self._dirignore
        checkexec = self._checkexec
        checklink = self._checklink
        copymap = self._map.copymap

        # We need to do full walks when either
        # - we're listing all clean files, or
        # - match.traversedir does something, because match.traversedir should
        #   be called for every dir in the working dir
        full = listclean or match.traversedir is not None
        for fn, st in self.walk(
            match, subrepos, listunknown, listignored, full=full
        ).items():
            if not dcontains(fn):
                # not tracked by the dirstate: either ignored or unknown
                if (listignored or mexact(fn)) and dirignore(fn):
                    if listignored:
                        iadd(fn)
                else:
                    uadd(fn)
                continue

            t = dget(fn)
            mode = t.mode
            size = t.size

            # NOTE: the order of these checks is significant; earlier states
            # (deleted, merge info, added, removed) take precedence over the
            # plain tracked-file comparison.
            if not st and t.tracked:
                dadd(fn)
            elif t.p2_info:
                madd(fn)
            elif t.added:
                aadd(fn)
            elif t.removed:
                radd(fn)
            elif t.tracked:
                if not checklink and t.has_fallback_symlink:
                    # If the file system does not support symlink, the mode
                    # might not be correctly stored in the dirstate, so do not
                    # trust it.
                    ladd(fn)
                elif not checkexec and t.has_fallback_exec:
                    # If the file system does not support exec bits, the mode
                    # might not be correctly stored in the dirstate, so do not
                    # trust it.
                    ladd(fn)
                elif (
                    size >= 0
                    and (
                        (size != st.st_size and size != st.st_size & _rangemask)
                        or ((mode ^ st.st_mode) & 0o100 and checkexec)
                    )
                    or fn in copymap
                ):
                    if stat.S_ISLNK(st.st_mode) and size != st.st_size:
                        # issue6456: Size returned may be longer due to
                        # encryption on EXT-4 fscrypt, undecided.
                        ladd(fn)
                    else:
                        madd(fn)
                elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
                    # There might be a change in the future if for example the
                    # internal clock is off, but this is a case where the issues
                    # the user would face would be a lot worse and there is
                    # nothing we can really do.
                    ladd(fn)
                elif listclean:
                    cadd(fn)
        status = scmutil.status(
            modified, added, removed, deleted, unknown, ignored, clean
        )
        return (lookup, status, mtime_boundary)
1613
1611
def matches(self, match):
    """Return every filename known to the dirstate (in any state)
    that the given matcher accepts."""
    # The Rust-backed map keeps the raw mapping one level deeper.
    mapping = self._map._map if rustmod is not None else self._map

    if match.always():
        return mapping.keys()
    wanted = match.files()
    if match.isexact():
        # fast path -- an exact matcher names few files, so filter the
        # other way around: scan the (small) pattern list instead of the
        # (typically much larger) dirstate map
        return [name for name in wanted if name in mapping]
    if match.prefix() and all(name in mapping for name in wanted):
        # fast path -- every pattern is itself a tracked file, so the
        # answer is simply the pattern list
        return list(wanted)
    return [name for name in mapping if match(name)]
1634
1632
1635 def _actualfilename(self, tr):
1633 def _actualfilename(self, tr):
1636 if tr:
1634 if tr:
1637 return self._pendingfilename
1635 return self._pendingfilename
1638 else:
1636 else:
1639 return self._filename
1637 return self._filename
1640
1638
def all_file_names(self):
    """List every on-disk filename currently used by this dirstate.

    This is only used to do `hg rollback` related backup in the transaction.
    """
    if not self._opener.exists(self._filename):
        # nothing has ever been written to disk yet
        return ()
    if self._use_dirstate_v2:
        # dirstate-v2 stores a docket file plus a separate data file
        return (self._filename, self._map.docket.data_filename())
    return (self._filename,)
1656
1654
def verify(self, m1, m2, p1, narrow_matcher=None):
    """
    check the dirstate contents against the parent manifest and yield errors
    """
    # Message templates for each kind of inconsistency we can detect.
    msg_missing_from_p1 = _(
        b"%s marked as tracked in p1 (%s) but not in manifest1\n"
    )
    msg_unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
    msg_missing_from_ps = _(
        b"%s marked as modified, but not in either manifest\n"
    )
    msg_missing_from_ds = _(
        b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
    )
    # Pass 1: walk the dirstate and flag entries that disagree with the
    # parent manifests.
    for fname, entry in self.items():
        if entry.p1_tracked:
            if entry.modified and fname not in m1 and fname not in m2:
                yield msg_missing_from_ps % fname
            elif fname not in m1:
                yield msg_missing_from_p1 % (fname, node.short(p1))
        if entry.added and fname in m1:
            yield msg_unexpected_in_p1 % fname
    # Pass 2: walk manifest1 and flag files the dirstate does not mark as
    # tracked in p1 (restricted to the narrow spec when one is given).
    for fname in m1:
        if narrow_matcher is not None and not narrow_matcher(fname):
            continue
        if not self.get_entry(fname).p1_tracked:
            yield msg_missing_from_ds % (fname, node.short(p1))
General Comments 0
You need to be logged in to leave comments. Login now