##// END OF EJS Templates
dirstate: start tracking that we are within a `running_status` context
marmoute -
r51040:72ef6c48 default
parent child Browse files
Show More
@@ -1,1738 +1,1751 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import collections
9 import collections
10 import contextlib
10 import contextlib
11 import os
11 import os
12 import stat
12 import stat
13 import uuid
13 import uuid
14
14
15 from .i18n import _
15 from .i18n import _
16 from .pycompat import delattr
16 from .pycompat import delattr
17
17
18 from hgdemandimport import tracing
18 from hgdemandimport import tracing
19
19
20 from . import (
20 from . import (
21 dirstatemap,
21 dirstatemap,
22 encoding,
22 encoding,
23 error,
23 error,
24 match as matchmod,
24 match as matchmod,
25 node,
25 node,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 util,
30 util,
31 )
31 )
32
32
33 from .dirstateutils import (
33 from .dirstateutils import (
34 timestamp,
34 timestamp,
35 )
35 )
36
36
37 from .interfaces import (
37 from .interfaces import (
38 dirstate as intdirstate,
38 dirstate as intdirstate,
39 util as interfaceutil,
39 util as interfaceutil,
40 )
40 )
41
41
# Optional accelerated implementations (the Rust one may be unavailable).
parsers = policy.importmod('parsers')
rustmod = policy.importrust('dirstate')

# True when the fast Rust dirstate-v2 implementation can be used.
HAS_FAST_DIRSTATE_V2 = rustmod is not None

# Convenience aliases used throughout this module.
propertycache = util.propertycache
filecache = scmutil.filecache
_rangemask = dirstatemap.rangemask

DirstateItem = dirstatemap.DirstateItem
52
52
53
53
class repocache(filecache):
    """filecache for files in .hg/"""

    def join(self, obj, fname):
        # resolve ``fname`` relative to the repository's .hg/ directory
        return obj._opener.join(fname)
59
59
60
60
class rootcache(filecache):
    """filecache for files in the repository root"""

    def join(self, obj, fname):
        # resolve ``fname`` relative to the working directory root
        return obj._join(fname)
66
66
67
67
def check_invalidated(func):
    """check we func is called a non-invalidated dirstate

    The dirstate is in an "invalidated state" after an error occured during
    its modification and remains so until we exited the top level scope that
    framed such change.
    """

    def wrap(self, *args, **kwargs):
        if not self._invalidated_context:
            return func(self, *args, **kwargs)
        raise error.ProgrammingError(
            'calling `%s` after the dirstate was invalidated' % func.__name__
        )

    return wrap
84
84
85
85
def requires_changing_parents(func):
    """Decorator refusing to run outside of a `changing_parents` context."""

    def wrap(self, *args, **kwargs):
        if self.is_changing_parents:
            return func(self, *args, **kwargs)
        raise error.ProgrammingError(
            'calling `%s` outside of a changing_parents context'
            % func.__name__
        )

    return check_invalidated(wrap)
95
95
96
96
def requires_changing_files(func):
    """Decorator refusing to run outside of a `changing_files` context."""

    def wrap(self, *args, **kwargs):
        if self.is_changing_files:
            return func(self, *args, **kwargs)
        raise error.ProgrammingError(
            'calling `%s` outside of a `changing_files`' % func.__name__
        )

    return check_invalidated(wrap)
106
106
107
107
def requires_changing_any(func):
    """Decorator refusing to run outside of any `changing_*` context."""

    def wrap(self, *args, **kwargs):
        if self.is_changing_any:
            return func(self, *args, **kwargs)
        raise error.ProgrammingError(
            'calling `%s` outside of a changing context' % func.__name__
        )

    return check_invalidated(wrap)
117
117
118
118
def requires_not_changing_parents(func):
    """Decorator refusing to run inside of a `changing_parents` context."""

    def wrap(self, *args, **kwargs):
        if not self.is_changing_parents:
            return func(self, *args, **kwargs)
        raise error.ProgrammingError(
            'calling `%s` inside of a changing_parents context'
            % func.__name__
        )

    return check_invalidated(wrap)
128
128
129
129
# Mutually exclusive kinds of dirstate-changing context (see `_changing`).
CHANGE_TYPE_PARENTS = "parents"
CHANGE_TYPE_FILES = "files"
132
132
133
133
134 @interfaceutil.implementer(intdirstate.idirstate)
134 @interfaceutil.implementer(intdirstate.idirstate)
135 class dirstate:
135 class dirstate:
136
136
137 # used by largefile to avoid overwritting transaction callbacK
137 # used by largefile to avoid overwritting transaction callbacK
138 _tr_key_suffix = b''
138 _tr_key_suffix = b''
139
139
    def __init__(
        self,
        opener,
        ui,
        root,
        validate,
        sparsematchfn,
        nodeconstants,
        use_dirstate_v2,
        use_tracked_hint=False,
    ):
        """Create a new dirstate object.

        opener is an open()-like callable that can be used to open the
        dirstate file; root is the root of the directory tracked by
        the dirstate.
        """
        self._use_dirstate_v2 = use_dirstate_v2
        self._use_tracked_hint = use_tracked_hint
        self._nodeconstants = nodeconstants
        self._opener = opener
        self._validate = validate
        self._root = root
        # Either build a sparse-matcher or None if sparse is disabled
        self._sparsematchfn = sparsematchfn
        # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
        # UNC path pointing to root share (issue4557)
        self._rootdir = pathutil.normasprefix(root)
        # True is any internal state may be different
        self._dirty = False
        # True if the set of tracked file may be different
        self._dirty_tracked_set = False
        self._ui = ui
        self._filecache = {}
        # nesting level of `changing_parents` context
        self._changing_level = 0
        # the change currently underway
        self._change_type = None
        # number of open _running_status context
        self._running_status = 0
        # True if the current dirstate changing operations have been
        # invalidated (used to make sure all nested contexts have been exited)
        self._invalidated_context = False
        self._attached_to_a_transaction = False
        self._filename = b'dirstate'
        self._filename_th = b'dirstate-tracked-hint'
        self._pendingfilename = b'%s.pending' % self._filename
        self._plchangecallbacks = {}
        self._origpl = None
        self._mapcls = dirstatemap.dirstatemap
        # Access and cache cwd early, so we don't access it for the first time
        # after a working-copy update caused it to not exist (accessing it then
        # raises an exception).
        self._cwd
192
194
    def refresh(self):
        """Drop cached state so it is reloaded from disk when next needed."""
        if '_branch' in vars(self):
            del self._branch
        if '_map' in vars(self) and self._map.may_need_refresh():
            self.invalidate()
198
200
    def prefetch_parents(self):
        """make sure the parents are loaded

        Used to avoid a race condition.
        """
        # touching the property is enough to populate the cache
        self._pl
205
207
    @contextlib.contextmanager
    @check_invalidated
    def running_status(self, repo):
        """Wrap a status operation

        This context is not mutually exclusive with the `changing_*` context.
        It also does not warrant for the `wlock` to be taken.

        If the wlock is taken, this context will (in the future) behave in a
        simple way, and ensure the data are scheduled for write when leaving
        the top level context.

        If the lock is not taken, it will only warrant that the data are either
        committed (written) and rolled back (invalidated) when exiting the top
        level context. The write/invalidate action must be performed by the
        wrapped code.


        The expected logic is:

        A: read the dirstate
        B: run status
           This might make the dirstate dirty by updating cache,
           especially in Rust.
        C: do more "post status fixup if relevant
        D: try to take the w-lock (this will invalidate the changes if they were raced)
        E0: if dirstate changed on disk → discard change (done by dirstate internal)
        E1: elif lock was acquired → write the changes
        E2: else → discard the changes
        """
        # record that a status run is in progress (contexts can nest)
        self._running_status += 1
        try:
            yield
        except Exception:
            # an error during status invalidates any pending dirstate change
            self.invalidate()
            raise
        finally:
            self._running_status -= 1
            if self._invalidated_context:
                self.invalidate()
237
248
    @contextlib.contextmanager
    @check_invalidated
    def _changing(self, repo, change_type):
        """Context manager framing a dirstate change of kind ``change_type``.

        Requires the wlock to be held; nested contexts must all be of the
        same kind. On clean exit of the outermost context the dirstate is
        written (unless a transaction owns the write); on error it is
        invalidated.
        """
        if repo.currentwlock() is None:
            msg = b"trying to change the dirstate without holding the wlock"
            raise error.ProgrammingError(msg)

        has_tr = repo.currenttransaction() is not None
        if not has_tr and self._changing_level == 0 and self._dirty:
            msg = "entering a changing context, but dirstate is already dirty"
            raise error.ProgrammingError(msg)

        assert self._changing_level >= 0
        # different type of change are mutually exclusive
        if self._change_type is None:
            assert self._changing_level == 0
            self._change_type = change_type
        elif self._change_type != change_type:
            msg = (
                'trying to open "%s" dirstate-changing context while a "%s" is'
                ' already open'
            )
            msg %= (change_type, self._change_type)
            raise error.ProgrammingError(msg)
        should_write = False
        self._changing_level += 1
        try:
            yield
        except:  # re-raises
            self.invalidate()  # this will set `_invalidated_context`
            raise
        finally:
            assert self._changing_level > 0
            self._changing_level -= 1
            # If the dirstate is being invalidated, call invalidate again.
            # This will throw away anything added by a upper context and
            # reset the `_invalidated_context` flag when relevant
            if self._changing_level <= 0:
                self._change_type = None
                assert self._changing_level == 0
                if self._invalidated_context:
                    # make sure we invalidate anything an upper context might
                    # have changed.
                    self.invalidate()
                else:
                    should_write = self._changing_level <= 0
                    tr = repo.currenttransaction()
                    if has_tr != (tr is not None):
                        # NOTE(review): assumes the transaction life-cycle does
                        # not cross a changing context boundary — confirm.
                        if has_tr:
                            m = "transaction vanished while changing dirstate"
                        else:
                            m = "transaction appeared while changing dirstate"
                        raise error.ProgrammingError(m)
                    if should_write:
                        self.write(tr)
293
304
    @contextlib.contextmanager
    def changing_parents(self, repo):
        """Context manager framing a change of the working copy parents."""
        with self._changing(repo, CHANGE_TYPE_PARENTS) as c:
            yield c
298
309
    @contextlib.contextmanager
    def changing_files(self, repo):
        """Context manager framing a change of the tracked-file set."""
        with self._changing(repo, CHANGE_TYPE_FILES) as c:
            yield c
303
314
    # here to help migration to the new code
    def parentchange(self):
        """Removed API kept only to point callers at the replacement."""
        msg = (
            "Mercurial 6.4 and later requires call to "
            "`dirstate.changing_parents(repo)`"
        )
        raise error.ProgrammingError(msg)
311
322
    @property
    def is_changing_any(self):
        """Returns true if the dirstate is in the middle of a set of changes.

        This returns True for any kind of change.
        """
        return self._changing_level > 0
319
330
    def pendingparentchange(self):
        # deprecated spelling, delegates to the (also deprecated) method
        return self.is_changing_parent()
322
333
    def is_changing_parent(self):
        """Returns true if the dirstate is in the middle of a set of changes
        that modify the dirstate parent.

        Deprecated: use the `is_changing_parents` property instead.
        """
        self._ui.deprecwarn(b"dirstate.is_changing_parents", b"6.5")
        return self.is_changing_parents
329
340
    @property
    def is_changing_parents(self):
        """Returns true if the dirstate is in the middle of a set of changes
        that modify the dirstate parent.
        """
        if self._changing_level <= 0:
            return False
        return self._change_type == CHANGE_TYPE_PARENTS
338
349
    @property
    def is_changing_files(self):
        """Returns true if the dirstate is in the middle of a set of changes
        that modify the files tracked or their sources.
        """
        if self._changing_level <= 0:
            return False
        return self._change_type == CHANGE_TYPE_FILES
347
358
    @propertycache
    def _map(self):
        """Return the dirstate contents (see documentation for dirstatemap)."""
        return self._mapcls(
            self._ui,
            self._opener,
            self._root,
            self._nodeconstants,
            self._use_dirstate_v2,
        )
358
369
    @property
    def _sparsematcher(self):
        """The matcher for the sparse checkout.

        The working directory may not include every file from a manifest. The
        matcher obtained by this property will match a path if it is to be
        included in the working directory.

        When sparse if disabled, return None.
        """
        if self._sparsematchfn is None:
            return None
        # TODO there is potential to cache this property. For now, the matcher
        # is resolved on every access. (But the called function does use a
        # cache to keep the lookup fast.)
        return self._sparsematchfn()
375
386
    @repocache(b'branch')
    def _branch(self):
        # branch name as stored in .hg/branch; missing file means b"default"
        try:
            return self._opener.read(b"branch").strip() or b"default"
        except FileNotFoundError:
            return b"default"
382
393
    @property
    def _pl(self):
        # pair of working-copy parent nodes, as stored in the dirstate map
        return self._map.parents()
386
397
    def hasdir(self, d):
        """Return True if ``d`` is a directory containing tracked files."""
        return self._map.hastrackeddir(d)
389
400
    @rootcache(b'.hgignore')
    def _ignore(self):
        """Matcher built from all the configured ignore files."""
        files = self._ignorefiles()
        if not files:
            return matchmod.never()

        pats = [b'include:%s' % f for f in files]
        return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
398
409
    @propertycache
    def _slash(self):
        # True when paths should be displayed with '/' despite the OS default
        return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
402
413
    @propertycache
    def _checklink(self):
        # True when the filesystem supports symlinks
        return util.checklink(self._root)
406
417
    @propertycache
    def _checkexec(self):
        # True when the filesystem supports the executable bit
        return bool(util.checkexec(self._root))
410
421
    @propertycache
    def _checkcase(self):
        # True when the filesystem is case-insensitive
        return not util.fscasesensitive(self._join(b'.hg'))
414
425
    def _join(self, f):
        # much faster than os.path.join()
        # it's safe because f is always a relative path
        return self._rootdir + f
419
430
    def flagfunc(self, buildfallback):
        """build a callable that returns flags associated with a filename

        The information is extracted from three possible layers:
        1. the file system if it supports the information
        2. the "fallback" information stored in the dirstate if any
        3. a more expensive mechanism inferring the flags from the parents.
        """

        # small hack to cache the result of buildfallback()
        fallback_func = []

        def get_flags(x):
            entry = None
            fallback_value = None
            try:
                st = os.lstat(self._join(x))
            except OSError:
                # missing file: no flags
                return b''

            if self._checklink:
                if util.statislink(st):
                    return b'l'
            else:
                # filesystem cannot express symlinks: consult the dirstate
                # fallback bit, then the (expensive) parent-based fallback
                entry = self.get_entry(x)
                if entry.has_fallback_symlink:
                    if entry.fallback_symlink:
                        return b'l'
                else:
                    if not fallback_func:
                        fallback_func.append(buildfallback())
                    fallback_value = fallback_func[0](x)
                    if b'l' in fallback_value:
                        return b'l'

            if self._checkexec:
                if util.statisexec(st):
                    return b'x'
            else:
                # same layering for the executable bit
                if entry is None:
                    entry = self.get_entry(x)
                if entry.has_fallback_exec:
                    if entry.fallback_exec:
                        return b'x'
                else:
                    if fallback_value is None:
                        if not fallback_func:
                            fallback_func.append(buildfallback())
                        fallback_value = fallback_func[0](x)
                    if b'x' in fallback_value:
                        return b'x'
            return b''

        return get_flags
474
485
    @propertycache
    def _cwd(self):
        # internal config: ui.forcecwd
        forcecwd = self._ui.config(b'ui', b'forcecwd')
        if forcecwd:
            return forcecwd
        return encoding.getcwd()
482
493
    def getcwd(self):
        """Return the path from which a canonical path is calculated.

        This path should be used to resolve file patterns or to convert
        canonical paths back to file paths for display. It shouldn't be
        used to get real file paths. Use vfs functions instead.
        """
        cwd = self._cwd
        if cwd == self._root:
            return b''
        # self._root ends with a path separator if self._root is '/' or 'C:\'
        rootsep = self._root
        if not util.endswithsep(rootsep):
            rootsep += pycompat.ossep
        if cwd.startswith(rootsep):
            return cwd[len(rootsep) :]
        else:
            # we're outside the repo. return an absolute path.
            return cwd
502
513
503 def pathto(self, f, cwd=None):
514 def pathto(self, f, cwd=None):
504 if cwd is None:
515 if cwd is None:
505 cwd = self.getcwd()
516 cwd = self.getcwd()
506 path = util.pathto(self._root, cwd, f)
517 path = util.pathto(self._root, cwd, f)
507 if self._slash:
518 if self._slash:
508 return util.pconvert(path)
519 return util.pconvert(path)
509 return path
520 return path
510
521
511 def get_entry(self, path):
522 def get_entry(self, path):
512 """return a DirstateItem for the associated path"""
523 """return a DirstateItem for the associated path"""
513 entry = self._map.get(path)
524 entry = self._map.get(path)
514 if entry is None:
525 if entry is None:
515 return DirstateItem()
526 return DirstateItem()
516 return entry
527 return entry
517
528
    def __contains__(self, key):
        # membership test against the tracked-path map
        return key in self._map
520
531
521 def __iter__(self):
532 def __iter__(self):
522 return iter(sorted(self._map))
533 return iter(sorted(self._map))
523
534
    def items(self):
        """Iterate over (path, DirstateItem) pairs."""
        return self._map.items()

    # historical alias kept for compatibility
    iteritems = items
528
539
    def parents(self):
        """Return the validated working-copy parent nodes."""
        return [self._validate(p) for p in self._pl]
531
542
    def p1(self):
        """Return the validated first working-copy parent."""
        return self._validate(self._pl[0])
534
545
    def p2(self):
        """Return the validated second working-copy parent."""
        return self._validate(self._pl[1])
537
548
    @property
    def in_merge(self):
        """True if a merge is in progress"""
        return self._pl[1] != self._nodeconstants.nullid
542
553
    def branch(self):
        """Return the current branch name, in local encoding."""
        return encoding.tolocal(self._branch)
545
556
    @requires_changing_parents
    def setparents(self, p1, p2=None):
        """Set dirstate parents to p1 and p2.

        When moving from two parents to one, "merged" entries are
        adjusted to normal and previous copy records discarded and
        returned by the call.

        See localrepo.setparents()
        """
        if p2 is None:
            p2 = self._nodeconstants.nullid
        if self._changing_level == 0:
            raise ValueError(
                b"cannot set dirstate parent outside of "
                b"dirstate.changing_parents context manager"
            )

        self._dirty = True
        oldp2 = self._pl[1]
        if self._origpl is None:
            # remember the pre-change parents for later restoration
            self._origpl = self._pl
        nullid = self._nodeconstants.nullid
        # True if we need to fold p2 related state back to a linear case
        fold_p2 = oldp2 != nullid and p2 == nullid
        return self._map.setparents(p1, p2, fold_p2=fold_p2)
572
583
573 def setbranch(self, branch):
584 def setbranch(self, branch):
574 self.__class__._branch.set(self, encoding.fromlocal(branch))
585 self.__class__._branch.set(self, encoding.fromlocal(branch))
575 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
586 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
576 try:
587 try:
577 f.write(self._branch + b'\n')
588 f.write(self._branch + b'\n')
578 f.close()
589 f.close()
579
590
580 # make sure filecache has the correct stat info for _branch after
591 # make sure filecache has the correct stat info for _branch after
581 # replacing the underlying file
592 # replacing the underlying file
582 ce = self._filecache[b'_branch']
593 ce = self._filecache[b'_branch']
583 if ce:
594 if ce:
584 ce.refresh()
595 ce.refresh()
585 except: # re-raises
596 except: # re-raises
586 f.discard()
597 f.discard()
587 raise
598 raise
588
599
589 def invalidate(self):
600 def invalidate(self):
590 """Causes the next access to reread the dirstate.
601 """Causes the next access to reread the dirstate.
591
602
592 This is different from localrepo.invalidatedirstate() because it always
603 This is different from localrepo.invalidatedirstate() because it always
593 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
604 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
594 check whether the dirstate has changed before rereading it."""
605 check whether the dirstate has changed before rereading it."""
595
606
596 for a in ("_map", "_branch", "_ignore"):
607 for a in ("_map", "_branch", "_ignore"):
597 if a in self.__dict__:
608 if a in self.__dict__:
598 delattr(self, a)
609 delattr(self, a)
599 self._dirty = False
610 self._dirty = False
600 self._dirty_tracked_set = False
611 self._dirty_tracked_set = False
601 self._invalidated_context = (
612 self._invalidated_context = bool(
602 self._changing_level > 0 or self._attached_to_a_transaction
613 self._changing_level > 0
614 or self._attached_to_a_transaction
615 or self._running_status
603 )
616 )
604 self._origpl = None
617 self._origpl = None
605
618
606 @requires_changing_any
619 @requires_changing_any
607 def copy(self, source, dest):
620 def copy(self, source, dest):
608 """Mark dest as a copy of source. Unmark dest if source is None."""
621 """Mark dest as a copy of source. Unmark dest if source is None."""
609 if source == dest:
622 if source == dest:
610 return
623 return
611 self._dirty = True
624 self._dirty = True
612 if source is not None:
625 if source is not None:
613 self._check_sparse(source)
626 self._check_sparse(source)
614 self._map.copymap[dest] = source
627 self._map.copymap[dest] = source
615 else:
628 else:
616 self._map.copymap.pop(dest, None)
629 self._map.copymap.pop(dest, None)
617
630
618 def copied(self, file):
631 def copied(self, file):
619 return self._map.copymap.get(file, None)
632 return self._map.copymap.get(file, None)
620
633
621 def copies(self):
634 def copies(self):
622 return self._map.copymap
635 return self._map.copymap
623
636
624 @requires_changing_files
637 @requires_changing_files
625 def set_tracked(self, filename, reset_copy=False):
638 def set_tracked(self, filename, reset_copy=False):
626 """a "public" method for generic code to mark a file as tracked
639 """a "public" method for generic code to mark a file as tracked
627
640
628 This function is to be called outside of "update/merge" case. For
641 This function is to be called outside of "update/merge" case. For
629 example by a command like `hg add X`.
642 example by a command like `hg add X`.
630
643
631 if reset_copy is set, any existing copy information will be dropped.
644 if reset_copy is set, any existing copy information will be dropped.
632
645
633         return True if the file was previously untracked, False otherwise.
646         return True if the file was previously untracked, False otherwise.
634 """
647 """
635 self._dirty = True
648 self._dirty = True
636 entry = self._map.get(filename)
649 entry = self._map.get(filename)
637 if entry is None or not entry.tracked:
650 if entry is None or not entry.tracked:
638 self._check_new_tracked_filename(filename)
651 self._check_new_tracked_filename(filename)
639 pre_tracked = self._map.set_tracked(filename)
652 pre_tracked = self._map.set_tracked(filename)
640 if reset_copy:
653 if reset_copy:
641 self._map.copymap.pop(filename, None)
654 self._map.copymap.pop(filename, None)
642 if pre_tracked:
655 if pre_tracked:
643 self._dirty_tracked_set = True
656 self._dirty_tracked_set = True
644 return pre_tracked
657 return pre_tracked
645
658
646 @requires_changing_files
659 @requires_changing_files
647 def set_untracked(self, filename):
660 def set_untracked(self, filename):
648 """a "public" method for generic code to mark a file as untracked
661 """a "public" method for generic code to mark a file as untracked
649
662
650 This function is to be called outside of "update/merge" case. For
663 This function is to be called outside of "update/merge" case. For
651 example by a command like `hg remove X`.
664 example by a command like `hg remove X`.
652
665
653         return True if the file was previously tracked, False otherwise.
666         return True if the file was previously tracked, False otherwise.
654 """
667 """
655 ret = self._map.set_untracked(filename)
668 ret = self._map.set_untracked(filename)
656 if ret:
669 if ret:
657 self._dirty = True
670 self._dirty = True
658 self._dirty_tracked_set = True
671 self._dirty_tracked_set = True
659 return ret
672 return ret
660
673
661 @requires_not_changing_parents
674 @requires_not_changing_parents
662 def set_clean(self, filename, parentfiledata):
675 def set_clean(self, filename, parentfiledata):
663 """record that the current state of the file on disk is known to be clean"""
676 """record that the current state of the file on disk is known to be clean"""
664 self._dirty = True
677 self._dirty = True
665 if not self._map[filename].tracked:
678 if not self._map[filename].tracked:
666 self._check_new_tracked_filename(filename)
679 self._check_new_tracked_filename(filename)
667 (mode, size, mtime) = parentfiledata
680 (mode, size, mtime) = parentfiledata
668 self._map.set_clean(filename, mode, size, mtime)
681 self._map.set_clean(filename, mode, size, mtime)
669
682
670 @requires_not_changing_parents
683 @requires_not_changing_parents
671 def set_possibly_dirty(self, filename):
684 def set_possibly_dirty(self, filename):
672 """record that the current state of the file on disk is unknown"""
685 """record that the current state of the file on disk is unknown"""
673 self._dirty = True
686 self._dirty = True
674 self._map.set_possibly_dirty(filename)
687 self._map.set_possibly_dirty(filename)
675
688
676 @requires_changing_parents
689 @requires_changing_parents
677 def update_file_p1(
690 def update_file_p1(
678 self,
691 self,
679 filename,
692 filename,
680 p1_tracked,
693 p1_tracked,
681 ):
694 ):
682 """Set a file as tracked in the parent (or not)
695 """Set a file as tracked in the parent (or not)
683
696
684         This is to be called when adjusting the dirstate to a new parent after a history
697         This is to be called when adjusting the dirstate to a new parent after a history
685 rewriting operation.
698 rewriting operation.
686
699
687 It should not be called during a merge (p2 != nullid) and only within
700 It should not be called during a merge (p2 != nullid) and only within
688 a `with dirstate.changing_parents(repo):` context.
701 a `with dirstate.changing_parents(repo):` context.
689 """
702 """
690 if self.in_merge:
703 if self.in_merge:
691 msg = b'update_file_reference should not be called when merging'
704 msg = b'update_file_reference should not be called when merging'
692 raise error.ProgrammingError(msg)
705 raise error.ProgrammingError(msg)
693 entry = self._map.get(filename)
706 entry = self._map.get(filename)
694 if entry is None:
707 if entry is None:
695 wc_tracked = False
708 wc_tracked = False
696 else:
709 else:
697 wc_tracked = entry.tracked
710 wc_tracked = entry.tracked
698 if not (p1_tracked or wc_tracked):
711 if not (p1_tracked or wc_tracked):
699 # the file is no longer relevant to anyone
712 # the file is no longer relevant to anyone
700 if self._map.get(filename) is not None:
713 if self._map.get(filename) is not None:
701 self._map.reset_state(filename)
714 self._map.reset_state(filename)
702 self._dirty = True
715 self._dirty = True
703 elif (not p1_tracked) and wc_tracked:
716 elif (not p1_tracked) and wc_tracked:
704 if entry is not None and entry.added:
717 if entry is not None and entry.added:
705 return # avoid dropping copy information (maybe?)
718 return # avoid dropping copy information (maybe?)
706
719
707 self._map.reset_state(
720 self._map.reset_state(
708 filename,
721 filename,
709 wc_tracked,
722 wc_tracked,
710 p1_tracked,
723 p1_tracked,
711 # the underlying reference might have changed, we will have to
724 # the underlying reference might have changed, we will have to
712 # check it.
725 # check it.
713 has_meaningful_mtime=False,
726 has_meaningful_mtime=False,
714 )
727 )
715
728
716 @requires_changing_parents
729 @requires_changing_parents
717 def update_file(
730 def update_file(
718 self,
731 self,
719 filename,
732 filename,
720 wc_tracked,
733 wc_tracked,
721 p1_tracked,
734 p1_tracked,
722 p2_info=False,
735 p2_info=False,
723 possibly_dirty=False,
736 possibly_dirty=False,
724 parentfiledata=None,
737 parentfiledata=None,
725 ):
738 ):
726 """update the information about a file in the dirstate
739 """update the information about a file in the dirstate
727
740
728         This is to be called when the dirstate's parent changes to keep track
741         This is to be called when the dirstate's parent changes to keep track
729 of what is the file situation in regards to the working copy and its parent.
742 of what is the file situation in regards to the working copy and its parent.
730
743
731 This function must be called within a `dirstate.changing_parents` context.
744 This function must be called within a `dirstate.changing_parents` context.
732
745
733 note: the API is at an early stage and we might need to adjust it
746 note: the API is at an early stage and we might need to adjust it
734         depending on what information ends up being relevant and useful to
747         depending on what information ends up being relevant and useful to
735 other processing.
748 other processing.
736 """
749 """
737 self._update_file(
750 self._update_file(
738 filename=filename,
751 filename=filename,
739 wc_tracked=wc_tracked,
752 wc_tracked=wc_tracked,
740 p1_tracked=p1_tracked,
753 p1_tracked=p1_tracked,
741 p2_info=p2_info,
754 p2_info=p2_info,
742 possibly_dirty=possibly_dirty,
755 possibly_dirty=possibly_dirty,
743 parentfiledata=parentfiledata,
756 parentfiledata=parentfiledata,
744 )
757 )
745
758
746     # XXX since this makes the dirstate dirty, we should enforce that it is done
759     # XXX since this makes the dirstate dirty, we should enforce that it is done
747     # within an appropriate change-context that scopes the change and ensure it
760     # within an appropriate change-context that scopes the change and ensure it
748 # eventually get written on disk (or rolled back)
761 # eventually get written on disk (or rolled back)
749 def hacky_extension_update_file(self, *args, **kwargs):
762 def hacky_extension_update_file(self, *args, **kwargs):
750 """NEVER USE THIS, YOU DO NOT NEED IT
763 """NEVER USE THIS, YOU DO NOT NEED IT
751
764
752 This function is a variant of "update_file" to be called by a small set
765 This function is a variant of "update_file" to be called by a small set
753 of extensions, it also adjust the internal state of file, but can be
766 of extensions, it also adjust the internal state of file, but can be
754         called outside a `changing_parents` context.
767         called outside a `changing_parents` context.
755
768
756         A very small number of extensions meddle with the working copy content
769         A very small number of extensions meddle with the working copy content
757 in a way that requires to adjust the dirstate accordingly. At the time
770 in a way that requires to adjust the dirstate accordingly. At the time
758 this command is written they are :
771 this command is written they are :
759 - keyword,
772 - keyword,
760 - largefile,
773 - largefile,
761 PLEASE DO NOT GROW THIS LIST ANY FURTHER.
774 PLEASE DO NOT GROW THIS LIST ANY FURTHER.
762
775
763 This function could probably be replaced by more semantic one (like
776 This function could probably be replaced by more semantic one (like
764 "adjust expected size" or "always revalidate file content", etc)
777 "adjust expected size" or "always revalidate file content", etc)
765         however at the time when this is written, this is too much of a detour
778         however at the time when this is written, this is too much of a detour
766 to be considered.
779 to be considered.
767 """
780 """
768 self._update_file(
781 self._update_file(
769 *args,
782 *args,
770 **kwargs,
783 **kwargs,
771 )
784 )
772
785
773 def _update_file(
786 def _update_file(
774 self,
787 self,
775 filename,
788 filename,
776 wc_tracked,
789 wc_tracked,
777 p1_tracked,
790 p1_tracked,
778 p2_info=False,
791 p2_info=False,
779 possibly_dirty=False,
792 possibly_dirty=False,
780 parentfiledata=None,
793 parentfiledata=None,
781 ):
794 ):
782
795
783 # note: I do not think we need to double check name clash here since we
796 # note: I do not think we need to double check name clash here since we
784         # are in an update/merge case that should already have taken care of
797         # are in an update/merge case that should already have taken care of
785 # this. The test agrees
798 # this. The test agrees
786
799
787 self._dirty = True
800 self._dirty = True
788 old_entry = self._map.get(filename)
801 old_entry = self._map.get(filename)
789 if old_entry is None:
802 if old_entry is None:
790 prev_tracked = False
803 prev_tracked = False
791 else:
804 else:
792 prev_tracked = old_entry.tracked
805 prev_tracked = old_entry.tracked
793 if prev_tracked != wc_tracked:
806 if prev_tracked != wc_tracked:
794 self._dirty_tracked_set = True
807 self._dirty_tracked_set = True
795
808
796 self._map.reset_state(
809 self._map.reset_state(
797 filename,
810 filename,
798 wc_tracked,
811 wc_tracked,
799 p1_tracked,
812 p1_tracked,
800 p2_info=p2_info,
813 p2_info=p2_info,
801 has_meaningful_mtime=not possibly_dirty,
814 has_meaningful_mtime=not possibly_dirty,
802 parentfiledata=parentfiledata,
815 parentfiledata=parentfiledata,
803 )
816 )
804
817
805 def _check_new_tracked_filename(self, filename):
818 def _check_new_tracked_filename(self, filename):
806 scmutil.checkfilename(filename)
819 scmutil.checkfilename(filename)
807 if self._map.hastrackeddir(filename):
820 if self._map.hastrackeddir(filename):
808 msg = _(b'directory %r already in dirstate')
821 msg = _(b'directory %r already in dirstate')
809 msg %= pycompat.bytestr(filename)
822 msg %= pycompat.bytestr(filename)
810 raise error.Abort(msg)
823 raise error.Abort(msg)
811 # shadows
824 # shadows
812 for d in pathutil.finddirs(filename):
825 for d in pathutil.finddirs(filename):
813 if self._map.hastrackeddir(d):
826 if self._map.hastrackeddir(d):
814 break
827 break
815 entry = self._map.get(d)
828 entry = self._map.get(d)
816 if entry is not None and not entry.removed:
829 if entry is not None and not entry.removed:
817 msg = _(b'file %r in dirstate clashes with %r')
830 msg = _(b'file %r in dirstate clashes with %r')
818 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
831 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
819 raise error.Abort(msg)
832 raise error.Abort(msg)
820 self._check_sparse(filename)
833 self._check_sparse(filename)
821
834
822 def _check_sparse(self, filename):
835 def _check_sparse(self, filename):
823 """Check that a filename is inside the sparse profile"""
836 """Check that a filename is inside the sparse profile"""
824 sparsematch = self._sparsematcher
837 sparsematch = self._sparsematcher
825 if sparsematch is not None and not sparsematch.always():
838 if sparsematch is not None and not sparsematch.always():
826 if not sparsematch(filename):
839 if not sparsematch(filename):
827 msg = _(b"cannot add '%s' - it is outside the sparse checkout")
840 msg = _(b"cannot add '%s' - it is outside the sparse checkout")
828 hint = _(
841 hint = _(
829 b'include file with `hg debugsparse --include <pattern>` or use '
842 b'include file with `hg debugsparse --include <pattern>` or use '
830 b'`hg add -s <file>` to include file directory while adding'
843 b'`hg add -s <file>` to include file directory while adding'
831 )
844 )
832 raise error.Abort(msg % filename, hint=hint)
845 raise error.Abort(msg % filename, hint=hint)
833
846
834 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
847 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
835 if exists is None:
848 if exists is None:
836 exists = os.path.lexists(os.path.join(self._root, path))
849 exists = os.path.lexists(os.path.join(self._root, path))
837 if not exists:
850 if not exists:
838 # Maybe a path component exists
851 # Maybe a path component exists
839 if not ignoremissing and b'/' in path:
852 if not ignoremissing and b'/' in path:
840 d, f = path.rsplit(b'/', 1)
853 d, f = path.rsplit(b'/', 1)
841 d = self._normalize(d, False, ignoremissing, None)
854 d = self._normalize(d, False, ignoremissing, None)
842 folded = d + b"/" + f
855 folded = d + b"/" + f
843 else:
856 else:
844 # No path components, preserve original case
857 # No path components, preserve original case
845 folded = path
858 folded = path
846 else:
859 else:
847 # recursively normalize leading directory components
860 # recursively normalize leading directory components
848 # against dirstate
861 # against dirstate
849 if b'/' in normed:
862 if b'/' in normed:
850 d, f = normed.rsplit(b'/', 1)
863 d, f = normed.rsplit(b'/', 1)
851 d = self._normalize(d, False, ignoremissing, True)
864 d = self._normalize(d, False, ignoremissing, True)
852 r = self._root + b"/" + d
865 r = self._root + b"/" + d
853 folded = d + b"/" + util.fspath(f, r)
866 folded = d + b"/" + util.fspath(f, r)
854 else:
867 else:
855 folded = util.fspath(normed, self._root)
868 folded = util.fspath(normed, self._root)
856 storemap[normed] = folded
869 storemap[normed] = folded
857
870
858 return folded
871 return folded
859
872
860 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
873 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
861 normed = util.normcase(path)
874 normed = util.normcase(path)
862 folded = self._map.filefoldmap.get(normed, None)
875 folded = self._map.filefoldmap.get(normed, None)
863 if folded is None:
876 if folded is None:
864 if isknown:
877 if isknown:
865 folded = path
878 folded = path
866 else:
879 else:
867 folded = self._discoverpath(
880 folded = self._discoverpath(
868 path, normed, ignoremissing, exists, self._map.filefoldmap
881 path, normed, ignoremissing, exists, self._map.filefoldmap
869 )
882 )
870 return folded
883 return folded
871
884
872 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
885 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
873 normed = util.normcase(path)
886 normed = util.normcase(path)
874 folded = self._map.filefoldmap.get(normed, None)
887 folded = self._map.filefoldmap.get(normed, None)
875 if folded is None:
888 if folded is None:
876 folded = self._map.dirfoldmap.get(normed, None)
889 folded = self._map.dirfoldmap.get(normed, None)
877 if folded is None:
890 if folded is None:
878 if isknown:
891 if isknown:
879 folded = path
892 folded = path
880 else:
893 else:
881 # store discovered result in dirfoldmap so that future
894 # store discovered result in dirfoldmap so that future
882 # normalizefile calls don't start matching directories
895 # normalizefile calls don't start matching directories
883 folded = self._discoverpath(
896 folded = self._discoverpath(
884 path, normed, ignoremissing, exists, self._map.dirfoldmap
897 path, normed, ignoremissing, exists, self._map.dirfoldmap
885 )
898 )
886 return folded
899 return folded
887
900
888 def normalize(self, path, isknown=False, ignoremissing=False):
901 def normalize(self, path, isknown=False, ignoremissing=False):
889 """
902 """
890 normalize the case of a pathname when on a casefolding filesystem
903 normalize the case of a pathname when on a casefolding filesystem
891
904
892 isknown specifies whether the filename came from walking the
905 isknown specifies whether the filename came from walking the
893 disk, to avoid extra filesystem access.
906 disk, to avoid extra filesystem access.
894
907
895         If ignoremissing is True, missing paths are returned
908         If ignoremissing is True, missing paths are returned
896 unchanged. Otherwise, we try harder to normalize possibly
909 unchanged. Otherwise, we try harder to normalize possibly
897 existing path components.
910 existing path components.
898
911
899 The normalized case is determined based on the following precedence:
912 The normalized case is determined based on the following precedence:
900
913
901 - version of name already stored in the dirstate
914 - version of name already stored in the dirstate
902 - version of name stored on disk
915 - version of name stored on disk
903 - version provided via command arguments
916 - version provided via command arguments
904 """
917 """
905
918
906 if self._checkcase:
919 if self._checkcase:
907 return self._normalize(path, isknown, ignoremissing)
920 return self._normalize(path, isknown, ignoremissing)
908 return path
921 return path
909
922
910 # XXX this method is barely used, as a result:
923 # XXX this method is barely used, as a result:
911 # - its semantic is unclear
924 # - its semantic is unclear
912 # - do we really needs it ?
925 # - do we really needs it ?
913 @requires_changing_parents
926 @requires_changing_parents
914 def clear(self):
927 def clear(self):
915 self._map.clear()
928 self._map.clear()
916 self._dirty = True
929 self._dirty = True
917
930
918 @requires_changing_parents
931 @requires_changing_parents
919 def rebuild(self, parent, allfiles, changedfiles=None):
932 def rebuild(self, parent, allfiles, changedfiles=None):
920 matcher = self._sparsematcher
933 matcher = self._sparsematcher
921 if matcher is not None and not matcher.always():
934 if matcher is not None and not matcher.always():
922 # should not add non-matching files
935 # should not add non-matching files
923 allfiles = [f for f in allfiles if matcher(f)]
936 allfiles = [f for f in allfiles if matcher(f)]
924 if changedfiles:
937 if changedfiles:
925 changedfiles = [f for f in changedfiles if matcher(f)]
938 changedfiles = [f for f in changedfiles if matcher(f)]
926
939
927 if changedfiles is not None:
940 if changedfiles is not None:
928 # these files will be deleted from the dirstate when they are
941 # these files will be deleted from the dirstate when they are
929 # not found to be in allfiles
942 # not found to be in allfiles
930 dirstatefilestoremove = {f for f in self if not matcher(f)}
943 dirstatefilestoremove = {f for f in self if not matcher(f)}
931 changedfiles = dirstatefilestoremove.union(changedfiles)
944 changedfiles = dirstatefilestoremove.union(changedfiles)
932
945
933 if changedfiles is None:
946 if changedfiles is None:
934 # Rebuild entire dirstate
947 # Rebuild entire dirstate
935 to_lookup = allfiles
948 to_lookup = allfiles
936 to_drop = []
949 to_drop = []
937 self.clear()
950 self.clear()
938 elif len(changedfiles) < 10:
951 elif len(changedfiles) < 10:
939 # Avoid turning allfiles into a set, which can be expensive if it's
952 # Avoid turning allfiles into a set, which can be expensive if it's
940 # large.
953 # large.
941 to_lookup = []
954 to_lookup = []
942 to_drop = []
955 to_drop = []
943 for f in changedfiles:
956 for f in changedfiles:
944 if f in allfiles:
957 if f in allfiles:
945 to_lookup.append(f)
958 to_lookup.append(f)
946 else:
959 else:
947 to_drop.append(f)
960 to_drop.append(f)
948 else:
961 else:
949 changedfilesset = set(changedfiles)
962 changedfilesset = set(changedfiles)
950 to_lookup = changedfilesset & set(allfiles)
963 to_lookup = changedfilesset & set(allfiles)
951 to_drop = changedfilesset - to_lookup
964 to_drop = changedfilesset - to_lookup
952
965
953 if self._origpl is None:
966 if self._origpl is None:
954 self._origpl = self._pl
967 self._origpl = self._pl
955 self._map.setparents(parent, self._nodeconstants.nullid)
968 self._map.setparents(parent, self._nodeconstants.nullid)
956
969
957 for f in to_lookup:
970 for f in to_lookup:
958 if self.in_merge:
971 if self.in_merge:
959 self.set_tracked(f)
972 self.set_tracked(f)
960 else:
973 else:
961 self._map.reset_state(
974 self._map.reset_state(
962 f,
975 f,
963 wc_tracked=True,
976 wc_tracked=True,
964 p1_tracked=True,
977 p1_tracked=True,
965 )
978 )
966 for f in to_drop:
979 for f in to_drop:
967 self._map.reset_state(f)
980 self._map.reset_state(f)
968
981
969 self._dirty = True
982 self._dirty = True
970
983
971 def identity(self):
984 def identity(self):
972 """Return identity of dirstate itself to detect changing in storage
985 """Return identity of dirstate itself to detect changing in storage
973
986
974 If identity of previous dirstate is equal to this, writing
987 If identity of previous dirstate is equal to this, writing
975 changes based on the former dirstate out can keep consistency.
988 changes based on the former dirstate out can keep consistency.
976 """
989 """
977 return self._map.identity
990 return self._map.identity
978
991
979 def write(self, tr):
992 def write(self, tr):
980 if not self._dirty:
993 if not self._dirty:
981 return
994 return
982 # make sure we don't request a write of invalidated content
995 # make sure we don't request a write of invalidated content
983 # XXX move before the dirty check once `unlock` stop calling `write`
996 # XXX move before the dirty check once `unlock` stop calling `write`
984 assert not self._invalidated_context
997 assert not self._invalidated_context
985
998
986 write_key = self._use_tracked_hint and self._dirty_tracked_set
999 write_key = self._use_tracked_hint and self._dirty_tracked_set
987 if tr:
1000 if tr:
988
1001
989 def on_abort(tr):
1002 def on_abort(tr):
990 self._attached_to_a_transaction = False
1003 self._attached_to_a_transaction = False
991 self.invalidate()
1004 self.invalidate()
992
1005
993 # make sure we invalidate the current change on abort
1006 # make sure we invalidate the current change on abort
994 if tr is not None:
1007 if tr is not None:
995 tr.addabort(
1008 tr.addabort(
996 b'dirstate-invalidate%s' % self._tr_key_suffix,
1009 b'dirstate-invalidate%s' % self._tr_key_suffix,
997 on_abort,
1010 on_abort,
998 )
1011 )
999
1012
1000 self._attached_to_a_transaction = True
1013 self._attached_to_a_transaction = True
1001
1014
1002 def on_success(f):
1015 def on_success(f):
1003 self._attached_to_a_transaction = False
1016 self._attached_to_a_transaction = False
1004 self._writedirstate(tr, f),
1017 self._writedirstate(tr, f),
1005
1018
1006 # delay writing in-memory changes out
1019 # delay writing in-memory changes out
1007 tr.addfilegenerator(
1020 tr.addfilegenerator(
1008 b'dirstate-1-main%s' % self._tr_key_suffix,
1021 b'dirstate-1-main%s' % self._tr_key_suffix,
1009 (self._filename,),
1022 (self._filename,),
1010 on_success,
1023 on_success,
1011 location=b'plain',
1024 location=b'plain',
1012 post_finalize=True,
1025 post_finalize=True,
1013 )
1026 )
1014 if write_key:
1027 if write_key:
1015 tr.addfilegenerator(
1028 tr.addfilegenerator(
1016 b'dirstate-2-key-post%s' % self._tr_key_suffix,
1029 b'dirstate-2-key-post%s' % self._tr_key_suffix,
1017 (self._filename_th,),
1030 (self._filename_th,),
1018 lambda f: self._write_tracked_hint(tr, f),
1031 lambda f: self._write_tracked_hint(tr, f),
1019 location=b'plain',
1032 location=b'plain',
1020 post_finalize=True,
1033 post_finalize=True,
1021 )
1034 )
1022 return
1035 return
1023
1036
1024 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
1037 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
1025 with file(self._filename) as f:
1038 with file(self._filename) as f:
1026 self._writedirstate(tr, f)
1039 self._writedirstate(tr, f)
1027 if write_key:
1040 if write_key:
1028             # we update the key-file after writing to make sure readers have a
1041             # we update the key-file after writing to make sure readers have a
1029             # key that matches the newly written content
1042             # key that matches the newly written content
1030 with file(self._filename_th) as f:
1043 with file(self._filename_th) as f:
1031 self._write_tracked_hint(tr, f)
1044 self._write_tracked_hint(tr, f)
1032
1045
1033 def delete_tracked_hint(self):
1046 def delete_tracked_hint(self):
1034 """remove the tracked_hint file
1047 """remove the tracked_hint file
1035
1048
1036 To be used by format downgrades operation"""
1049 To be used by format downgrades operation"""
1037 self._opener.unlink(self._filename_th)
1050 self._opener.unlink(self._filename_th)
1038 self._use_tracked_hint = False
1051 self._use_tracked_hint = False
1039
1052
1040 def addparentchangecallback(self, category, callback):
1053 def addparentchangecallback(self, category, callback):
1041 """add a callback to be called when the wd parents are changed
1054 """add a callback to be called when the wd parents are changed
1042
1055
1043 Callback will be called with the following arguments:
1056 Callback will be called with the following arguments:
1044 dirstate, (oldp1, oldp2), (newp1, newp2)
1057 dirstate, (oldp1, oldp2), (newp1, newp2)
1045
1058
1046 Category is a unique identifier to allow overwriting an old callback
1059 Category is a unique identifier to allow overwriting an old callback
1047 with a newer callback.
1060 with a newer callback.
1048 """
1061 """
1049 self._plchangecallbacks[category] = callback
1062 self._plchangecallbacks[category] = callback
1050
1063
1051 def _writedirstate(self, tr, st):
1064 def _writedirstate(self, tr, st):
1052 # make sure we don't write invalidated content
1065 # make sure we don't write invalidated content
1053 assert not self._invalidated_context
1066 assert not self._invalidated_context
1054 # notify callbacks about parents change
1067 # notify callbacks about parents change
1055 if self._origpl is not None and self._origpl != self._pl:
1068 if self._origpl is not None and self._origpl != self._pl:
1056 for c, callback in sorted(self._plchangecallbacks.items()):
1069 for c, callback in sorted(self._plchangecallbacks.items()):
1057 callback(self, self._origpl, self._pl)
1070 callback(self, self._origpl, self._pl)
1058 self._origpl = None
1071 self._origpl = None
1059 self._map.write(tr, st)
1072 self._map.write(tr, st)
1060 self._dirty = False
1073 self._dirty = False
1061 self._dirty_tracked_set = False
1074 self._dirty_tracked_set = False
1062
1075
1063 def _write_tracked_hint(self, tr, f):
1076 def _write_tracked_hint(self, tr, f):
1064 key = node.hex(uuid.uuid4().bytes)
1077 key = node.hex(uuid.uuid4().bytes)
1065 f.write(b"1\n%s\n" % key) # 1 is the format version
1078 f.write(b"1\n%s\n" % key) # 1 is the format version
1066
1079
1067 def _dirignore(self, f):
1080 def _dirignore(self, f):
1068 if self._ignore(f):
1081 if self._ignore(f):
1069 return True
1082 return True
1070 for p in pathutil.finddirs(f):
1083 for p in pathutil.finddirs(f):
1071 if self._ignore(p):
1084 if self._ignore(p):
1072 return True
1085 return True
1073 return False
1086 return False
1074
1087
1075 def _ignorefiles(self):
1088 def _ignorefiles(self):
1076 files = []
1089 files = []
1077 if os.path.exists(self._join(b'.hgignore')):
1090 if os.path.exists(self._join(b'.hgignore')):
1078 files.append(self._join(b'.hgignore'))
1091 files.append(self._join(b'.hgignore'))
1079 for name, path in self._ui.configitems(b"ui"):
1092 for name, path in self._ui.configitems(b"ui"):
1080 if name == b'ignore' or name.startswith(b'ignore.'):
1093 if name == b'ignore' or name.startswith(b'ignore.'):
1081 # we need to use os.path.join here rather than self._join
1094 # we need to use os.path.join here rather than self._join
1082 # because path is arbitrary and user-specified
1095 # because path is arbitrary and user-specified
1083 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1096 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1084 return files
1097 return files
1085
1098
1086 def _ignorefileandline(self, f):
1099 def _ignorefileandline(self, f):
1087 files = collections.deque(self._ignorefiles())
1100 files = collections.deque(self._ignorefiles())
1088 visited = set()
1101 visited = set()
1089 while files:
1102 while files:
1090 i = files.popleft()
1103 i = files.popleft()
1091 patterns = matchmod.readpatternfile(
1104 patterns = matchmod.readpatternfile(
1092 i, self._ui.warn, sourceinfo=True
1105 i, self._ui.warn, sourceinfo=True
1093 )
1106 )
1094 for pattern, lineno, line in patterns:
1107 for pattern, lineno, line in patterns:
1095 kind, p = matchmod._patsplit(pattern, b'glob')
1108 kind, p = matchmod._patsplit(pattern, b'glob')
1096 if kind == b"subinclude":
1109 if kind == b"subinclude":
1097 if p not in visited:
1110 if p not in visited:
1098 files.append(p)
1111 files.append(p)
1099 continue
1112 continue
1100 m = matchmod.match(
1113 m = matchmod.match(
1101 self._root, b'', [], [pattern], warn=self._ui.warn
1114 self._root, b'', [], [pattern], warn=self._ui.warn
1102 )
1115 )
1103 if m(f):
1116 if m(f):
1104 return (i, lineno, line)
1117 return (i, lineno, line)
1105 visited.add(i)
1118 visited.add(i)
1106 return (None, -1, b"")
1119 return (None, -1, b"")
1107
1120
1108 def _walkexplicit(self, match, subrepos):
1121 def _walkexplicit(self, match, subrepos):
1109 """Get stat data about the files explicitly specified by match.
1122 """Get stat data about the files explicitly specified by match.
1110
1123
1111 Return a triple (results, dirsfound, dirsnotfound).
1124 Return a triple (results, dirsfound, dirsnotfound).
1112 - results is a mapping from filename to stat result. It also contains
1125 - results is a mapping from filename to stat result. It also contains
1113 listings mapping subrepos and .hg to None.
1126 listings mapping subrepos and .hg to None.
1114 - dirsfound is a list of files found to be directories.
1127 - dirsfound is a list of files found to be directories.
1115 - dirsnotfound is a list of files that the dirstate thinks are
1128 - dirsnotfound is a list of files that the dirstate thinks are
1116 directories and that were not found."""
1129 directories and that were not found."""
1117
1130
1118 def badtype(mode):
1131 def badtype(mode):
1119 kind = _(b'unknown')
1132 kind = _(b'unknown')
1120 if stat.S_ISCHR(mode):
1133 if stat.S_ISCHR(mode):
1121 kind = _(b'character device')
1134 kind = _(b'character device')
1122 elif stat.S_ISBLK(mode):
1135 elif stat.S_ISBLK(mode):
1123 kind = _(b'block device')
1136 kind = _(b'block device')
1124 elif stat.S_ISFIFO(mode):
1137 elif stat.S_ISFIFO(mode):
1125 kind = _(b'fifo')
1138 kind = _(b'fifo')
1126 elif stat.S_ISSOCK(mode):
1139 elif stat.S_ISSOCK(mode):
1127 kind = _(b'socket')
1140 kind = _(b'socket')
1128 elif stat.S_ISDIR(mode):
1141 elif stat.S_ISDIR(mode):
1129 kind = _(b'directory')
1142 kind = _(b'directory')
1130 return _(b'unsupported file type (type is %s)') % kind
1143 return _(b'unsupported file type (type is %s)') % kind
1131
1144
1132 badfn = match.bad
1145 badfn = match.bad
1133 dmap = self._map
1146 dmap = self._map
1134 lstat = os.lstat
1147 lstat = os.lstat
1135 getkind = stat.S_IFMT
1148 getkind = stat.S_IFMT
1136 dirkind = stat.S_IFDIR
1149 dirkind = stat.S_IFDIR
1137 regkind = stat.S_IFREG
1150 regkind = stat.S_IFREG
1138 lnkkind = stat.S_IFLNK
1151 lnkkind = stat.S_IFLNK
1139 join = self._join
1152 join = self._join
1140 dirsfound = []
1153 dirsfound = []
1141 foundadd = dirsfound.append
1154 foundadd = dirsfound.append
1142 dirsnotfound = []
1155 dirsnotfound = []
1143 notfoundadd = dirsnotfound.append
1156 notfoundadd = dirsnotfound.append
1144
1157
1145 if not match.isexact() and self._checkcase:
1158 if not match.isexact() and self._checkcase:
1146 normalize = self._normalize
1159 normalize = self._normalize
1147 else:
1160 else:
1148 normalize = None
1161 normalize = None
1149
1162
1150 files = sorted(match.files())
1163 files = sorted(match.files())
1151 subrepos.sort()
1164 subrepos.sort()
1152 i, j = 0, 0
1165 i, j = 0, 0
1153 while i < len(files) and j < len(subrepos):
1166 while i < len(files) and j < len(subrepos):
1154 subpath = subrepos[j] + b"/"
1167 subpath = subrepos[j] + b"/"
1155 if files[i] < subpath:
1168 if files[i] < subpath:
1156 i += 1
1169 i += 1
1157 continue
1170 continue
1158 while i < len(files) and files[i].startswith(subpath):
1171 while i < len(files) and files[i].startswith(subpath):
1159 del files[i]
1172 del files[i]
1160 j += 1
1173 j += 1
1161
1174
1162 if not files or b'' in files:
1175 if not files or b'' in files:
1163 files = [b'']
1176 files = [b'']
1164 # constructing the foldmap is expensive, so don't do it for the
1177 # constructing the foldmap is expensive, so don't do it for the
1165 # common case where files is ['']
1178 # common case where files is ['']
1166 normalize = None
1179 normalize = None
1167 results = dict.fromkeys(subrepos)
1180 results = dict.fromkeys(subrepos)
1168 results[b'.hg'] = None
1181 results[b'.hg'] = None
1169
1182
1170 for ff in files:
1183 for ff in files:
1171 if normalize:
1184 if normalize:
1172 nf = normalize(ff, False, True)
1185 nf = normalize(ff, False, True)
1173 else:
1186 else:
1174 nf = ff
1187 nf = ff
1175 if nf in results:
1188 if nf in results:
1176 continue
1189 continue
1177
1190
1178 try:
1191 try:
1179 st = lstat(join(nf))
1192 st = lstat(join(nf))
1180 kind = getkind(st.st_mode)
1193 kind = getkind(st.st_mode)
1181 if kind == dirkind:
1194 if kind == dirkind:
1182 if nf in dmap:
1195 if nf in dmap:
1183 # file replaced by dir on disk but still in dirstate
1196 # file replaced by dir on disk but still in dirstate
1184 results[nf] = None
1197 results[nf] = None
1185 foundadd((nf, ff))
1198 foundadd((nf, ff))
1186 elif kind == regkind or kind == lnkkind:
1199 elif kind == regkind or kind == lnkkind:
1187 results[nf] = st
1200 results[nf] = st
1188 else:
1201 else:
1189 badfn(ff, badtype(kind))
1202 badfn(ff, badtype(kind))
1190 if nf in dmap:
1203 if nf in dmap:
1191 results[nf] = None
1204 results[nf] = None
1192 except (OSError) as inst:
1205 except (OSError) as inst:
1193 # nf not found on disk - it is dirstate only
1206 # nf not found on disk - it is dirstate only
1194 if nf in dmap: # does it exactly match a missing file?
1207 if nf in dmap: # does it exactly match a missing file?
1195 results[nf] = None
1208 results[nf] = None
1196 else: # does it match a missing directory?
1209 else: # does it match a missing directory?
1197 if self._map.hasdir(nf):
1210 if self._map.hasdir(nf):
1198 notfoundadd(nf)
1211 notfoundadd(nf)
1199 else:
1212 else:
1200 badfn(ff, encoding.strtolocal(inst.strerror))
1213 badfn(ff, encoding.strtolocal(inst.strerror))
1201
1214
1202 # match.files() may contain explicitly-specified paths that shouldn't
1215 # match.files() may contain explicitly-specified paths that shouldn't
1203 # be taken; drop them from the list of files found. dirsfound/notfound
1216 # be taken; drop them from the list of files found. dirsfound/notfound
1204 # aren't filtered here because they will be tested later.
1217 # aren't filtered here because they will be tested later.
1205 if match.anypats():
1218 if match.anypats():
1206 for f in list(results):
1219 for f in list(results):
1207 if f == b'.hg' or f in subrepos:
1220 if f == b'.hg' or f in subrepos:
1208 # keep sentinel to disable further out-of-repo walks
1221 # keep sentinel to disable further out-of-repo walks
1209 continue
1222 continue
1210 if not match(f):
1223 if not match(f):
1211 del results[f]
1224 del results[f]
1212
1225
1213 # Case insensitive filesystems cannot rely on lstat() failing to detect
1226 # Case insensitive filesystems cannot rely on lstat() failing to detect
1214 # a case-only rename. Prune the stat object for any file that does not
1227 # a case-only rename. Prune the stat object for any file that does not
1215 # match the case in the filesystem, if there are multiple files that
1228 # match the case in the filesystem, if there are multiple files that
1216 # normalize to the same path.
1229 # normalize to the same path.
1217 if match.isexact() and self._checkcase:
1230 if match.isexact() and self._checkcase:
1218 normed = {}
1231 normed = {}
1219
1232
1220 for f, st in results.items():
1233 for f, st in results.items():
1221 if st is None:
1234 if st is None:
1222 continue
1235 continue
1223
1236
1224 nc = util.normcase(f)
1237 nc = util.normcase(f)
1225 paths = normed.get(nc)
1238 paths = normed.get(nc)
1226
1239
1227 if paths is None:
1240 if paths is None:
1228 paths = set()
1241 paths = set()
1229 normed[nc] = paths
1242 normed[nc] = paths
1230
1243
1231 paths.add(f)
1244 paths.add(f)
1232
1245
1233 for norm, paths in normed.items():
1246 for norm, paths in normed.items():
1234 if len(paths) > 1:
1247 if len(paths) > 1:
1235 for path in paths:
1248 for path in paths:
1236 folded = self._discoverpath(
1249 folded = self._discoverpath(
1237 path, norm, True, None, self._map.dirfoldmap
1250 path, norm, True, None, self._map.dirfoldmap
1238 )
1251 )
1239 if path != folded:
1252 if path != folded:
1240 results[path] = None
1253 results[path] = None
1241
1254
1242 return results, dirsfound, dirsnotfound
1255 return results, dirsfound, dirsnotfound
1243
1256
1244 def walk(self, match, subrepos, unknown, ignored, full=True):
1257 def walk(self, match, subrepos, unknown, ignored, full=True):
1245 """
1258 """
1246 Walk recursively through the directory tree, finding all files
1259 Walk recursively through the directory tree, finding all files
1247 matched by match.
1260 matched by match.
1248
1261
1249 If full is False, maybe skip some known-clean files.
1262 If full is False, maybe skip some known-clean files.
1250
1263
1251 Return a dict mapping filename to stat-like object (either
1264 Return a dict mapping filename to stat-like object (either
1252 mercurial.osutil.stat instance or return value of os.stat()).
1265 mercurial.osutil.stat instance or return value of os.stat()).
1253
1266
1254 """
1267 """
1255 # full is a flag that extensions that hook into walk can use -- this
1268 # full is a flag that extensions that hook into walk can use -- this
1256 # implementation doesn't use it at all. This satisfies the contract
1269 # implementation doesn't use it at all. This satisfies the contract
1257 # because we only guarantee a "maybe".
1270 # because we only guarantee a "maybe".
1258
1271
1259 if ignored:
1272 if ignored:
1260 ignore = util.never
1273 ignore = util.never
1261 dirignore = util.never
1274 dirignore = util.never
1262 elif unknown:
1275 elif unknown:
1263 ignore = self._ignore
1276 ignore = self._ignore
1264 dirignore = self._dirignore
1277 dirignore = self._dirignore
1265 else:
1278 else:
1266 # if not unknown and not ignored, drop dir recursion and step 2
1279 # if not unknown and not ignored, drop dir recursion and step 2
1267 ignore = util.always
1280 ignore = util.always
1268 dirignore = util.always
1281 dirignore = util.always
1269
1282
1270 if self._sparsematchfn is not None:
1283 if self._sparsematchfn is not None:
1271 em = matchmod.exact(match.files())
1284 em = matchmod.exact(match.files())
1272 sm = matchmod.unionmatcher([self._sparsematcher, em])
1285 sm = matchmod.unionmatcher([self._sparsematcher, em])
1273 match = matchmod.intersectmatchers(match, sm)
1286 match = matchmod.intersectmatchers(match, sm)
1274
1287
1275 matchfn = match.matchfn
1288 matchfn = match.matchfn
1276 matchalways = match.always()
1289 matchalways = match.always()
1277 matchtdir = match.traversedir
1290 matchtdir = match.traversedir
1278 dmap = self._map
1291 dmap = self._map
1279 listdir = util.listdir
1292 listdir = util.listdir
1280 lstat = os.lstat
1293 lstat = os.lstat
1281 dirkind = stat.S_IFDIR
1294 dirkind = stat.S_IFDIR
1282 regkind = stat.S_IFREG
1295 regkind = stat.S_IFREG
1283 lnkkind = stat.S_IFLNK
1296 lnkkind = stat.S_IFLNK
1284 join = self._join
1297 join = self._join
1285
1298
1286 exact = skipstep3 = False
1299 exact = skipstep3 = False
1287 if match.isexact(): # match.exact
1300 if match.isexact(): # match.exact
1288 exact = True
1301 exact = True
1289 dirignore = util.always # skip step 2
1302 dirignore = util.always # skip step 2
1290 elif match.prefix(): # match.match, no patterns
1303 elif match.prefix(): # match.match, no patterns
1291 skipstep3 = True
1304 skipstep3 = True
1292
1305
1293 if not exact and self._checkcase:
1306 if not exact and self._checkcase:
1294 normalize = self._normalize
1307 normalize = self._normalize
1295 normalizefile = self._normalizefile
1308 normalizefile = self._normalizefile
1296 skipstep3 = False
1309 skipstep3 = False
1297 else:
1310 else:
1298 normalize = self._normalize
1311 normalize = self._normalize
1299 normalizefile = None
1312 normalizefile = None
1300
1313
1301 # step 1: find all explicit files
1314 # step 1: find all explicit files
1302 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1315 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1303 if matchtdir:
1316 if matchtdir:
1304 for d in work:
1317 for d in work:
1305 matchtdir(d[0])
1318 matchtdir(d[0])
1306 for d in dirsnotfound:
1319 for d in dirsnotfound:
1307 matchtdir(d)
1320 matchtdir(d)
1308
1321
1309 skipstep3 = skipstep3 and not (work or dirsnotfound)
1322 skipstep3 = skipstep3 and not (work or dirsnotfound)
1310 work = [d for d in work if not dirignore(d[0])]
1323 work = [d for d in work if not dirignore(d[0])]
1311
1324
1312 # step 2: visit subdirectories
1325 # step 2: visit subdirectories
1313 def traverse(work, alreadynormed):
1326 def traverse(work, alreadynormed):
1314 wadd = work.append
1327 wadd = work.append
1315 while work:
1328 while work:
1316 tracing.counter('dirstate.walk work', len(work))
1329 tracing.counter('dirstate.walk work', len(work))
1317 nd = work.pop()
1330 nd = work.pop()
1318 visitentries = match.visitchildrenset(nd)
1331 visitentries = match.visitchildrenset(nd)
1319 if not visitentries:
1332 if not visitentries:
1320 continue
1333 continue
1321 if visitentries == b'this' or visitentries == b'all':
1334 if visitentries == b'this' or visitentries == b'all':
1322 visitentries = None
1335 visitentries = None
1323 skip = None
1336 skip = None
1324 if nd != b'':
1337 if nd != b'':
1325 skip = b'.hg'
1338 skip = b'.hg'
1326 try:
1339 try:
1327 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1340 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1328 entries = listdir(join(nd), stat=True, skip=skip)
1341 entries = listdir(join(nd), stat=True, skip=skip)
1329 except (PermissionError, FileNotFoundError) as inst:
1342 except (PermissionError, FileNotFoundError) as inst:
1330 match.bad(
1343 match.bad(
1331 self.pathto(nd), encoding.strtolocal(inst.strerror)
1344 self.pathto(nd), encoding.strtolocal(inst.strerror)
1332 )
1345 )
1333 continue
1346 continue
1334 for f, kind, st in entries:
1347 for f, kind, st in entries:
1335 # Some matchers may return files in the visitentries set,
1348 # Some matchers may return files in the visitentries set,
1336 # instead of 'this', if the matcher explicitly mentions them
1349 # instead of 'this', if the matcher explicitly mentions them
1337 # and is not an exactmatcher. This is acceptable; we do not
1350 # and is not an exactmatcher. This is acceptable; we do not
1338 # make any hard assumptions about file-or-directory below
1351 # make any hard assumptions about file-or-directory below
1339 # based on the presence of `f` in visitentries. If
1352 # based on the presence of `f` in visitentries. If
1340 # visitchildrenset returned a set, we can always skip the
1353 # visitchildrenset returned a set, we can always skip the
1341 # entries *not* in the set it provided regardless of whether
1354 # entries *not* in the set it provided regardless of whether
1342 # they're actually a file or a directory.
1355 # they're actually a file or a directory.
1343 if visitentries and f not in visitentries:
1356 if visitentries and f not in visitentries:
1344 continue
1357 continue
1345 if normalizefile:
1358 if normalizefile:
1346 # even though f might be a directory, we're only
1359 # even though f might be a directory, we're only
1347 # interested in comparing it to files currently in the
1360 # interested in comparing it to files currently in the
1348 # dmap -- therefore normalizefile is enough
1361 # dmap -- therefore normalizefile is enough
1349 nf = normalizefile(
1362 nf = normalizefile(
1350 nd and (nd + b"/" + f) or f, True, True
1363 nd and (nd + b"/" + f) or f, True, True
1351 )
1364 )
1352 else:
1365 else:
1353 nf = nd and (nd + b"/" + f) or f
1366 nf = nd and (nd + b"/" + f) or f
1354 if nf not in results:
1367 if nf not in results:
1355 if kind == dirkind:
1368 if kind == dirkind:
1356 if not ignore(nf):
1369 if not ignore(nf):
1357 if matchtdir:
1370 if matchtdir:
1358 matchtdir(nf)
1371 matchtdir(nf)
1359 wadd(nf)
1372 wadd(nf)
1360 if nf in dmap and (matchalways or matchfn(nf)):
1373 if nf in dmap and (matchalways or matchfn(nf)):
1361 results[nf] = None
1374 results[nf] = None
1362 elif kind == regkind or kind == lnkkind:
1375 elif kind == regkind or kind == lnkkind:
1363 if nf in dmap:
1376 if nf in dmap:
1364 if matchalways or matchfn(nf):
1377 if matchalways or matchfn(nf):
1365 results[nf] = st
1378 results[nf] = st
1366 elif (matchalways or matchfn(nf)) and not ignore(
1379 elif (matchalways or matchfn(nf)) and not ignore(
1367 nf
1380 nf
1368 ):
1381 ):
1369 # unknown file -- normalize if necessary
1382 # unknown file -- normalize if necessary
1370 if not alreadynormed:
1383 if not alreadynormed:
1371 nf = normalize(nf, False, True)
1384 nf = normalize(nf, False, True)
1372 results[nf] = st
1385 results[nf] = st
1373 elif nf in dmap and (matchalways or matchfn(nf)):
1386 elif nf in dmap and (matchalways or matchfn(nf)):
1374 results[nf] = None
1387 results[nf] = None
1375
1388
1376 for nd, d in work:
1389 for nd, d in work:
1377 # alreadynormed means that processwork doesn't have to do any
1390 # alreadynormed means that processwork doesn't have to do any
1378 # expensive directory normalization
1391 # expensive directory normalization
1379 alreadynormed = not normalize or nd == d
1392 alreadynormed = not normalize or nd == d
1380 traverse([d], alreadynormed)
1393 traverse([d], alreadynormed)
1381
1394
1382 for s in subrepos:
1395 for s in subrepos:
1383 del results[s]
1396 del results[s]
1384 del results[b'.hg']
1397 del results[b'.hg']
1385
1398
1386 # step 3: visit remaining files from dmap
1399 # step 3: visit remaining files from dmap
1387 if not skipstep3 and not exact:
1400 if not skipstep3 and not exact:
1388 # If a dmap file is not in results yet, it was either
1401 # If a dmap file is not in results yet, it was either
1389 # a) not matching matchfn b) ignored, c) missing, or d) under a
1402 # a) not matching matchfn b) ignored, c) missing, or d) under a
1390 # symlink directory.
1403 # symlink directory.
1391 if not results and matchalways:
1404 if not results and matchalways:
1392 visit = [f for f in dmap]
1405 visit = [f for f in dmap]
1393 else:
1406 else:
1394 visit = [f for f in dmap if f not in results and matchfn(f)]
1407 visit = [f for f in dmap if f not in results and matchfn(f)]
1395 visit.sort()
1408 visit.sort()
1396
1409
1397 if unknown:
1410 if unknown:
1398 # unknown == True means we walked all dirs under the roots
1411 # unknown == True means we walked all dirs under the roots
1399 # that wasn't ignored, and everything that matched was stat'ed
1412 # that wasn't ignored, and everything that matched was stat'ed
1400 # and is already in results.
1413 # and is already in results.
1401 # The rest must thus be ignored or under a symlink.
1414 # The rest must thus be ignored or under a symlink.
1402 audit_path = pathutil.pathauditor(self._root, cached=True)
1415 audit_path = pathutil.pathauditor(self._root, cached=True)
1403
1416
1404 for nf in iter(visit):
1417 for nf in iter(visit):
1405 # If a stat for the same file was already added with a
1418 # If a stat for the same file was already added with a
1406 # different case, don't add one for this, since that would
1419 # different case, don't add one for this, since that would
1407 # make it appear as if the file exists under both names
1420 # make it appear as if the file exists under both names
1408 # on disk.
1421 # on disk.
1409 if (
1422 if (
1410 normalizefile
1423 normalizefile
1411 and normalizefile(nf, True, True) in results
1424 and normalizefile(nf, True, True) in results
1412 ):
1425 ):
1413 results[nf] = None
1426 results[nf] = None
1414 # Report ignored items in the dmap as long as they are not
1427 # Report ignored items in the dmap as long as they are not
1415 # under a symlink directory.
1428 # under a symlink directory.
1416 elif audit_path.check(nf):
1429 elif audit_path.check(nf):
1417 try:
1430 try:
1418 results[nf] = lstat(join(nf))
1431 results[nf] = lstat(join(nf))
1419 # file was just ignored, no links, and exists
1432 # file was just ignored, no links, and exists
1420 except OSError:
1433 except OSError:
1421 # file doesn't exist
1434 # file doesn't exist
1422 results[nf] = None
1435 results[nf] = None
1423 else:
1436 else:
1424 # It's either missing or under a symlink directory
1437 # It's either missing or under a symlink directory
1425 # which we in this case report as missing
1438 # which we in this case report as missing
1426 results[nf] = None
1439 results[nf] = None
1427 else:
1440 else:
1428 # We may not have walked the full directory tree above,
1441 # We may not have walked the full directory tree above,
1429 # so stat and check everything we missed.
1442 # so stat and check everything we missed.
1430 iv = iter(visit)
1443 iv = iter(visit)
1431 for st in util.statfiles([join(i) for i in visit]):
1444 for st in util.statfiles([join(i) for i in visit]):
1432 results[next(iv)] = st
1445 results[next(iv)] = st
1433 return results
1446 return results
1434
1447
1435 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1448 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1436 if self._sparsematchfn is not None:
1449 if self._sparsematchfn is not None:
1437 em = matchmod.exact(matcher.files())
1450 em = matchmod.exact(matcher.files())
1438 sm = matchmod.unionmatcher([self._sparsematcher, em])
1451 sm = matchmod.unionmatcher([self._sparsematcher, em])
1439 matcher = matchmod.intersectmatchers(matcher, sm)
1452 matcher = matchmod.intersectmatchers(matcher, sm)
1440 # Force Rayon (Rust parallelism library) to respect the number of
1453 # Force Rayon (Rust parallelism library) to respect the number of
1441 # workers. This is a temporary workaround until Rust code knows
1454 # workers. This is a temporary workaround until Rust code knows
1442 # how to read the config file.
1455 # how to read the config file.
1443 numcpus = self._ui.configint(b"worker", b"numcpus")
1456 numcpus = self._ui.configint(b"worker", b"numcpus")
1444 if numcpus is not None:
1457 if numcpus is not None:
1445 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1458 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1446
1459
1447 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1460 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1448 if not workers_enabled:
1461 if not workers_enabled:
1449 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1462 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1450
1463
1451 (
1464 (
1452 lookup,
1465 lookup,
1453 modified,
1466 modified,
1454 added,
1467 added,
1455 removed,
1468 removed,
1456 deleted,
1469 deleted,
1457 clean,
1470 clean,
1458 ignored,
1471 ignored,
1459 unknown,
1472 unknown,
1460 warnings,
1473 warnings,
1461 bad,
1474 bad,
1462 traversed,
1475 traversed,
1463 dirty,
1476 dirty,
1464 ) = rustmod.status(
1477 ) = rustmod.status(
1465 self._map._map,
1478 self._map._map,
1466 matcher,
1479 matcher,
1467 self._rootdir,
1480 self._rootdir,
1468 self._ignorefiles(),
1481 self._ignorefiles(),
1469 self._checkexec,
1482 self._checkexec,
1470 bool(list_clean),
1483 bool(list_clean),
1471 bool(list_ignored),
1484 bool(list_ignored),
1472 bool(list_unknown),
1485 bool(list_unknown),
1473 bool(matcher.traversedir),
1486 bool(matcher.traversedir),
1474 )
1487 )
1475
1488
1476 self._dirty |= dirty
1489 self._dirty |= dirty
1477
1490
1478 if matcher.traversedir:
1491 if matcher.traversedir:
1479 for dir in traversed:
1492 for dir in traversed:
1480 matcher.traversedir(dir)
1493 matcher.traversedir(dir)
1481
1494
1482 if self._ui.warn:
1495 if self._ui.warn:
1483 for item in warnings:
1496 for item in warnings:
1484 if isinstance(item, tuple):
1497 if isinstance(item, tuple):
1485 file_path, syntax = item
1498 file_path, syntax = item
1486 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1499 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1487 file_path,
1500 file_path,
1488 syntax,
1501 syntax,
1489 )
1502 )
1490 self._ui.warn(msg)
1503 self._ui.warn(msg)
1491 else:
1504 else:
1492 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1505 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1493 self._ui.warn(
1506 self._ui.warn(
1494 msg
1507 msg
1495 % (
1508 % (
1496 pathutil.canonpath(
1509 pathutil.canonpath(
1497 self._rootdir, self._rootdir, item
1510 self._rootdir, self._rootdir, item
1498 ),
1511 ),
1499 b"No such file or directory",
1512 b"No such file or directory",
1500 )
1513 )
1501 )
1514 )
1502
1515
1503 for fn, message in bad:
1516 for fn, message in bad:
1504 matcher.bad(fn, encoding.strtolocal(message))
1517 matcher.bad(fn, encoding.strtolocal(message))
1505
1518
1506 status = scmutil.status(
1519 status = scmutil.status(
1507 modified=modified,
1520 modified=modified,
1508 added=added,
1521 added=added,
1509 removed=removed,
1522 removed=removed,
1510 deleted=deleted,
1523 deleted=deleted,
1511 unknown=unknown,
1524 unknown=unknown,
1512 ignored=ignored,
1525 ignored=ignored,
1513 clean=clean,
1526 clean=clean,
1514 )
1527 )
1515 return (lookup, status)
1528 return (lookup, status)
1516
1529
    # XXX since this can make the dirstate dirty (through rust), we should
    # enforce that it is done within an appropriate change-context that scopes
    # the change and ensures it eventually gets written to disk (or rolled
    # back)
1520 def status(self, match, subrepos, ignored, clean, unknown):
1533 def status(self, match, subrepos, ignored, clean, unknown):
1521 """Determine the status of the working copy relative to the
1534 """Determine the status of the working copy relative to the
1522 dirstate and return a pair of (unsure, status), where status is of type
1535 dirstate and return a pair of (unsure, status), where status is of type
1523 scmutil.status and:
1536 scmutil.status and:
1524
1537
1525 unsure:
1538 unsure:
1526 files that might have been modified since the dirstate was
1539 files that might have been modified since the dirstate was
1527 written, but need to be read to be sure (size is the same
1540 written, but need to be read to be sure (size is the same
1528 but mtime differs)
1541 but mtime differs)
1529 status.modified:
1542 status.modified:
1530 files that have definitely been modified since the dirstate
1543 files that have definitely been modified since the dirstate
1531 was written (different size or mode)
1544 was written (different size or mode)
1532 status.clean:
1545 status.clean:
1533 files that have definitely not been modified since the
1546 files that have definitely not been modified since the
1534 dirstate was written
1547 dirstate was written
1535 """
1548 """
1536 listignored, listclean, listunknown = ignored, clean, unknown
1549 listignored, listclean, listunknown = ignored, clean, unknown
1537 lookup, modified, added, unknown, ignored = [], [], [], [], []
1550 lookup, modified, added, unknown, ignored = [], [], [], [], []
1538 removed, deleted, clean = [], [], []
1551 removed, deleted, clean = [], [], []
1539
1552
1540 dmap = self._map
1553 dmap = self._map
1541 dmap.preload()
1554 dmap.preload()
1542
1555
1543 use_rust = True
1556 use_rust = True
1544
1557
1545 allowed_matchers = (
1558 allowed_matchers = (
1546 matchmod.alwaysmatcher,
1559 matchmod.alwaysmatcher,
1547 matchmod.differencematcher,
1560 matchmod.differencematcher,
1548 matchmod.exactmatcher,
1561 matchmod.exactmatcher,
1549 matchmod.includematcher,
1562 matchmod.includematcher,
1550 matchmod.intersectionmatcher,
1563 matchmod.intersectionmatcher,
1551 matchmod.nevermatcher,
1564 matchmod.nevermatcher,
1552 matchmod.unionmatcher,
1565 matchmod.unionmatcher,
1553 )
1566 )
1554
1567
1555 if rustmod is None:
1568 if rustmod is None:
1556 use_rust = False
1569 use_rust = False
1557 elif self._checkcase:
1570 elif self._checkcase:
1558 # Case-insensitive filesystems are not handled yet
1571 # Case-insensitive filesystems are not handled yet
1559 use_rust = False
1572 use_rust = False
1560 elif subrepos:
1573 elif subrepos:
1561 use_rust = False
1574 use_rust = False
1562 elif not isinstance(match, allowed_matchers):
1575 elif not isinstance(match, allowed_matchers):
1563 # Some matchers have yet to be implemented
1576 # Some matchers have yet to be implemented
1564 use_rust = False
1577 use_rust = False
1565
1578
1566 # Get the time from the filesystem so we can disambiguate files that
1579 # Get the time from the filesystem so we can disambiguate files that
1567 # appear modified in the present or future.
1580 # appear modified in the present or future.
1568 try:
1581 try:
1569 mtime_boundary = timestamp.get_fs_now(self._opener)
1582 mtime_boundary = timestamp.get_fs_now(self._opener)
1570 except OSError:
1583 except OSError:
1571 # In largefiles or readonly context
1584 # In largefiles or readonly context
1572 mtime_boundary = None
1585 mtime_boundary = None
1573
1586
1574 if use_rust:
1587 if use_rust:
1575 try:
1588 try:
1576 res = self._rust_status(
1589 res = self._rust_status(
1577 match, listclean, listignored, listunknown
1590 match, listclean, listignored, listunknown
1578 )
1591 )
1579 return res + (mtime_boundary,)
1592 return res + (mtime_boundary,)
1580 except rustmod.FallbackError:
1593 except rustmod.FallbackError:
1581 pass
1594 pass
1582
1595
1583 def noop(f):
1596 def noop(f):
1584 pass
1597 pass
1585
1598
1586 dcontains = dmap.__contains__
1599 dcontains = dmap.__contains__
1587 dget = dmap.__getitem__
1600 dget = dmap.__getitem__
1588 ladd = lookup.append # aka "unsure"
1601 ladd = lookup.append # aka "unsure"
1589 madd = modified.append
1602 madd = modified.append
1590 aadd = added.append
1603 aadd = added.append
1591 uadd = unknown.append if listunknown else noop
1604 uadd = unknown.append if listunknown else noop
1592 iadd = ignored.append if listignored else noop
1605 iadd = ignored.append if listignored else noop
1593 radd = removed.append
1606 radd = removed.append
1594 dadd = deleted.append
1607 dadd = deleted.append
1595 cadd = clean.append if listclean else noop
1608 cadd = clean.append if listclean else noop
1596 mexact = match.exact
1609 mexact = match.exact
1597 dirignore = self._dirignore
1610 dirignore = self._dirignore
1598 checkexec = self._checkexec
1611 checkexec = self._checkexec
1599 checklink = self._checklink
1612 checklink = self._checklink
1600 copymap = self._map.copymap
1613 copymap = self._map.copymap
1601
1614
1602 # We need to do full walks when either
1615 # We need to do full walks when either
1603 # - we're listing all clean files, or
1616 # - we're listing all clean files, or
1604 # - match.traversedir does something, because match.traversedir should
1617 # - match.traversedir does something, because match.traversedir should
1605 # be called for every dir in the working dir
1618 # be called for every dir in the working dir
1606 full = listclean or match.traversedir is not None
1619 full = listclean or match.traversedir is not None
1607 for fn, st in self.walk(
1620 for fn, st in self.walk(
1608 match, subrepos, listunknown, listignored, full=full
1621 match, subrepos, listunknown, listignored, full=full
1609 ).items():
1622 ).items():
1610 if not dcontains(fn):
1623 if not dcontains(fn):
1611 if (listignored or mexact(fn)) and dirignore(fn):
1624 if (listignored or mexact(fn)) and dirignore(fn):
1612 if listignored:
1625 if listignored:
1613 iadd(fn)
1626 iadd(fn)
1614 else:
1627 else:
1615 uadd(fn)
1628 uadd(fn)
1616 continue
1629 continue
1617
1630
1618 t = dget(fn)
1631 t = dget(fn)
1619 mode = t.mode
1632 mode = t.mode
1620 size = t.size
1633 size = t.size
1621
1634
1622 if not st and t.tracked:
1635 if not st and t.tracked:
1623 dadd(fn)
1636 dadd(fn)
1624 elif t.p2_info:
1637 elif t.p2_info:
1625 madd(fn)
1638 madd(fn)
1626 elif t.added:
1639 elif t.added:
1627 aadd(fn)
1640 aadd(fn)
1628 elif t.removed:
1641 elif t.removed:
1629 radd(fn)
1642 radd(fn)
1630 elif t.tracked:
1643 elif t.tracked:
1631 if not checklink and t.has_fallback_symlink:
1644 if not checklink and t.has_fallback_symlink:
1632 # If the file system does not support symlink, the mode
1645 # If the file system does not support symlink, the mode
1633 # might not be correctly stored in the dirstate, so do not
1646 # might not be correctly stored in the dirstate, so do not
1634 # trust it.
1647 # trust it.
1635 ladd(fn)
1648 ladd(fn)
1636 elif not checkexec and t.has_fallback_exec:
1649 elif not checkexec and t.has_fallback_exec:
1637 # If the file system does not support exec bits, the mode
1650 # If the file system does not support exec bits, the mode
1638 # might not be correctly stored in the dirstate, so do not
1651 # might not be correctly stored in the dirstate, so do not
1639 # trust it.
1652 # trust it.
1640 ladd(fn)
1653 ladd(fn)
1641 elif (
1654 elif (
1642 size >= 0
1655 size >= 0
1643 and (
1656 and (
1644 (size != st.st_size and size != st.st_size & _rangemask)
1657 (size != st.st_size and size != st.st_size & _rangemask)
1645 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1658 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1646 )
1659 )
1647 or fn in copymap
1660 or fn in copymap
1648 ):
1661 ):
1649 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1662 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1650 # issue6456: Size returned may be longer due to
1663 # issue6456: Size returned may be longer due to
1651 # encryption on EXT-4 fscrypt, undecided.
1664 # encryption on EXT-4 fscrypt, undecided.
1652 ladd(fn)
1665 ladd(fn)
1653 else:
1666 else:
1654 madd(fn)
1667 madd(fn)
1655 elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
1668 elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
1656 # There might be a change in the future if for example the
1669 # There might be a change in the future if for example the
1657 # internal clock is off, but this is a case where the issues
1670 # internal clock is off, but this is a case where the issues
1658 # the user would face would be a lot worse and there is
1671 # the user would face would be a lot worse and there is
1659 # nothing we can really do.
1672 # nothing we can really do.
1660 ladd(fn)
1673 ladd(fn)
1661 elif listclean:
1674 elif listclean:
1662 cadd(fn)
1675 cadd(fn)
1663 status = scmutil.status(
1676 status = scmutil.status(
1664 modified, added, removed, deleted, unknown, ignored, clean
1677 modified, added, removed, deleted, unknown, ignored, clean
1665 )
1678 )
1666 return (lookup, status, mtime_boundary)
1679 return (lookup, status, mtime_boundary)
1667
1680
def matches(self, match):
    """Return every file present in the dirstate (in whatever state)
    that the given matcher accepts."""
    # With the Rust extension loaded, the Python-level map object is a
    # wrapper around the real map.
    mapping = self._map if rustmod is None else self._map._map

    if match.always():
        return mapping.keys()

    candidates = match.files()
    if match.isexact():
        # Fast path: filter the explicit file list instead of the map,
        # since that list is typically much smaller than the dirstate.
        return [name for name in candidates if name in mapping]
    if match.prefix() and all(name in mapping for name in candidates):
        # Fast path: every pattern directly names a tracked file, so the
        # result is exactly the explicit file list.
        return list(candidates)
    return [name for name in mapping if match(name)]
1688
1701
1689 def _actualfilename(self, tr):
1702 def _actualfilename(self, tr):
1690 if tr:
1703 if tr:
1691 return self._pendingfilename
1704 return self._pendingfilename
1692 else:
1705 else:
1693 return self._filename
1706 return self._filename
1694
1707
def all_file_names(self):
    """list all filenames currently used by this dirstate

    This is only used to do `hg rollback` related backup in the transaction
    """
    if not self._opener.exists(self._filename):
        # no data has ever been written to disk yet
        return ()
    if not self._use_dirstate_v2:
        return (self._filename,)
    # dirstate-v2 keeps the bulk of its data in a separate data file
    return (
        self._filename,
        self._map.docket.data_filename(),
    )
1710
1723
def verify(self, m1, m2, p1, narrow_matcher=None):
    """
    check the dirstate contents against the parent manifest and yield errors
    """
    # Error message templates; byte strings run through gettext.
    err_tracked_not_in_m1 = _(
        b"%s marked as tracked in p1 (%s) but not in manifest1\n"
    )
    err_added_in_m1 = _(b"%s marked as added, but also in manifest1\n")
    err_modified_nowhere = _(
        b"%s marked as modified, but not in either manifest\n"
    )
    err_in_m1_not_tracked = _(
        b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
    )
    # Pass 1: entries in the dirstate must be backed by the manifests.
    for filename, entry in self.items():
        if entry.p1_tracked:
            if entry.modified and filename not in m1 and filename not in m2:
                yield err_modified_nowhere % filename
            elif filename not in m1:
                yield err_tracked_not_in_m1 % (filename, node.short(p1))
        if entry.added and filename in m1:
            yield err_added_in_m1 % filename
    # Pass 2: files in manifest1 must be tracked as "in p1" by the dirstate.
    for filename in m1:
        if narrow_matcher is not None and not narrow_matcher(filename):
            # outside the narrow spec, not our concern
            continue
        if not self.get_entry(filename).p1_tracked:
            yield err_in_m1_not_tracked % (filename, node.short(p1))
General Comments 0
You need to be logged in to leave comments. Login now