##// END OF EJS Templates
dirstate: factor the transaction abort logic...
marmoute -
r51150:fa04407b default
parent child Browse files
Show More
@@ -1,1771 +1,1775 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
import collections
import contextlib
import functools
import os
import stat
import uuid

from .i18n import _
from .pycompat import delattr

from hgdemandimport import tracing

from . import (
    dirstatemap,
    encoding,
    error,
    match as matchmod,
    node,
    pathutil,
    policy,
    pycompat,
    scmutil,
    util,
)

from .dirstateutils import (
    timestamp,
)

from .interfaces import (
    dirstate as intdirstate,
    util as interfaceutil,
)
41
41
# compiled parsers module selected by the policy layer
parsers = policy.importmod('parsers')
# Rust implementation of the dirstate, or None when unavailable
rustmod = policy.importrust('dirstate')

HAS_FAST_DIRSTATE_V2 = rustmod is not None

# local aliases for frequently used helpers
propertycache = util.propertycache
filecache = scmutil.filecache
_rangemask = dirstatemap.rangemask

DirstateItem = dirstatemap.DirstateItem
52
52
53
53
class repocache(filecache):
    """filecache for files in .hg/"""

    def join(self, obj, fname):
        # resolve fname through the repository's `.hg` opener
        return obj._opener.join(fname)
59
59
60
60
class rootcache(filecache):
    """filecache for files in the repository root"""

    def join(self, obj, fname):
        # resolve fname relative to the working-directory root
        return obj._join(fname)
66
66
67
67
def check_invalidated(func):
    """check that the func is called with a non-invalidated dirstate

    The dirstate is in an "invalidated state" after an error occurred during
    its modification and remains so until we exited the top level scope that
    framed such change.
    """

    # functools.wraps preserves the wrapped function's __name__/__doc__ so
    # stacked decorators and tracebacks report the real method, not `wrap`.
    @functools.wraps(func)
    def wrap(self, *args, **kwargs):
        if self._invalidated_context:
            msg = 'calling `%s` after the dirstate was invalidated'
            msg %= func.__name__
            raise error.ProgrammingError(msg)
        return func(self, *args, **kwargs)

    return wrap
84
84
85
85
def requires_changing_parents(func):
    """Decorator enforcing an open `changing_parents` context."""

    def inner(self, *args, **kwargs):
        if not self.is_changing_parents:
            msg = 'calling `%s` outside of a changing_parents context'
            msg %= func.__name__
            raise error.ProgrammingError(msg)
        return func(self, *args, **kwargs)

    return check_invalidated(inner)
95
95
96
96
def requires_changing_files(func):
    """Decorator enforcing an open `changing_files` context."""

    def inner(self, *args, **kwargs):
        if not self.is_changing_files:
            msg = 'calling `%s` outside of a `changing_files`'
            msg %= func.__name__
            raise error.ProgrammingError(msg)
        return func(self, *args, **kwargs)

    return check_invalidated(inner)
106
106
107
107
def requires_changing_any(func):
    """Decorator enforcing that some changing context is open."""

    def inner(self, *args, **kwargs):
        if not self.is_changing_any:
            msg = 'calling `%s` outside of a changing context'
            msg %= func.__name__
            raise error.ProgrammingError(msg)
        return func(self, *args, **kwargs)

    return check_invalidated(inner)
117
117
118
118
def requires_changing_files_or_status(func):
    """Decorator enforcing a `changing_files` or `running_status` context."""

    def inner(self, *args, **kwargs):
        status_running = self._running_status > 0
        if not (self.is_changing_files or status_running):
            msg = (
                'calling `%s` outside of a changing_files '
                'or running_status context'
            )
            msg %= func.__name__
            raise error.ProgrammingError(msg)
        return func(self, *args, **kwargs)

    return check_invalidated(inner)
131
131
132
132
# identifiers for the two mutually exclusive kinds of dirstate change
CHANGE_TYPE_PARENTS = "parents"
CHANGE_TYPE_FILES = "files"
135
135
136
136
137 @interfaceutil.implementer(intdirstate.idirstate)
137 @interfaceutil.implementer(intdirstate.idirstate)
138 class dirstate:
138 class dirstate:
139
139
140 # used by largefile to avoid overwritting transaction callback
140 # used by largefile to avoid overwritting transaction callback
141 _tr_key_suffix = b''
141 _tr_key_suffix = b''
142
142
    def __init__(
        self,
        opener,
        ui,
        root,
        validate,
        sparsematchfn,
        nodeconstants,
        use_dirstate_v2,
        use_tracked_hint=False,
    ):
        """Create a new dirstate object.

        opener is an open()-like callable that can be used to open the
        dirstate file; root is the root of the directory tracked by
        the dirstate.
        """
        self._use_dirstate_v2 = use_dirstate_v2
        self._use_tracked_hint = use_tracked_hint
        self._nodeconstants = nodeconstants
        self._opener = opener
        self._validate = validate
        self._root = root
        # Either build a sparse-matcher or None if sparse is disabled
        self._sparsematchfn = sparsematchfn
        # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
        # UNC path pointing to root share (issue4557)
        self._rootdir = pathutil.normasprefix(root)
        # True if any internal state may be different (from on-disk state)
        self._dirty = False
        # True if the set of tracked files may be different
        self._dirty_tracked_set = False
        self._ui = ui
        self._filecache = {}
        # nesting level of `changing_parents` context
        self._changing_level = 0
        # the change currently underway (one of the CHANGE_TYPE_* constants)
        self._change_type = None
        # number of open _running_status context
        self._running_status = 0
        # True if the current dirstate changing operations have been
        # invalidated (used to make sure all nested contexts have been exited)
        self._invalidated_context = False
        self._attached_to_a_transaction = False
        self._filename = b'dirstate'
        self._filename_th = b'dirstate-tracked-hint'
        self._pendingfilename = b'%s.pending' % self._filename
        self._plchangecallbacks = {}
        self._origpl = None
        self._mapcls = dirstatemap.dirstatemap
        # Access and cache cwd early, so we don't access it for the first time
        # after a working-copy update caused it to not exist (accessing it then
        # raises an exception).
        self._cwd
197
197
198 def refresh(self):
198 def refresh(self):
199 if '_branch' in vars(self):
199 if '_branch' in vars(self):
200 del self._branch
200 del self._branch
201 if '_map' in vars(self) and self._map.may_need_refresh():
201 if '_map' in vars(self) and self._map.may_need_refresh():
202 self.invalidate()
202 self.invalidate()
203
203
204 def prefetch_parents(self):
204 def prefetch_parents(self):
205 """make sure the parents are loaded
205 """make sure the parents are loaded
206
206
207 Used to avoid a race condition.
207 Used to avoid a race condition.
208 """
208 """
209 self._pl
209 self._pl
210
210
    @contextlib.contextmanager
    @check_invalidated
    def running_status(self, repo):
        """Wrap a status operation

        This context is not mutually exclusive with the `changing_*` context.
        It also does not warrant for the `wlock` to be taken.

        If the wlock is taken, this context will behave in a simple way, and
        ensure the data are scheduled for write when leaving the top level
        context.

        If the lock is not taken, it will only warrant that the data are either
        committed (written) and rolled back (invalidated) when exiting the top
        level context. The write/invalidate action must be performed by the
        wrapped code.


        The expected logic is:

        A: read the dirstate
        B: run status
           This might make the dirstate dirty by updating cache,
           especially in Rust.
        C: do more "post status fixup if relevant
        D: try to take the w-lock (this will invalidate the changes if they were raced)
        E0: if dirstate changed on disk → discard change (done by dirstate internal)
        E1: elif lock was acquired → write the changes
        E2: else → discard the changes
        """
        has_lock = repo.currentwlock() is not None
        is_changing = self.is_changing_any
        tr = repo.currenttransaction()
        has_tr = tr is not None
        nested = bool(self._running_status)

        first_and_alone = not (is_changing or has_tr or nested)

        # enforce no change happened outside of a proper context.
        if first_and_alone and self._dirty:
            has_tr = repo.currenttransaction() is not None
            if not has_tr and self._changing_level == 0 and self._dirty:
                msg = "entering a status context, but dirstate is already dirty"
                raise error.ProgrammingError(msg)

        # only the outermost, lock-holding context writes the data itself
        should_write = has_lock and not (nested or is_changing)

        self._running_status += 1
        try:
            yield
        except Exception:
            self.invalidate()
            raise
        finally:
            self._running_status -= 1
            if self._invalidated_context:
                # an error occurred somewhere within this context: never
                # write, and drop any pending in-memory modifications
                should_write = False
                self.invalidate()

        if should_write:
            assert repo.currenttransaction() is tr
            self.write(tr)
        elif not has_lock:
            if self._dirty:
                # without the lock we cannot persist: warn and roll back
                msg = b'dirstate dirty while exiting an isolated status context'
                repo.ui.develwarn(msg)
                self.invalidate()
278
278
    @contextlib.contextmanager
    @check_invalidated
    def _changing(self, repo, change_type):
        """Open a dirstate-changing context of kind `change_type`.

        Requires the wlock; nested contexts of the same kind are allowed,
        mixing kinds is a programming error. The dirstate is written when
        the outermost context exits cleanly.
        """
        if repo.currentwlock() is None:
            msg = b"trying to change the dirstate without holding the wlock"
            raise error.ProgrammingError(msg)

        has_tr = repo.currenttransaction() is not None
        if not has_tr and self._changing_level == 0 and self._dirty:
            msg = b"entering a changing context, but dirstate is already dirty"
            repo.ui.develwarn(msg)

        assert self._changing_level >= 0
        # different type of change are mutually exclusive
        if self._change_type is None:
            assert self._changing_level == 0
            self._change_type = change_type
        elif self._change_type != change_type:
            msg = (
                'trying to open "%s" dirstate-changing context while a "%s" is'
                ' already open'
            )
            msg %= (change_type, self._change_type)
            raise error.ProgrammingError(msg)
        should_write = False
        self._changing_level += 1
        try:
            yield
        except:  # re-raises
            self.invalidate()  # this will set `_invalidated_context`
            raise
        finally:
            assert self._changing_level > 0
            self._changing_level -= 1
            # If the dirstate is being invalidated, call invalidate again.
            # This will throw away anything added by a upper context and
            # reset the `_invalidated_context` flag when relevant
            if self._changing_level <= 0:
                self._change_type = None
                assert self._changing_level == 0
                if self._invalidated_context:
                    # make sure we invalidate anything an upper context might
                    # have changed.
                    self.invalidate()
                else:
                    # clean top-level exit: schedule the write below
                    should_write = self._changing_level <= 0
        tr = repo.currenttransaction()
        if has_tr != (tr is not None):
            # a transaction must not start or finish while the dirstate is
            # being changed; that would break the write scheduling
            if has_tr:
                m = "transaction vanished while changing dirstate"
            else:
                m = "transaction appeared while changing dirstate"
            raise error.ProgrammingError(m)
        if should_write:
            self.write(tr)
334
334
335 @contextlib.contextmanager
335 @contextlib.contextmanager
336 def changing_parents(self, repo):
336 def changing_parents(self, repo):
337 with self._changing(repo, CHANGE_TYPE_PARENTS) as c:
337 with self._changing(repo, CHANGE_TYPE_PARENTS) as c:
338 yield c
338 yield c
339
339
340 @contextlib.contextmanager
340 @contextlib.contextmanager
341 def changing_files(self, repo):
341 def changing_files(self, repo):
342 with self._changing(repo, CHANGE_TYPE_FILES) as c:
342 with self._changing(repo, CHANGE_TYPE_FILES) as c:
343 yield c
343 yield c
344
344
345 # here to help migration to the new code
345 # here to help migration to the new code
346 def parentchange(self):
346 def parentchange(self):
347 msg = (
347 msg = (
348 "Mercurial 6.4 and later requires call to "
348 "Mercurial 6.4 and later requires call to "
349 "`dirstate.changing_parents(repo)`"
349 "`dirstate.changing_parents(repo)`"
350 )
350 )
351 raise error.ProgrammingError(msg)
351 raise error.ProgrammingError(msg)
352
352
353 @property
353 @property
354 def is_changing_any(self):
354 def is_changing_any(self):
355 """Returns true if the dirstate is in the middle of a set of changes.
355 """Returns true if the dirstate is in the middle of a set of changes.
356
356
357 This returns True for any kind of change.
357 This returns True for any kind of change.
358 """
358 """
359 return self._changing_level > 0
359 return self._changing_level > 0
360
360
361 def pendingparentchange(self):
361 def pendingparentchange(self):
362 return self.is_changing_parent()
362 return self.is_changing_parent()
363
363
364 def is_changing_parent(self):
364 def is_changing_parent(self):
365 """Returns true if the dirstate is in the middle of a set of changes
365 """Returns true if the dirstate is in the middle of a set of changes
366 that modify the dirstate parent.
366 that modify the dirstate parent.
367 """
367 """
368 self._ui.deprecwarn(b"dirstate.is_changing_parents", b"6.5")
368 self._ui.deprecwarn(b"dirstate.is_changing_parents", b"6.5")
369 return self.is_changing_parents
369 return self.is_changing_parents
370
370
371 @property
371 @property
372 def is_changing_parents(self):
372 def is_changing_parents(self):
373 """Returns true if the dirstate is in the middle of a set of changes
373 """Returns true if the dirstate is in the middle of a set of changes
374 that modify the dirstate parent.
374 that modify the dirstate parent.
375 """
375 """
376 if self._changing_level <= 0:
376 if self._changing_level <= 0:
377 return False
377 return False
378 return self._change_type == CHANGE_TYPE_PARENTS
378 return self._change_type == CHANGE_TYPE_PARENTS
379
379
380 @property
380 @property
381 def is_changing_files(self):
381 def is_changing_files(self):
382 """Returns true if the dirstate is in the middle of a set of changes
382 """Returns true if the dirstate is in the middle of a set of changes
383 that modify the files tracked or their sources.
383 that modify the files tracked or their sources.
384 """
384 """
385 if self._changing_level <= 0:
385 if self._changing_level <= 0:
386 return False
386 return False
387 return self._change_type == CHANGE_TYPE_FILES
387 return self._change_type == CHANGE_TYPE_FILES
388
388
389 @propertycache
389 @propertycache
390 def _map(self):
390 def _map(self):
391 """Return the dirstate contents (see documentation for dirstatemap)."""
391 """Return the dirstate contents (see documentation for dirstatemap)."""
392 return self._mapcls(
392 return self._mapcls(
393 self._ui,
393 self._ui,
394 self._opener,
394 self._opener,
395 self._root,
395 self._root,
396 self._nodeconstants,
396 self._nodeconstants,
397 self._use_dirstate_v2,
397 self._use_dirstate_v2,
398 )
398 )
399
399
400 @property
400 @property
401 def _sparsematcher(self):
401 def _sparsematcher(self):
402 """The matcher for the sparse checkout.
402 """The matcher for the sparse checkout.
403
403
404 The working directory may not include every file from a manifest. The
404 The working directory may not include every file from a manifest. The
405 matcher obtained by this property will match a path if it is to be
405 matcher obtained by this property will match a path if it is to be
406 included in the working directory.
406 included in the working directory.
407
407
408 When sparse if disabled, return None.
408 When sparse if disabled, return None.
409 """
409 """
410 if self._sparsematchfn is None:
410 if self._sparsematchfn is None:
411 return None
411 return None
412 # TODO there is potential to cache this property. For now, the matcher
412 # TODO there is potential to cache this property. For now, the matcher
413 # is resolved on every access. (But the called function does use a
413 # is resolved on every access. (But the called function does use a
414 # cache to keep the lookup fast.)
414 # cache to keep the lookup fast.)
415 return self._sparsematchfn()
415 return self._sparsematchfn()
416
416
417 @repocache(b'branch')
417 @repocache(b'branch')
418 def _branch(self):
418 def _branch(self):
419 try:
419 try:
420 return self._opener.read(b"branch").strip() or b"default"
420 return self._opener.read(b"branch").strip() or b"default"
421 except FileNotFoundError:
421 except FileNotFoundError:
422 return b"default"
422 return b"default"
423
423
424 @property
424 @property
425 def _pl(self):
425 def _pl(self):
426 return self._map.parents()
426 return self._map.parents()
427
427
428 def hasdir(self, d):
428 def hasdir(self, d):
429 return self._map.hastrackeddir(d)
429 return self._map.hastrackeddir(d)
430
430
431 @rootcache(b'.hgignore')
431 @rootcache(b'.hgignore')
432 def _ignore(self):
432 def _ignore(self):
433 files = self._ignorefiles()
433 files = self._ignorefiles()
434 if not files:
434 if not files:
435 return matchmod.never()
435 return matchmod.never()
436
436
437 pats = [b'include:%s' % f for f in files]
437 pats = [b'include:%s' % f for f in files]
438 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
438 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
439
439
440 @propertycache
440 @propertycache
441 def _slash(self):
441 def _slash(self):
442 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
442 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
443
443
444 @propertycache
444 @propertycache
445 def _checklink(self):
445 def _checklink(self):
446 return util.checklink(self._root)
446 return util.checklink(self._root)
447
447
448 @propertycache
448 @propertycache
449 def _checkexec(self):
449 def _checkexec(self):
450 return bool(util.checkexec(self._root))
450 return bool(util.checkexec(self._root))
451
451
452 @propertycache
452 @propertycache
453 def _checkcase(self):
453 def _checkcase(self):
454 return not util.fscasesensitive(self._join(b'.hg'))
454 return not util.fscasesensitive(self._join(b'.hg'))
455
455
456 def _join(self, f):
456 def _join(self, f):
457 # much faster than os.path.join()
457 # much faster than os.path.join()
458 # it's safe because f is always a relative path
458 # it's safe because f is always a relative path
459 return self._rootdir + f
459 return self._rootdir + f
460
460
    def flagfunc(self, buildfallback):
        """build a callable that returns flags associated with a filename

        The information is extracted from three possible layers:
        1. the file system if it supports the information
        2. the "fallback" information stored in the dirstate if any
        3. a more expensive mechanism inferring the flags from the parents.
        """

        # small hack to cache the result of buildfallback()
        fallback_func = []

        def get_flags(x):
            entry = None
            fallback_value = None
            try:
                st = os.lstat(self._join(x))
            except OSError:
                # file is gone from disk: report no flags
                return b''

            if self._checklink:
                if util.statislink(st):
                    return b'l'
            else:
                # filesystem cannot represent symlinks: consult the dirstate
                # fallback data, then the (lazily built) parent-based fallback
                entry = self.get_entry(x)
                if entry.has_fallback_symlink:
                    if entry.fallback_symlink:
                        return b'l'
                else:
                    if not fallback_func:
                        fallback_func.append(buildfallback())
                    fallback_value = fallback_func[0](x)
                    if b'l' in fallback_value:
                        return b'l'

            if self._checkexec:
                if util.statisexec(st):
                    return b'x'
            else:
                # same layered lookup for the exec bit; reuse `entry` and
                # `fallback_value` if the symlink branch already computed them
                if entry is None:
                    entry = self.get_entry(x)
                if entry.has_fallback_exec:
                    if entry.fallback_exec:
                        return b'x'
                else:
                    if fallback_value is None:
                        if not fallback_func:
                            fallback_func.append(buildfallback())
                        fallback_value = fallback_func[0](x)
                    if b'x' in fallback_value:
                        return b'x'
            return b''

        return get_flags
515
515
516 @propertycache
516 @propertycache
517 def _cwd(self):
517 def _cwd(self):
518 # internal config: ui.forcecwd
518 # internal config: ui.forcecwd
519 forcecwd = self._ui.config(b'ui', b'forcecwd')
519 forcecwd = self._ui.config(b'ui', b'forcecwd')
520 if forcecwd:
520 if forcecwd:
521 return forcecwd
521 return forcecwd
522 return encoding.getcwd()
522 return encoding.getcwd()
523
523
524 def getcwd(self):
524 def getcwd(self):
525 """Return the path from which a canonical path is calculated.
525 """Return the path from which a canonical path is calculated.
526
526
527 This path should be used to resolve file patterns or to convert
527 This path should be used to resolve file patterns or to convert
528 canonical paths back to file paths for display. It shouldn't be
528 canonical paths back to file paths for display. It shouldn't be
529 used to get real file paths. Use vfs functions instead.
529 used to get real file paths. Use vfs functions instead.
530 """
530 """
531 cwd = self._cwd
531 cwd = self._cwd
532 if cwd == self._root:
532 if cwd == self._root:
533 return b''
533 return b''
534 # self._root ends with a path separator if self._root is '/' or 'C:\'
534 # self._root ends with a path separator if self._root is '/' or 'C:\'
535 rootsep = self._root
535 rootsep = self._root
536 if not util.endswithsep(rootsep):
536 if not util.endswithsep(rootsep):
537 rootsep += pycompat.ossep
537 rootsep += pycompat.ossep
538 if cwd.startswith(rootsep):
538 if cwd.startswith(rootsep):
539 return cwd[len(rootsep) :]
539 return cwd[len(rootsep) :]
540 else:
540 else:
541 # we're outside the repo. return an absolute path.
541 # we're outside the repo. return an absolute path.
542 return cwd
542 return cwd
543
543
544 def pathto(self, f, cwd=None):
544 def pathto(self, f, cwd=None):
545 if cwd is None:
545 if cwd is None:
546 cwd = self.getcwd()
546 cwd = self.getcwd()
547 path = util.pathto(self._root, cwd, f)
547 path = util.pathto(self._root, cwd, f)
548 if self._slash:
548 if self._slash:
549 return util.pconvert(path)
549 return util.pconvert(path)
550 return path
550 return path
551
551
552 def get_entry(self, path):
552 def get_entry(self, path):
553 """return a DirstateItem for the associated path"""
553 """return a DirstateItem for the associated path"""
554 entry = self._map.get(path)
554 entry = self._map.get(path)
555 if entry is None:
555 if entry is None:
556 return DirstateItem()
556 return DirstateItem()
557 return entry
557 return entry
558
558
559 def __contains__(self, key):
559 def __contains__(self, key):
560 return key in self._map
560 return key in self._map
561
561
562 def __iter__(self):
562 def __iter__(self):
563 return iter(sorted(self._map))
563 return iter(sorted(self._map))
564
564
565 def items(self):
565 def items(self):
566 return self._map.items()
566 return self._map.items()
567
567
568 iteritems = items
568 iteritems = items
569
569
570 def parents(self):
570 def parents(self):
571 return [self._validate(p) for p in self._pl]
571 return [self._validate(p) for p in self._pl]
572
572
573 def p1(self):
573 def p1(self):
574 return self._validate(self._pl[0])
574 return self._validate(self._pl[0])
575
575
576 def p2(self):
576 def p2(self):
577 return self._validate(self._pl[1])
577 return self._validate(self._pl[1])
578
578
579 @property
579 @property
580 def in_merge(self):
580 def in_merge(self):
581 """True if a merge is in progress"""
581 """True if a merge is in progress"""
582 return self._pl[1] != self._nodeconstants.nullid
582 return self._pl[1] != self._nodeconstants.nullid
583
583
584 def branch(self):
584 def branch(self):
585 return encoding.tolocal(self._branch)
585 return encoding.tolocal(self._branch)
586
586
    @requires_changing_parents
    def setparents(self, p1, p2=None):
        """Set dirstate parents to p1 and p2.

        When moving from two parents to one, "merged" entries are
        adjusted to normal and previous copy records discarded and
        returned by the call.

        See localrepo.setparents()
        """
        if p2 is None:
            p2 = self._nodeconstants.nullid
        if self._changing_level == 0:
            raise ValueError(
                b"cannot set dirstate parent outside of "
                b"dirstate.changing_parents context manager"
            )

        self._dirty = True
        oldp2 = self._pl[1]
        if self._origpl is None:
            # remember the pre-change parents so that parent-change
            # callbacks can later see the delta (see _writedirstate)
            self._origpl = self._pl
        nullid = self._nodeconstants.nullid
        # True if we need to fold p2 related state back to a linear case
        fold_p2 = oldp2 != nullid and p2 == nullid
        return self._map.setparents(p1, p2, fold_p2=fold_p2)
613
613
    def setbranch(self, branch):
        """Record `branch` (local encoding) as the current working branch.

        Persists the value both in memory and in the `branch` file.
        """
        self.__class__._branch.set(self, encoding.fromlocal(branch))
        vfs = self._opener
        with vfs(b'branch', b'w', atomictemp=True, checkambig=True) as f:
            f.write(self._branch + b'\n')
        # make sure filecache has the correct stat info for _branch after
        # replacing the underlying file
        #
        # XXX do we actually need this,
        # refreshing the attribute is quite cheap
        ce = self._filecache[b'_branch']
        if ce:
            ce.refresh()
627
627
    def invalidate(self):
        """Causes the next access to reread the dirstate.

        This is different from localrepo.invalidatedirstate() because it always
        rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
        check whether the dirstate has changed before rereading it."""

        # drop the cached state so the next attribute access reloads it
        for a in ("_map", "_branch", "_ignore"):
            if a in self.__dict__:
                delattr(self, a)
        self._dirty = False
        self._dirty_tracked_set = False
        # remember that pending changes were thrown away while something
        # (a changing context, a transaction, or a status run) was still
        # relying on them; writes assert on this flag later
        self._invalidated_context = bool(
            self._changing_level > 0
            or self._attached_to_a_transaction
            or self._running_status
        )
        self._origpl = None
646
646
647 @requires_changing_any
647 @requires_changing_any
648 def copy(self, source, dest):
648 def copy(self, source, dest):
649 """Mark dest as a copy of source. Unmark dest if source is None."""
649 """Mark dest as a copy of source. Unmark dest if source is None."""
650 if source == dest:
650 if source == dest:
651 return
651 return
652 self._dirty = True
652 self._dirty = True
653 if source is not None:
653 if source is not None:
654 self._check_sparse(source)
654 self._check_sparse(source)
655 self._map.copymap[dest] = source
655 self._map.copymap[dest] = source
656 else:
656 else:
657 self._map.copymap.pop(dest, None)
657 self._map.copymap.pop(dest, None)
658
658
659 def copied(self, file):
659 def copied(self, file):
660 return self._map.copymap.get(file, None)
660 return self._map.copymap.get(file, None)
661
661
662 def copies(self):
662 def copies(self):
663 return self._map.copymap
663 return self._map.copymap
664
664
665 @requires_changing_files
665 @requires_changing_files
666 def set_tracked(self, filename, reset_copy=False):
666 def set_tracked(self, filename, reset_copy=False):
667 """a "public" method for generic code to mark a file as tracked
667 """a "public" method for generic code to mark a file as tracked
668
668
669 This function is to be called outside of "update/merge" case. For
669 This function is to be called outside of "update/merge" case. For
670 example by a command like `hg add X`.
670 example by a command like `hg add X`.
671
671
672 if reset_copy is set, any existing copy information will be dropped.
672 if reset_copy is set, any existing copy information will be dropped.
673
673
674 return True the file was previously untracked, False otherwise.
674 return True the file was previously untracked, False otherwise.
675 """
675 """
676 self._dirty = True
676 self._dirty = True
677 entry = self._map.get(filename)
677 entry = self._map.get(filename)
678 if entry is None or not entry.tracked:
678 if entry is None or not entry.tracked:
679 self._check_new_tracked_filename(filename)
679 self._check_new_tracked_filename(filename)
680 pre_tracked = self._map.set_tracked(filename)
680 pre_tracked = self._map.set_tracked(filename)
681 if reset_copy:
681 if reset_copy:
682 self._map.copymap.pop(filename, None)
682 self._map.copymap.pop(filename, None)
683 if pre_tracked:
683 if pre_tracked:
684 self._dirty_tracked_set = True
684 self._dirty_tracked_set = True
685 return pre_tracked
685 return pre_tracked
686
686
687 @requires_changing_files
687 @requires_changing_files
688 def set_untracked(self, filename):
688 def set_untracked(self, filename):
689 """a "public" method for generic code to mark a file as untracked
689 """a "public" method for generic code to mark a file as untracked
690
690
691 This function is to be called outside of "update/merge" case. For
691 This function is to be called outside of "update/merge" case. For
692 example by a command like `hg remove X`.
692 example by a command like `hg remove X`.
693
693
694 return True the file was previously tracked, False otherwise.
694 return True the file was previously tracked, False otherwise.
695 """
695 """
696 ret = self._map.set_untracked(filename)
696 ret = self._map.set_untracked(filename)
697 if ret:
697 if ret:
698 self._dirty = True
698 self._dirty = True
699 self._dirty_tracked_set = True
699 self._dirty_tracked_set = True
700 return ret
700 return ret
701
701
702 @requires_changing_files_or_status
702 @requires_changing_files_or_status
703 def set_clean(self, filename, parentfiledata):
703 def set_clean(self, filename, parentfiledata):
704 """record that the current state of the file on disk is known to be clean"""
704 """record that the current state of the file on disk is known to be clean"""
705 self._dirty = True
705 self._dirty = True
706 if not self._map[filename].tracked:
706 if not self._map[filename].tracked:
707 self._check_new_tracked_filename(filename)
707 self._check_new_tracked_filename(filename)
708 (mode, size, mtime) = parentfiledata
708 (mode, size, mtime) = parentfiledata
709 self._map.set_clean(filename, mode, size, mtime)
709 self._map.set_clean(filename, mode, size, mtime)
710
710
711 @requires_changing_files_or_status
711 @requires_changing_files_or_status
712 def set_possibly_dirty(self, filename):
712 def set_possibly_dirty(self, filename):
713 """record that the current state of the file on disk is unknown"""
713 """record that the current state of the file on disk is unknown"""
714 self._dirty = True
714 self._dirty = True
715 self._map.set_possibly_dirty(filename)
715 self._map.set_possibly_dirty(filename)
716
716
    @requires_changing_parents
    def update_file_p1(
        self,
        filename,
        p1_tracked,
    ):
        """Set a file as tracked in the parent (or not)

        This is to be called when adjust the dirstate to a new parent after an history
        rewriting operation.

        It should not be called during a merge (p2 != nullid) and only within
        a `with dirstate.changing_parents(repo):` context.
        """
        if self.in_merge:
            msg = b'update_file_reference should not be called when merging'
            raise error.ProgrammingError(msg)
        entry = self._map.get(filename)
        if entry is None:
            wc_tracked = False
        else:
            wc_tracked = entry.tracked
        if not (p1_tracked or wc_tracked):
            # the file is no longer relevant to anyone
            if self._map.get(filename) is not None:
                self._map.reset_state(filename)
                self._dirty = True
        elif (not p1_tracked) and wc_tracked:
            if entry is not None and entry.added:
                return  # avoid dropping copy information (maybe?)

        self._map.reset_state(
            filename,
            wc_tracked,
            p1_tracked,
            # the underlying reference might have changed, we will have to
            # check it.
            has_meaningful_mtime=False,
        )
756
756
757 @requires_changing_parents
757 @requires_changing_parents
758 def update_file(
758 def update_file(
759 self,
759 self,
760 filename,
760 filename,
761 wc_tracked,
761 wc_tracked,
762 p1_tracked,
762 p1_tracked,
763 p2_info=False,
763 p2_info=False,
764 possibly_dirty=False,
764 possibly_dirty=False,
765 parentfiledata=None,
765 parentfiledata=None,
766 ):
766 ):
767 """update the information about a file in the dirstate
767 """update the information about a file in the dirstate
768
768
769 This is to be called when the direstates parent changes to keep track
769 This is to be called when the direstates parent changes to keep track
770 of what is the file situation in regards to the working copy and its parent.
770 of what is the file situation in regards to the working copy and its parent.
771
771
772 This function must be called within a `dirstate.changing_parents` context.
772 This function must be called within a `dirstate.changing_parents` context.
773
773
774 note: the API is at an early stage and we might need to adjust it
774 note: the API is at an early stage and we might need to adjust it
775 depending of what information ends up being relevant and useful to
775 depending of what information ends up being relevant and useful to
776 other processing.
776 other processing.
777 """
777 """
778 self._update_file(
778 self._update_file(
779 filename=filename,
779 filename=filename,
780 wc_tracked=wc_tracked,
780 wc_tracked=wc_tracked,
781 p1_tracked=p1_tracked,
781 p1_tracked=p1_tracked,
782 p2_info=p2_info,
782 p2_info=p2_info,
783 possibly_dirty=possibly_dirty,
783 possibly_dirty=possibly_dirty,
784 parentfiledata=parentfiledata,
784 parentfiledata=parentfiledata,
785 )
785 )
786
786
787 def hacky_extension_update_file(self, *args, **kwargs):
787 def hacky_extension_update_file(self, *args, **kwargs):
788 """NEVER USE THIS, YOU DO NOT NEED IT
788 """NEVER USE THIS, YOU DO NOT NEED IT
789
789
790 This function is a variant of "update_file" to be called by a small set
790 This function is a variant of "update_file" to be called by a small set
791 of extensions, it also adjust the internal state of file, but can be
791 of extensions, it also adjust the internal state of file, but can be
792 called outside an `changing_parents` context.
792 called outside an `changing_parents` context.
793
793
794 A very small number of extension meddle with the working copy content
794 A very small number of extension meddle with the working copy content
795 in a way that requires to adjust the dirstate accordingly. At the time
795 in a way that requires to adjust the dirstate accordingly. At the time
796 this command is written they are :
796 this command is written they are :
797 - keyword,
797 - keyword,
798 - largefile,
798 - largefile,
799 PLEASE DO NOT GROW THIS LIST ANY FURTHER.
799 PLEASE DO NOT GROW THIS LIST ANY FURTHER.
800
800
801 This function could probably be replaced by more semantic one (like
801 This function could probably be replaced by more semantic one (like
802 "adjust expected size" or "always revalidate file content", etc)
802 "adjust expected size" or "always revalidate file content", etc)
803 however at the time where this is writen, this is too much of a detour
803 however at the time where this is writen, this is too much of a detour
804 to be considered.
804 to be considered.
805 """
805 """
806 if not (self._changing_level > 0 or self._running_status > 0):
806 if not (self._changing_level > 0 or self._running_status > 0):
807 msg = "requires a changes context"
807 msg = "requires a changes context"
808 raise error.ProgrammingError(msg)
808 raise error.ProgrammingError(msg)
809 self._update_file(
809 self._update_file(
810 *args,
810 *args,
811 **kwargs,
811 **kwargs,
812 )
812 )
813
813
814 def _update_file(
814 def _update_file(
815 self,
815 self,
816 filename,
816 filename,
817 wc_tracked,
817 wc_tracked,
818 p1_tracked,
818 p1_tracked,
819 p2_info=False,
819 p2_info=False,
820 possibly_dirty=False,
820 possibly_dirty=False,
821 parentfiledata=None,
821 parentfiledata=None,
822 ):
822 ):
823
823
824 # note: I do not think we need to double check name clash here since we
824 # note: I do not think we need to double check name clash here since we
825 # are in a update/merge case that should already have taken care of
825 # are in a update/merge case that should already have taken care of
826 # this. The test agrees
826 # this. The test agrees
827
827
828 self._dirty = True
828 self._dirty = True
829 old_entry = self._map.get(filename)
829 old_entry = self._map.get(filename)
830 if old_entry is None:
830 if old_entry is None:
831 prev_tracked = False
831 prev_tracked = False
832 else:
832 else:
833 prev_tracked = old_entry.tracked
833 prev_tracked = old_entry.tracked
834 if prev_tracked != wc_tracked:
834 if prev_tracked != wc_tracked:
835 self._dirty_tracked_set = True
835 self._dirty_tracked_set = True
836
836
837 self._map.reset_state(
837 self._map.reset_state(
838 filename,
838 filename,
839 wc_tracked,
839 wc_tracked,
840 p1_tracked,
840 p1_tracked,
841 p2_info=p2_info,
841 p2_info=p2_info,
842 has_meaningful_mtime=not possibly_dirty,
842 has_meaningful_mtime=not possibly_dirty,
843 parentfiledata=parentfiledata,
843 parentfiledata=parentfiledata,
844 )
844 )
845
845
846 def _check_new_tracked_filename(self, filename):
846 def _check_new_tracked_filename(self, filename):
847 scmutil.checkfilename(filename)
847 scmutil.checkfilename(filename)
848 if self._map.hastrackeddir(filename):
848 if self._map.hastrackeddir(filename):
849 msg = _(b'directory %r already in dirstate')
849 msg = _(b'directory %r already in dirstate')
850 msg %= pycompat.bytestr(filename)
850 msg %= pycompat.bytestr(filename)
851 raise error.Abort(msg)
851 raise error.Abort(msg)
852 # shadows
852 # shadows
853 for d in pathutil.finddirs(filename):
853 for d in pathutil.finddirs(filename):
854 if self._map.hastrackeddir(d):
854 if self._map.hastrackeddir(d):
855 break
855 break
856 entry = self._map.get(d)
856 entry = self._map.get(d)
857 if entry is not None and not entry.removed:
857 if entry is not None and not entry.removed:
858 msg = _(b'file %r in dirstate clashes with %r')
858 msg = _(b'file %r in dirstate clashes with %r')
859 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
859 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
860 raise error.Abort(msg)
860 raise error.Abort(msg)
861 self._check_sparse(filename)
861 self._check_sparse(filename)
862
862
863 def _check_sparse(self, filename):
863 def _check_sparse(self, filename):
864 """Check that a filename is inside the sparse profile"""
864 """Check that a filename is inside the sparse profile"""
865 sparsematch = self._sparsematcher
865 sparsematch = self._sparsematcher
866 if sparsematch is not None and not sparsematch.always():
866 if sparsematch is not None and not sparsematch.always():
867 if not sparsematch(filename):
867 if not sparsematch(filename):
868 msg = _(b"cannot add '%s' - it is outside the sparse checkout")
868 msg = _(b"cannot add '%s' - it is outside the sparse checkout")
869 hint = _(
869 hint = _(
870 b'include file with `hg debugsparse --include <pattern>` or use '
870 b'include file with `hg debugsparse --include <pattern>` or use '
871 b'`hg add -s <file>` to include file directory while adding'
871 b'`hg add -s <file>` to include file directory while adding'
872 )
872 )
873 raise error.Abort(msg % filename, hint=hint)
873 raise error.Abort(msg % filename, hint=hint)
874
874
    def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
        """Resolve the on-disk casing of `path` and cache it in `storemap`.

        `normed` is the case-normalized form of `path`.  `exists` may be
        passed by the caller to avoid an extra filesystem check; when None
        it is determined here via lexists.  Only paths that actually exist
        get cached in `storemap` — missing paths are recomputed each call.
        """
        if exists is None:
            exists = os.path.lexists(os.path.join(self._root, path))
        if not exists:
            # Maybe a path component exists
            if not ignoremissing and b'/' in path:
                d, f = path.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, None)
                folded = d + b"/" + f
            else:
                # No path components, preserve original case
                folded = path
        else:
            # recursively normalize leading directory components
            # against dirstate
            if b'/' in normed:
                d, f = normed.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, True)
                r = self._root + b"/" + d
                folded = d + b"/" + util.fspath(f, r)
            else:
                folded = util.fspath(normed, self._root)
            storemap[normed] = folded

        return folded
900
900
901 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
901 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
902 normed = util.normcase(path)
902 normed = util.normcase(path)
903 folded = self._map.filefoldmap.get(normed, None)
903 folded = self._map.filefoldmap.get(normed, None)
904 if folded is None:
904 if folded is None:
905 if isknown:
905 if isknown:
906 folded = path
906 folded = path
907 else:
907 else:
908 folded = self._discoverpath(
908 folded = self._discoverpath(
909 path, normed, ignoremissing, exists, self._map.filefoldmap
909 path, normed, ignoremissing, exists, self._map.filefoldmap
910 )
910 )
911 return folded
911 return folded
912
912
913 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
913 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
914 normed = util.normcase(path)
914 normed = util.normcase(path)
915 folded = self._map.filefoldmap.get(normed, None)
915 folded = self._map.filefoldmap.get(normed, None)
916 if folded is None:
916 if folded is None:
917 folded = self._map.dirfoldmap.get(normed, None)
917 folded = self._map.dirfoldmap.get(normed, None)
918 if folded is None:
918 if folded is None:
919 if isknown:
919 if isknown:
920 folded = path
920 folded = path
921 else:
921 else:
922 # store discovered result in dirfoldmap so that future
922 # store discovered result in dirfoldmap so that future
923 # normalizefile calls don't start matching directories
923 # normalizefile calls don't start matching directories
924 folded = self._discoverpath(
924 folded = self._discoverpath(
925 path, normed, ignoremissing, exists, self._map.dirfoldmap
925 path, normed, ignoremissing, exists, self._map.dirfoldmap
926 )
926 )
927 return folded
927 return folded
928
928
929 def normalize(self, path, isknown=False, ignoremissing=False):
929 def normalize(self, path, isknown=False, ignoremissing=False):
930 """
930 """
931 normalize the case of a pathname when on a casefolding filesystem
931 normalize the case of a pathname when on a casefolding filesystem
932
932
933 isknown specifies whether the filename came from walking the
933 isknown specifies whether the filename came from walking the
934 disk, to avoid extra filesystem access.
934 disk, to avoid extra filesystem access.
935
935
936 If ignoremissing is True, missing path are returned
936 If ignoremissing is True, missing path are returned
937 unchanged. Otherwise, we try harder to normalize possibly
937 unchanged. Otherwise, we try harder to normalize possibly
938 existing path components.
938 existing path components.
939
939
940 The normalized case is determined based on the following precedence:
940 The normalized case is determined based on the following precedence:
941
941
942 - version of name already stored in the dirstate
942 - version of name already stored in the dirstate
943 - version of name stored on disk
943 - version of name stored on disk
944 - version provided via command arguments
944 - version provided via command arguments
945 """
945 """
946
946
947 if self._checkcase:
947 if self._checkcase:
948 return self._normalize(path, isknown, ignoremissing)
948 return self._normalize(path, isknown, ignoremissing)
949 return path
949 return path
950
950
951 # XXX this method is barely used, as a result:
951 # XXX this method is barely used, as a result:
952 # - its semantic is unclear
952 # - its semantic is unclear
953 # - do we really needs it ?
953 # - do we really needs it ?
954 @requires_changing_parents
954 @requires_changing_parents
955 def clear(self):
955 def clear(self):
956 self._map.clear()
956 self._map.clear()
957 self._dirty = True
957 self._dirty = True
958
958
    @requires_changing_parents
    def rebuild(self, parent, allfiles, changedfiles=None):
        """Rebuild the dirstate for `parent` from `allfiles`.

        When `changedfiles` is provided only those entries are refreshed
        (entries absent from `allfiles` are dropped); otherwise the whole
        dirstate is reset.
        """
        matcher = self._sparsematcher
        if matcher is not None and not matcher.always():
            # should not add non-matching files
            allfiles = [f for f in allfiles if matcher(f)]
            if changedfiles:
                changedfiles = [f for f in changedfiles if matcher(f)]

            if changedfiles is not None:
                # these files will be deleted from the dirstate when they are
                # not found to be in allfiles
                dirstatefilestoremove = {f for f in self if not matcher(f)}
                changedfiles = dirstatefilestoremove.union(changedfiles)

        if changedfiles is None:
            # Rebuild entire dirstate
            to_lookup = allfiles
            to_drop = []
            self.clear()
        elif len(changedfiles) < 10:
            # Avoid turning allfiles into a set, which can be expensive if it's
            # large.
            to_lookup = []
            to_drop = []
            for f in changedfiles:
                if f in allfiles:
                    to_lookup.append(f)
                else:
                    to_drop.append(f)
        else:
            changedfilesset = set(changedfiles)
            to_lookup = changedfilesset & set(allfiles)
            to_drop = changedfilesset - to_lookup

        if self._origpl is None:
            # remember pre-change parents for the parent-change callbacks
            self._origpl = self._pl
        self._map.setparents(parent, self._nodeconstants.nullid)

        for f in to_lookup:
            if self.in_merge:
                self.set_tracked(f)
            else:
                self._map.reset_state(
                    f,
                    wc_tracked=True,
                    p1_tracked=True,
                )
        for f in to_drop:
            self._map.reset_state(f)

        self._dirty = True
1011
1011
    def _setup_tr_abort(self, tr):
        """make sure we invalidate the current change on abort"""
        if tr is None:
            return

        def on_abort(tr):
            # the transaction no longer holds our pending write; drop the
            # in-memory changes so the next access rereads from disk
            self._attached_to_a_transaction = False
            self.invalidate()

        tr.addabort(
            b'dirstate-invalidate%s' % self._tr_key_suffix,
            on_abort,
        )
1025
1012 def write(self, tr):
1026 def write(self, tr):
1013 if not self._dirty:
1027 if not self._dirty:
1014 return
1028 return
1015 # make sure we don't request a write of invalidated content
1029 # make sure we don't request a write of invalidated content
1016 # XXX move before the dirty check once `unlock` stop calling `write`
1030 # XXX move before the dirty check once `unlock` stop calling `write`
1017 assert not self._invalidated_context
1031 assert not self._invalidated_context
1018
1032
1019 write_key = self._use_tracked_hint and self._dirty_tracked_set
1033 write_key = self._use_tracked_hint and self._dirty_tracked_set
1020 if tr:
1034 if tr:
1021
1035
1022 def on_abort(tr):
1036 self._setup_tr_abort(tr)
1023 self._attached_to_a_transaction = False
1024 self.invalidate()
1025
1026 # make sure we invalidate the current change on abort
1027 if tr is not None:
1028 tr.addabort(
1029 b'dirstate-invalidate%s' % self._tr_key_suffix,
1030 on_abort,
1031 )
1032
1033 self._attached_to_a_transaction = True
1037 self._attached_to_a_transaction = True
1034
1038
1035 def on_success(f):
1039 def on_success(f):
1036 self._attached_to_a_transaction = False
1040 self._attached_to_a_transaction = False
1037 self._writedirstate(tr, f),
1041 self._writedirstate(tr, f),
1038
1042
1039 # delay writing in-memory changes out
1043 # delay writing in-memory changes out
1040 tr.addfilegenerator(
1044 tr.addfilegenerator(
1041 b'dirstate-1-main%s' % self._tr_key_suffix,
1045 b'dirstate-1-main%s' % self._tr_key_suffix,
1042 (self._filename,),
1046 (self._filename,),
1043 on_success,
1047 on_success,
1044 location=b'plain',
1048 location=b'plain',
1045 post_finalize=True,
1049 post_finalize=True,
1046 )
1050 )
1047 if write_key:
1051 if write_key:
1048 tr.addfilegenerator(
1052 tr.addfilegenerator(
1049 b'dirstate-2-key-post%s' % self._tr_key_suffix,
1053 b'dirstate-2-key-post%s' % self._tr_key_suffix,
1050 (self._filename_th,),
1054 (self._filename_th,),
1051 lambda f: self._write_tracked_hint(tr, f),
1055 lambda f: self._write_tracked_hint(tr, f),
1052 location=b'plain',
1056 location=b'plain',
1053 post_finalize=True,
1057 post_finalize=True,
1054 )
1058 )
1055 return
1059 return
1056
1060
1057 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
1061 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
1058 with file(self._filename) as f:
1062 with file(self._filename) as f:
1059 self._writedirstate(tr, f)
1063 self._writedirstate(tr, f)
1060 if write_key:
1064 if write_key:
1061 # we update the key-file after writing to make sure reader have a
1065 # we update the key-file after writing to make sure reader have a
1062 # key that match the newly written content
1066 # key that match the newly written content
1063 with file(self._filename_th) as f:
1067 with file(self._filename_th) as f:
1064 self._write_tracked_hint(tr, f)
1068 self._write_tracked_hint(tr, f)
1065
1069
1066 def delete_tracked_hint(self):
1070 def delete_tracked_hint(self):
1067 """remove the tracked_hint file
1071 """remove the tracked_hint file
1068
1072
1069 To be used by format downgrades operation"""
1073 To be used by format downgrades operation"""
1070 self._opener.unlink(self._filename_th)
1074 self._opener.unlink(self._filename_th)
1071 self._use_tracked_hint = False
1075 self._use_tracked_hint = False
1072
1076
1073 def addparentchangecallback(self, category, callback):
1077 def addparentchangecallback(self, category, callback):
1074 """add a callback to be called when the wd parents are changed
1078 """add a callback to be called when the wd parents are changed
1075
1079
1076 Callback will be called with the following arguments:
1080 Callback will be called with the following arguments:
1077 dirstate, (oldp1, oldp2), (newp1, newp2)
1081 dirstate, (oldp1, oldp2), (newp1, newp2)
1078
1082
1079 Category is a unique identifier to allow overwriting an old callback
1083 Category is a unique identifier to allow overwriting an old callback
1080 with a newer callback.
1084 with a newer callback.
1081 """
1085 """
1082 self._plchangecallbacks[category] = callback
1086 self._plchangecallbacks[category] = callback
1083
1087
1084 def _writedirstate(self, tr, st):
1088 def _writedirstate(self, tr, st):
1085 # make sure we don't write invalidated content
1089 # make sure we don't write invalidated content
1086 assert not self._invalidated_context
1090 assert not self._invalidated_context
1087 # notify callbacks about parents change
1091 # notify callbacks about parents change
1088 if self._origpl is not None and self._origpl != self._pl:
1092 if self._origpl is not None and self._origpl != self._pl:
1089 for c, callback in sorted(self._plchangecallbacks.items()):
1093 for c, callback in sorted(self._plchangecallbacks.items()):
1090 callback(self, self._origpl, self._pl)
1094 callback(self, self._origpl, self._pl)
1091 self._origpl = None
1095 self._origpl = None
1092 self._map.write(tr, st)
1096 self._map.write(tr, st)
1093 self._dirty = False
1097 self._dirty = False
1094 self._dirty_tracked_set = False
1098 self._dirty_tracked_set = False
1095
1099
1096 def _write_tracked_hint(self, tr, f):
1100 def _write_tracked_hint(self, tr, f):
1097 key = node.hex(uuid.uuid4().bytes)
1101 key = node.hex(uuid.uuid4().bytes)
1098 f.write(b"1\n%s\n" % key) # 1 is the format version
1102 f.write(b"1\n%s\n" % key) # 1 is the format version
1099
1103
1100 def _dirignore(self, f):
1104 def _dirignore(self, f):
1101 if self._ignore(f):
1105 if self._ignore(f):
1102 return True
1106 return True
1103 for p in pathutil.finddirs(f):
1107 for p in pathutil.finddirs(f):
1104 if self._ignore(p):
1108 if self._ignore(p):
1105 return True
1109 return True
1106 return False
1110 return False
1107
1111
1108 def _ignorefiles(self):
1112 def _ignorefiles(self):
1109 files = []
1113 files = []
1110 if os.path.exists(self._join(b'.hgignore')):
1114 if os.path.exists(self._join(b'.hgignore')):
1111 files.append(self._join(b'.hgignore'))
1115 files.append(self._join(b'.hgignore'))
1112 for name, path in self._ui.configitems(b"ui"):
1116 for name, path in self._ui.configitems(b"ui"):
1113 if name == b'ignore' or name.startswith(b'ignore.'):
1117 if name == b'ignore' or name.startswith(b'ignore.'):
1114 # we need to use os.path.join here rather than self._join
1118 # we need to use os.path.join here rather than self._join
1115 # because path is arbitrary and user-specified
1119 # because path is arbitrary and user-specified
1116 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1120 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1117 return files
1121 return files
1118
1122
    def _ignorefileandline(self, f):
        """Return ``(file, lineno, line)`` of the first pattern matching ``f``.

        Walks every ignore file, including files reached transitively
        through ``subinclude`` patterns, and returns the originating file
        path, the 1-based line number and the raw line of the first
        pattern that matches ``f``.  Returns ``(None, -1, b"")`` when no
        pattern matches.
        """
        # Breadth-first walk over ignore files; ``visited`` records files
        # already processed so subinclude cycles terminate.
        files = collections.deque(self._ignorefiles())
        visited = set()
        while files:
            i = files.popleft()
            patterns = matchmod.readpatternfile(
                i, self._ui.warn, sourceinfo=True
            )
            for pattern, lineno, line in patterns:
                kind, p = matchmod._patsplit(pattern, b'glob')
                if kind == b"subinclude":
                    # Queue the referenced pattern file for a later pass
                    # instead of matching against it directly.
                    if p not in visited:
                        files.append(p)
                    continue
                m = matchmod.match(
                    self._root, b'', [], [pattern], warn=self._ui.warn
                )
                if m(f):
                    return (i, lineno, line)
            visited.add(i)
        return (None, -1, b"")
1140
1144
    def _walkexplicit(self, match, subrepos):
        """Get stat data about the files explicitly specified by match.

        Return a triple (results, dirsfound, dirsnotfound).
        - results is a mapping from filename to stat result. It also contains
          listings mapping subrepos and .hg to None.
        - dirsfound is a list of files found to be directories.
        - dirsnotfound is a list of files that the dirstate thinks are
          directories and that were not found."""

        def badtype(mode):
            # Map an unsupported stat mode to a human-readable message.
            kind = _(b'unknown')
            if stat.S_ISCHR(mode):
                kind = _(b'character device')
            elif stat.S_ISBLK(mode):
                kind = _(b'block device')
            elif stat.S_ISFIFO(mode):
                kind = _(b'fifo')
            elif stat.S_ISSOCK(mode):
                kind = _(b'socket')
            elif stat.S_ISDIR(mode):
                kind = _(b'directory')
            return _(b'unsupported file type (type is %s)') % kind

        # Hoist frequently used attributes/functions into locals.
        badfn = match.bad
        dmap = self._map
        lstat = os.lstat
        getkind = stat.S_IFMT
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join
        dirsfound = []
        foundadd = dirsfound.append
        dirsnotfound = []
        notfoundadd = dirsnotfound.append

        if not match.isexact() and self._checkcase:
            normalize = self._normalize
        else:
            normalize = None

        # Drop explicit files that live inside a subrepo: both lists are
        # sorted, so a single merge-style pass suffices.
        files = sorted(match.files())
        subrepos.sort()
        i, j = 0, 0
        while i < len(files) and j < len(subrepos):
            subpath = subrepos[j] + b"/"
            if files[i] < subpath:
                i += 1
                continue
            while i < len(files) and files[i].startswith(subpath):
                del files[i]
            j += 1

        if not files or b'' in files:
            files = [b'']
            # constructing the foldmap is expensive, so don't do it for the
            # common case where files is ['']
            normalize = None
        # Seed results with sentinels for subrepos and .hg so the walk
        # never descends into them.
        results = dict.fromkeys(subrepos)
        results[b'.hg'] = None

        for ff in files:
            if normalize:
                nf = normalize(ff, False, True)
            else:
                nf = ff
            if nf in results:
                continue

            try:
                st = lstat(join(nf))
                kind = getkind(st.st_mode)
                if kind == dirkind:
                    if nf in dmap:
                        # file replaced by dir on disk but still in dirstate
                        results[nf] = None
                    foundadd((nf, ff))
                elif kind == regkind or kind == lnkkind:
                    results[nf] = st
                else:
                    badfn(ff, badtype(kind))
                    if nf in dmap:
                        results[nf] = None
            except (OSError) as inst:
                # nf not found on disk - it is dirstate only
                if nf in dmap:  # does it exactly match a missing file?
                    results[nf] = None
                else:  # does it match a missing directory?
                    if self._map.hasdir(nf):
                        notfoundadd(nf)
                    else:
                        badfn(ff, encoding.strtolocal(inst.strerror))

        # match.files() may contain explicitly-specified paths that shouldn't
        # be taken; drop them from the list of files found. dirsfound/notfound
        # aren't filtered here because they will be tested later.
        if match.anypats():
            for f in list(results):
                if f == b'.hg' or f in subrepos:
                    # keep sentinel to disable further out-of-repo walks
                    continue
                if not match(f):
                    del results[f]

        # Case insensitive filesystems cannot rely on lstat() failing to detect
        # a case-only rename. Prune the stat object for any file that does not
        # match the case in the filesystem, if there are multiple files that
        # normalize to the same path.
        if match.isexact() and self._checkcase:
            # Group stat'ed paths by their case-normalized form.
            normed = {}

            for f, st in results.items():
                if st is None:
                    continue

                nc = util.normcase(f)
                paths = normed.get(nc)

                if paths is None:
                    paths = set()
                    normed[nc] = paths

                paths.add(f)

            for norm, paths in normed.items():
                if len(paths) > 1:
                    for path in paths:
                        folded = self._discoverpath(
                            path, norm, True, None, self._map.dirfoldmap
                        )
                        if path != folded:
                            # Not the on-disk spelling: drop its stat object.
                            results[path] = None

        return results, dirsfound, dirsnotfound
1276
1280
    def walk(self, match, subrepos, unknown, ignored, full=True):
        """
        Walk recursively through the directory tree, finding all files
        matched by match.

        If full is False, maybe skip some known-clean files.

        Return a dict mapping filename to stat-like object (either
        mercurial.osutil.stat instance or return value of os.stat()).

        """
        # full is a flag that extensions that hook into walk can use -- this
        # implementation doesn't use it at all. This satisfies the contract
        # because we only guarantee a "maybe".

        # Choose the ignore predicates according to which status classes
        # the caller asked for.
        if ignored:
            ignore = util.never
            dirignore = util.never
        elif unknown:
            ignore = self._ignore
            dirignore = self._dirignore
        else:
            # if not unknown and not ignored, drop dir recursion and step 2
            ignore = util.always
            dirignore = util.always

        if self._sparsematchfn is not None:
            # Restrict the walk to the sparse checkout, but always allow
            # the explicitly named files through.
            em = matchmod.exact(match.files())
            sm = matchmod.unionmatcher([self._sparsematcher, em])
            match = matchmod.intersectmatchers(match, sm)

        # Hoist frequently used attributes/functions into locals.
        matchfn = match.matchfn
        matchalways = match.always()
        matchtdir = match.traversedir
        dmap = self._map
        listdir = util.listdir
        lstat = os.lstat
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join

        exact = skipstep3 = False
        if match.isexact():  # match.exact
            exact = True
            dirignore = util.always  # skip step 2
        elif match.prefix():  # match.match, no patterns
            skipstep3 = True

        if not exact and self._checkcase:
            normalize = self._normalize
            normalizefile = self._normalizefile
            skipstep3 = False
        else:
            normalize = self._normalize
            normalizefile = None

        # step 1: find all explicit files
        results, work, dirsnotfound = self._walkexplicit(match, subrepos)
        if matchtdir:
            for d in work:
                matchtdir(d[0])
            for d in dirsnotfound:
                matchtdir(d)

        skipstep3 = skipstep3 and not (work or dirsnotfound)
        work = [d for d in work if not dirignore(d[0])]

        # step 2: visit subdirectories
        def traverse(work, alreadynormed):
            wadd = work.append
            while work:
                tracing.counter('dirstate.walk work', len(work))
                nd = work.pop()
                visitentries = match.visitchildrenset(nd)
                if not visitentries:
                    continue
                if visitentries == b'this' or visitentries == b'all':
                    visitentries = None
                skip = None
                if nd != b'':
                    # never descend into the .hg directory itself
                    skip = b'.hg'
                try:
                    with tracing.log('dirstate.walk.traverse listdir %s', nd):
                        entries = listdir(join(nd), stat=True, skip=skip)
                except (PermissionError, FileNotFoundError) as inst:
                    match.bad(
                        self.pathto(nd), encoding.strtolocal(inst.strerror)
                    )
                    continue
                for f, kind, st in entries:
                    # Some matchers may return files in the visitentries set,
                    # instead of 'this', if the matcher explicitly mentions them
                    # and is not an exactmatcher. This is acceptable; we do not
                    # make any hard assumptions about file-or-directory below
                    # based on the presence of `f` in visitentries. If
                    # visitchildrenset returned a set, we can always skip the
                    # entries *not* in the set it provided regardless of whether
                    # they're actually a file or a directory.
                    if visitentries and f not in visitentries:
                        continue
                    if normalizefile:
                        # even though f might be a directory, we're only
                        # interested in comparing it to files currently in the
                        # dmap -- therefore normalizefile is enough
                        nf = normalizefile(
                            nd and (nd + b"/" + f) or f, True, True
                        )
                    else:
                        nf = nd and (nd + b"/" + f) or f
                    if nf not in results:
                        if kind == dirkind:
                            if not ignore(nf):
                                if matchtdir:
                                    matchtdir(nf)
                                wadd(nf)
                            if nf in dmap and (matchalways or matchfn(nf)):
                                results[nf] = None
                        elif kind == regkind or kind == lnkkind:
                            if nf in dmap:
                                if matchalways or matchfn(nf):
                                    results[nf] = st
                            elif (matchalways or matchfn(nf)) and not ignore(
                                nf
                            ):
                                # unknown file -- normalize if necessary
                                if not alreadynormed:
                                    nf = normalize(nf, False, True)
                                results[nf] = st
                        elif nf in dmap and (matchalways or matchfn(nf)):
                            results[nf] = None

        for nd, d in work:
            # alreadynormed means that processwork doesn't have to do any
            # expensive directory normalization
            alreadynormed = not normalize or nd == d
            traverse([d], alreadynormed)

        # Drop the sentinels that kept the walk out of subrepos and .hg.
        for s in subrepos:
            del results[s]
        del results[b'.hg']

        # step 3: visit remaining files from dmap
        if not skipstep3 and not exact:
            # If a dmap file is not in results yet, it was either
            # a) not matching matchfn b) ignored, c) missing, or d) under a
            # symlink directory.
            if not results and matchalways:
                visit = [f for f in dmap]
            else:
                visit = [f for f in dmap if f not in results and matchfn(f)]
            visit.sort()

            if unknown:
                # unknown == True means we walked all dirs under the roots
                # that wasn't ignored, and everything that matched was stat'ed
                # and is already in results.
                # The rest must thus be ignored or under a symlink.
                audit_path = pathutil.pathauditor(self._root, cached=True)

                for nf in iter(visit):
                    # If a stat for the same file was already added with a
                    # different case, don't add one for this, since that would
                    # make it appear as if the file exists under both names
                    # on disk.
                    if (
                        normalizefile
                        and normalizefile(nf, True, True) in results
                    ):
                        results[nf] = None
                    # Report ignored items in the dmap as long as they are not
                    # under a symlink directory.
                    elif audit_path.check(nf):
                        try:
                            results[nf] = lstat(join(nf))
                            # file was just ignored, no links, and exists
                        except OSError:
                            # file doesn't exist
                            results[nf] = None
                    else:
                        # It's either missing or under a symlink directory
                        # which we in this case report as missing
                        results[nf] = None
            else:
                # We may not have walked the full directory tree above,
                # so stat and check everything we missed.
                iv = iter(visit)
                for st in util.statfiles([join(i) for i in visit]):
                    results[next(iv)] = st
        return results
1467
1471
    def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
        """Compute status via the Rust fast path; return ``(lookup, status)``.

        ``lookup`` is the list of files whose state could not be decided
        without reading their content; ``status`` is a ``scmutil.status``
        object.  May raise ``rustmod.FallbackError`` (the caller falls
        back to the Python implementation).
        """
        if self._sparsematchfn is not None:
            # Restrict to the sparse checkout, but always let explicitly
            # named files through.
            em = matchmod.exact(matcher.files())
            sm = matchmod.unionmatcher([self._sparsematcher, em])
            matcher = matchmod.intersectmatchers(matcher, sm)
        # Force Rayon (Rust parallelism library) to respect the number of
        # workers. This is a temporary workaround until Rust code knows
        # how to read the config file.
        numcpus = self._ui.configint(b"worker", b"numcpus")
        if numcpus is not None:
            encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)

        workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
        if not workers_enabled:
            encoding.environ[b"RAYON_NUM_THREADS"] = b"1"

        (
            lookup,
            modified,
            added,
            removed,
            deleted,
            clean,
            ignored,
            unknown,
            warnings,
            bad,
            traversed,
            dirty,
        ) = rustmod.status(
            self._map._map,
            matcher,
            self._rootdir,
            self._ignorefiles(),
            self._checkexec,
            bool(list_clean),
            bool(list_ignored),
            bool(list_unknown),
            bool(matcher.traversedir),
        )

        # The Rust side may have fixed up stale entries; remember that the
        # dirstate needs writing if so.
        self._dirty |= dirty

        if matcher.traversedir:
            for dir in traversed:
                matcher.traversedir(dir)

        if self._ui.warn:
            for item in warnings:
                if isinstance(item, tuple):
                    # (file_path, syntax): an invalid pattern inside an
                    # otherwise readable ignore file.
                    file_path, syntax = item
                    msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
                        file_path,
                        syntax,
                    )
                    self._ui.warn(msg)
                else:
                    # A plain path: the ignore file itself was unreadable.
                    msg = _(b"skipping unreadable pattern file '%s': %s\n")
                    self._ui.warn(
                        msg
                        % (
                            pathutil.canonpath(
                                self._rootdir, self._rootdir, item
                            ),
                            b"No such file or directory",
                        )
                    )

        for fn, message in bad:
            matcher.bad(fn, encoding.strtolocal(message))

        status = scmutil.status(
            modified=modified,
            added=added,
            removed=removed,
            deleted=deleted,
            unknown=unknown,
            ignored=ignored,
            clean=clean,
        )
        return (lookup, status)
1549
1553
1550 def status(self, match, subrepos, ignored, clean, unknown):
1554 def status(self, match, subrepos, ignored, clean, unknown):
1551 """Determine the status of the working copy relative to the
1555 """Determine the status of the working copy relative to the
1552 dirstate and return a pair of (unsure, status), where status is of type
1556 dirstate and return a pair of (unsure, status), where status is of type
1553 scmutil.status and:
1557 scmutil.status and:
1554
1558
1555 unsure:
1559 unsure:
1556 files that might have been modified since the dirstate was
1560 files that might have been modified since the dirstate was
1557 written, but need to be read to be sure (size is the same
1561 written, but need to be read to be sure (size is the same
1558 but mtime differs)
1562 but mtime differs)
1559 status.modified:
1563 status.modified:
1560 files that have definitely been modified since the dirstate
1564 files that have definitely been modified since the dirstate
1561 was written (different size or mode)
1565 was written (different size or mode)
1562 status.clean:
1566 status.clean:
1563 files that have definitely not been modified since the
1567 files that have definitely not been modified since the
1564 dirstate was written
1568 dirstate was written
1565 """
1569 """
1566 if not self._running_status:
1570 if not self._running_status:
1567 msg = "Calling `status` outside a `running_status` context"
1571 msg = "Calling `status` outside a `running_status` context"
1568 raise error.ProgrammingError(msg)
1572 raise error.ProgrammingError(msg)
1569 listignored, listclean, listunknown = ignored, clean, unknown
1573 listignored, listclean, listunknown = ignored, clean, unknown
1570 lookup, modified, added, unknown, ignored = [], [], [], [], []
1574 lookup, modified, added, unknown, ignored = [], [], [], [], []
1571 removed, deleted, clean = [], [], []
1575 removed, deleted, clean = [], [], []
1572
1576
1573 dmap = self._map
1577 dmap = self._map
1574 dmap.preload()
1578 dmap.preload()
1575
1579
1576 use_rust = True
1580 use_rust = True
1577
1581
1578 allowed_matchers = (
1582 allowed_matchers = (
1579 matchmod.alwaysmatcher,
1583 matchmod.alwaysmatcher,
1580 matchmod.differencematcher,
1584 matchmod.differencematcher,
1581 matchmod.exactmatcher,
1585 matchmod.exactmatcher,
1582 matchmod.includematcher,
1586 matchmod.includematcher,
1583 matchmod.intersectionmatcher,
1587 matchmod.intersectionmatcher,
1584 matchmod.nevermatcher,
1588 matchmod.nevermatcher,
1585 matchmod.unionmatcher,
1589 matchmod.unionmatcher,
1586 )
1590 )
1587
1591
1588 if rustmod is None:
1592 if rustmod is None:
1589 use_rust = False
1593 use_rust = False
1590 elif self._checkcase:
1594 elif self._checkcase:
1591 # Case-insensitive filesystems are not handled yet
1595 # Case-insensitive filesystems are not handled yet
1592 use_rust = False
1596 use_rust = False
1593 elif subrepos:
1597 elif subrepos:
1594 use_rust = False
1598 use_rust = False
1595 elif not isinstance(match, allowed_matchers):
1599 elif not isinstance(match, allowed_matchers):
1596 # Some matchers have yet to be implemented
1600 # Some matchers have yet to be implemented
1597 use_rust = False
1601 use_rust = False
1598
1602
1599 # Get the time from the filesystem so we can disambiguate files that
1603 # Get the time from the filesystem so we can disambiguate files that
1600 # appear modified in the present or future.
1604 # appear modified in the present or future.
1601 try:
1605 try:
1602 mtime_boundary = timestamp.get_fs_now(self._opener)
1606 mtime_boundary = timestamp.get_fs_now(self._opener)
1603 except OSError:
1607 except OSError:
1604 # In largefiles or readonly context
1608 # In largefiles or readonly context
1605 mtime_boundary = None
1609 mtime_boundary = None
1606
1610
1607 if use_rust:
1611 if use_rust:
1608 try:
1612 try:
1609 res = self._rust_status(
1613 res = self._rust_status(
1610 match, listclean, listignored, listunknown
1614 match, listclean, listignored, listunknown
1611 )
1615 )
1612 return res + (mtime_boundary,)
1616 return res + (mtime_boundary,)
1613 except rustmod.FallbackError:
1617 except rustmod.FallbackError:
1614 pass
1618 pass
1615
1619
1616 def noop(f):
1620 def noop(f):
1617 pass
1621 pass
1618
1622
1619 dcontains = dmap.__contains__
1623 dcontains = dmap.__contains__
1620 dget = dmap.__getitem__
1624 dget = dmap.__getitem__
1621 ladd = lookup.append # aka "unsure"
1625 ladd = lookup.append # aka "unsure"
1622 madd = modified.append
1626 madd = modified.append
1623 aadd = added.append
1627 aadd = added.append
1624 uadd = unknown.append if listunknown else noop
1628 uadd = unknown.append if listunknown else noop
1625 iadd = ignored.append if listignored else noop
1629 iadd = ignored.append if listignored else noop
1626 radd = removed.append
1630 radd = removed.append
1627 dadd = deleted.append
1631 dadd = deleted.append
1628 cadd = clean.append if listclean else noop
1632 cadd = clean.append if listclean else noop
1629 mexact = match.exact
1633 mexact = match.exact
1630 dirignore = self._dirignore
1634 dirignore = self._dirignore
1631 checkexec = self._checkexec
1635 checkexec = self._checkexec
1632 checklink = self._checklink
1636 checklink = self._checklink
1633 copymap = self._map.copymap
1637 copymap = self._map.copymap
1634
1638
1635 # We need to do full walks when either
1639 # We need to do full walks when either
1636 # - we're listing all clean files, or
1640 # - we're listing all clean files, or
1637 # - match.traversedir does something, because match.traversedir should
1641 # - match.traversedir does something, because match.traversedir should
1638 # be called for every dir in the working dir
1642 # be called for every dir in the working dir
1639 full = listclean or match.traversedir is not None
1643 full = listclean or match.traversedir is not None
1640 for fn, st in self.walk(
1644 for fn, st in self.walk(
1641 match, subrepos, listunknown, listignored, full=full
1645 match, subrepos, listunknown, listignored, full=full
1642 ).items():
1646 ).items():
1643 if not dcontains(fn):
1647 if not dcontains(fn):
1644 if (listignored or mexact(fn)) and dirignore(fn):
1648 if (listignored or mexact(fn)) and dirignore(fn):
1645 if listignored:
1649 if listignored:
1646 iadd(fn)
1650 iadd(fn)
1647 else:
1651 else:
1648 uadd(fn)
1652 uadd(fn)
1649 continue
1653 continue
1650
1654
1651 t = dget(fn)
1655 t = dget(fn)
1652 mode = t.mode
1656 mode = t.mode
1653 size = t.size
1657 size = t.size
1654
1658
1655 if not st and t.tracked:
1659 if not st and t.tracked:
1656 dadd(fn)
1660 dadd(fn)
1657 elif t.p2_info:
1661 elif t.p2_info:
1658 madd(fn)
1662 madd(fn)
1659 elif t.added:
1663 elif t.added:
1660 aadd(fn)
1664 aadd(fn)
1661 elif t.removed:
1665 elif t.removed:
1662 radd(fn)
1666 radd(fn)
1663 elif t.tracked:
1667 elif t.tracked:
1664 if not checklink and t.has_fallback_symlink:
1668 if not checklink and t.has_fallback_symlink:
1665 # If the file system does not support symlink, the mode
1669 # If the file system does not support symlink, the mode
1666 # might not be correctly stored in the dirstate, so do not
1670 # might not be correctly stored in the dirstate, so do not
1667 # trust it.
1671 # trust it.
1668 ladd(fn)
1672 ladd(fn)
1669 elif not checkexec and t.has_fallback_exec:
1673 elif not checkexec and t.has_fallback_exec:
1670 # If the file system does not support exec bits, the mode
1674 # If the file system does not support exec bits, the mode
1671 # might not be correctly stored in the dirstate, so do not
1675 # might not be correctly stored in the dirstate, so do not
1672 # trust it.
1676 # trust it.
1673 ladd(fn)
1677 ladd(fn)
1674 elif (
1678 elif (
1675 size >= 0
1679 size >= 0
1676 and (
1680 and (
1677 (size != st.st_size and size != st.st_size & _rangemask)
1681 (size != st.st_size and size != st.st_size & _rangemask)
1678 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1682 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1679 )
1683 )
1680 or fn in copymap
1684 or fn in copymap
1681 ):
1685 ):
1682 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1686 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1683 # issue6456: Size returned may be longer due to
1687 # issue6456: Size returned may be longer due to
1684 # encryption on EXT-4 fscrypt, undecided.
1688 # encryption on EXT-4 fscrypt, undecided.
1685 ladd(fn)
1689 ladd(fn)
1686 else:
1690 else:
1687 madd(fn)
1691 madd(fn)
1688 elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
1692 elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
1689 # There might be a change in the future if for example the
1693 # There might be a change in the future if for example the
1690 # internal clock is off, but this is a case where the issues
1694 # internal clock is off, but this is a case where the issues
1691 # the user would face would be a lot worse and there is
1695 # the user would face would be a lot worse and there is
1692 # nothing we can really do.
1696 # nothing we can really do.
1693 ladd(fn)
1697 ladd(fn)
1694 elif listclean:
1698 elif listclean:
1695 cadd(fn)
1699 cadd(fn)
1696 status = scmutil.status(
1700 status = scmutil.status(
1697 modified, added, removed, deleted, unknown, ignored, clean
1701 modified, added, removed, deleted, unknown, ignored, clean
1698 )
1702 )
1699 return (lookup, status, mtime_boundary)
1703 return (lookup, status, mtime_boundary)
1700
1704
def matches(self, match):
    """
    return files in the dirstate (in whatever state) filtered by match
    """
    dmap = self._map
    if rustmod is not None:
        # with the Rust implementation, the inner map holds the actual
        # entries; use it directly so the fast paths below see plain keys
        dmap = self._map._map

    if match.always():
        return dmap.keys()
    files = match.files()
    if match.isexact():
        # fast path -- filter the other way around, since typically files is
        # much smaller than dmap
        return [f for f in files if f in dmap]
    if match.prefix() and all(fn in dmap for fn in files):
        # fast path -- all the values are known to be files, so just return
        # that
        return list(files)
    return [f for f in dmap if match(f)]
1721
1725
def _actualfilename(self, tr):
    """Return the dirstate filename to use for the transaction ``tr``.

    While a transaction is active the pending file is used so that
    readers of the committed dirstate are not confused; otherwise the
    regular dirstate file is returned.
    """
    if tr:
        return self._pendingfilename
    else:
        return self._filename
1727
1731
def all_file_names(self):
    """list all filenames currently used by this dirstate

    This is only used to do `hg rollback` related backup in the transaction
    """
    if not self._opener.exists(self._filename):
        # no data ever written to disk yet
        return ()
    elif self._use_dirstate_v2:
        # dirstate-v2 splits the data between a docket file and a data file
        return (
            self._filename,
            self._map.docket.data_filename(),
        )
    else:
        return (self._filename,)
1743
1747
def verify(self, m1, m2, p1, narrow_matcher=None):
    """
    check the dirstate contents against the parent manifest and yield errors
    """
    missing_from_p1 = _(
        b"%s marked as tracked in p1 (%s) but not in manifest1\n"
    )
    unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
    missing_from_ps = _(
        b"%s marked as modified, but not in either manifest\n"
    )
    missing_from_ds = _(
        b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
    )
    # first pass: every dirstate entry must be consistent with the manifests
    for f, entry in self.items():
        if entry.p1_tracked:
            if entry.modified and f not in m1 and f not in m2:
                yield missing_from_ps % f
            elif f not in m1:
                yield missing_from_p1 % (f, node.short(p1))
        if entry.added and f in m1:
            yield unexpected_in_p1 % f
    # second pass: every file in manifest1 must be tracked in the dirstate
    # (files outside the narrow spec, if any, are exempt)
    for f in m1:
        if narrow_matcher is not None and not narrow_matcher(f):
            continue
        entry = self.get_entry(f)
        if not entry.p1_tracked:
            yield missing_from_ds % (f, node.short(p1))
General Comments 0
You need to be logged in to leave comments. Login now