##// END OF EJS Templates
dirstate: have `running_status` warn when exiting with a dirty dirstate...
marmoute -
r51043:b583988c default
parent child Browse files
Show More
@@ -1,1773 +1,1778 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import collections
9 import collections
10 import contextlib
10 import contextlib
11 import os
11 import os
12 import stat
12 import stat
13 import uuid
13 import uuid
14
14
15 from .i18n import _
15 from .i18n import _
16 from .pycompat import delattr
16 from .pycompat import delattr
17
17
18 from hgdemandimport import tracing
18 from hgdemandimport import tracing
19
19
20 from . import (
20 from . import (
21 dirstatemap,
21 dirstatemap,
22 encoding,
22 encoding,
23 error,
23 error,
24 match as matchmod,
24 match as matchmod,
25 node,
25 node,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 util,
30 util,
31 )
31 )
32
32
33 from .dirstateutils import (
33 from .dirstateutils import (
34 timestamp,
34 timestamp,
35 )
35 )
36
36
37 from .interfaces import (
37 from .interfaces import (
38 dirstate as intdirstate,
38 dirstate as intdirstate,
39 util as interfaceutil,
39 util as interfaceutil,
40 )
40 )
41
41
42 parsers = policy.importmod('parsers')
42 parsers = policy.importmod('parsers')
43 rustmod = policy.importrust('dirstate')
43 rustmod = policy.importrust('dirstate')
44
44
45 HAS_FAST_DIRSTATE_V2 = rustmod is not None
45 HAS_FAST_DIRSTATE_V2 = rustmod is not None
46
46
47 propertycache = util.propertycache
47 propertycache = util.propertycache
48 filecache = scmutil.filecache
48 filecache = scmutil.filecache
49 _rangemask = dirstatemap.rangemask
49 _rangemask = dirstatemap.rangemask
50
50
51 DirstateItem = dirstatemap.DirstateItem
51 DirstateItem = dirstatemap.DirstateItem
52
52
53
53
54 class repocache(filecache):
54 class repocache(filecache):
55 """filecache for files in .hg/"""
55 """filecache for files in .hg/"""
56
56
57 def join(self, obj, fname):
57 def join(self, obj, fname):
58 return obj._opener.join(fname)
58 return obj._opener.join(fname)
59
59
60
60
61 class rootcache(filecache):
61 class rootcache(filecache):
62 """filecache for files in the repository root"""
62 """filecache for files in the repository root"""
63
63
64 def join(self, obj, fname):
64 def join(self, obj, fname):
65 return obj._join(fname)
65 return obj._join(fname)
66
66
67
67
68 def check_invalidated(func):
68 def check_invalidated(func):
69 """check we func is called a non-invalidated dirstate
69 """check we func is called a non-invalidated dirstate
70
70
71 The dirstate is in an "invalidated state" after an error occurred during its
71 The dirstate is in an "invalidated state" after an error occurred during its
72 modification and remains so until we exit the top level scope that framed
72 modification and remains so until we exit the top level scope that framed
73 such change.
73 such change.
74 """
74 """
75
75
76 def wrap(self, *args, **kwargs):
76 def wrap(self, *args, **kwargs):
77 if self._invalidated_context:
77 if self._invalidated_context:
78 msg = 'calling `%s` after the dirstate was invalidated'
78 msg = 'calling `%s` after the dirstate was invalidated'
79 msg %= func.__name__
79 msg %= func.__name__
80 raise error.ProgrammingError(msg)
80 raise error.ProgrammingError(msg)
81 return func(self, *args, **kwargs)
81 return func(self, *args, **kwargs)
82
82
83 return wrap
83 return wrap
84
84
85
85
86 def requires_changing_parents(func):
86 def requires_changing_parents(func):
87 def wrap(self, *args, **kwargs):
87 def wrap(self, *args, **kwargs):
88 if not self.is_changing_parents:
88 if not self.is_changing_parents:
89 msg = 'calling `%s` outside of a changing_parents context'
89 msg = 'calling `%s` outside of a changing_parents context'
90 msg %= func.__name__
90 msg %= func.__name__
91 raise error.ProgrammingError(msg)
91 raise error.ProgrammingError(msg)
92 return func(self, *args, **kwargs)
92 return func(self, *args, **kwargs)
93
93
94 return check_invalidated(wrap)
94 return check_invalidated(wrap)
95
95
96
96
97 def requires_changing_files(func):
97 def requires_changing_files(func):
98 def wrap(self, *args, **kwargs):
98 def wrap(self, *args, **kwargs):
99 if not self.is_changing_files:
99 if not self.is_changing_files:
100 msg = 'calling `%s` outside of a `changing_files`'
100 msg = 'calling `%s` outside of a `changing_files`'
101 msg %= func.__name__
101 msg %= func.__name__
102 raise error.ProgrammingError(msg)
102 raise error.ProgrammingError(msg)
103 return func(self, *args, **kwargs)
103 return func(self, *args, **kwargs)
104
104
105 return check_invalidated(wrap)
105 return check_invalidated(wrap)
106
106
107
107
108 def requires_changing_any(func):
108 def requires_changing_any(func):
109 def wrap(self, *args, **kwargs):
109 def wrap(self, *args, **kwargs):
110 if not self.is_changing_any:
110 if not self.is_changing_any:
111 msg = 'calling `%s` outside of a changing context'
111 msg = 'calling `%s` outside of a changing context'
112 msg %= func.__name__
112 msg %= func.__name__
113 raise error.ProgrammingError(msg)
113 raise error.ProgrammingError(msg)
114 return func(self, *args, **kwargs)
114 return func(self, *args, **kwargs)
115
115
116 return check_invalidated(wrap)
116 return check_invalidated(wrap)
117
117
118
118
119 def requires_not_changing_parents(func):
119 def requires_not_changing_parents(func):
120 def wrap(self, *args, **kwargs):
120 def wrap(self, *args, **kwargs):
121 if self.is_changing_parents:
121 if self.is_changing_parents:
122 msg = 'calling `%s` inside of a changing_parents context'
122 msg = 'calling `%s` inside of a changing_parents context'
123 msg %= func.__name__
123 msg %= func.__name__
124 raise error.ProgrammingError(msg)
124 raise error.ProgrammingError(msg)
125 return func(self, *args, **kwargs)
125 return func(self, *args, **kwargs)
126
126
127 return check_invalidated(wrap)
127 return check_invalidated(wrap)
128
128
129
129
130 CHANGE_TYPE_PARENTS = "parents"
130 CHANGE_TYPE_PARENTS = "parents"
131 CHANGE_TYPE_FILES = "files"
131 CHANGE_TYPE_FILES = "files"
132
132
133
133
134 @interfaceutil.implementer(intdirstate.idirstate)
134 @interfaceutil.implementer(intdirstate.idirstate)
135 class dirstate:
135 class dirstate:
136
136
137 # used by largefile to avoid overwriting transaction callback
137 # used by largefile to avoid overwriting transaction callback
138 _tr_key_suffix = b''
138 _tr_key_suffix = b''
139
139
140 def __init__(
140 def __init__(
141 self,
141 self,
142 opener,
142 opener,
143 ui,
143 ui,
144 root,
144 root,
145 validate,
145 validate,
146 sparsematchfn,
146 sparsematchfn,
147 nodeconstants,
147 nodeconstants,
148 use_dirstate_v2,
148 use_dirstate_v2,
149 use_tracked_hint=False,
149 use_tracked_hint=False,
150 ):
150 ):
151 """Create a new dirstate object.
151 """Create a new dirstate object.
152
152
153 opener is an open()-like callable that can be used to open the
153 opener is an open()-like callable that can be used to open the
154 dirstate file; root is the root of the directory tracked by
154 dirstate file; root is the root of the directory tracked by
155 the dirstate.
155 the dirstate.
156 """
156 """
157 self._use_dirstate_v2 = use_dirstate_v2
157 self._use_dirstate_v2 = use_dirstate_v2
158 self._use_tracked_hint = use_tracked_hint
158 self._use_tracked_hint = use_tracked_hint
159 self._nodeconstants = nodeconstants
159 self._nodeconstants = nodeconstants
160 self._opener = opener
160 self._opener = opener
161 self._validate = validate
161 self._validate = validate
162 self._root = root
162 self._root = root
163 # Either build a sparse-matcher or None if sparse is disabled
163 # Either build a sparse-matcher or None if sparse is disabled
164 self._sparsematchfn = sparsematchfn
164 self._sparsematchfn = sparsematchfn
165 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
165 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
166 # UNC path pointing to root share (issue4557)
166 # UNC path pointing to root share (issue4557)
167 self._rootdir = pathutil.normasprefix(root)
167 self._rootdir = pathutil.normasprefix(root)
168 # True if any internal state may be different
168 # True if any internal state may be different
169 self._dirty = False
169 self._dirty = False
170 # True if the set of tracked files may be different
170 # True if the set of tracked files may be different
171 self._dirty_tracked_set = False
171 self._dirty_tracked_set = False
172 self._ui = ui
172 self._ui = ui
173 self._filecache = {}
173 self._filecache = {}
174 # nesting level of `changing_parents` context
174 # nesting level of `changing_parents` context
175 self._changing_level = 0
175 self._changing_level = 0
176 # the change currently underway
176 # the change currently underway
177 self._change_type = None
177 self._change_type = None
178 # number of open _running_status context
178 # number of open _running_status context
179 self._running_status = 0
179 self._running_status = 0
180 # True if the current dirstate changing operations have been
180 # True if the current dirstate changing operations have been
181 # invalidated (used to make sure all nested contexts have been exited)
181 # invalidated (used to make sure all nested contexts have been exited)
182 self._invalidated_context = False
182 self._invalidated_context = False
183 self._attached_to_a_transaction = False
183 self._attached_to_a_transaction = False
184 self._filename = b'dirstate'
184 self._filename = b'dirstate'
185 self._filename_th = b'dirstate-tracked-hint'
185 self._filename_th = b'dirstate-tracked-hint'
186 self._pendingfilename = b'%s.pending' % self._filename
186 self._pendingfilename = b'%s.pending' % self._filename
187 self._plchangecallbacks = {}
187 self._plchangecallbacks = {}
188 self._origpl = None
188 self._origpl = None
189 self._mapcls = dirstatemap.dirstatemap
189 self._mapcls = dirstatemap.dirstatemap
190 # Access and cache cwd early, so we don't access it for the first time
190 # Access and cache cwd early, so we don't access it for the first time
191 # after a working-copy update caused it to not exist (accessing it then
191 # after a working-copy update caused it to not exist (accessing it then
192 # raises an exception).
192 # raises an exception).
193 self._cwd
193 self._cwd
194
194
195 def refresh(self):
195 def refresh(self):
196 if '_branch' in vars(self):
196 if '_branch' in vars(self):
197 del self._branch
197 del self._branch
198 if '_map' in vars(self) and self._map.may_need_refresh():
198 if '_map' in vars(self) and self._map.may_need_refresh():
199 self.invalidate()
199 self.invalidate()
200
200
201 def prefetch_parents(self):
201 def prefetch_parents(self):
202 """make sure the parents are loaded
202 """make sure the parents are loaded
203
203
204 Used to avoid a race condition.
204 Used to avoid a race condition.
205 """
205 """
206 self._pl
206 self._pl
207
207
208 @contextlib.contextmanager
208 @contextlib.contextmanager
209 @check_invalidated
209 @check_invalidated
210 def running_status(self, repo):
210 def running_status(self, repo):
211 """Wrap a status operation
211 """Wrap a status operation
212
212
213 This context is not mutually exclusive with the `changing_*` context. It
213 This context is not mutually exclusive with the `changing_*` context. It
214 also does not require the `wlock` to be taken.
214 also does not require the `wlock` to be taken.
215
215
216 If the wlock is taken, this context will behave in a simple way, and
216 If the wlock is taken, this context will behave in a simple way, and
217 ensure the data are scheduled for write when leaving the top level
217 ensure the data are scheduled for write when leaving the top level
218 context.
218 context.
219
219
220 If the lock is not taken, it will only warrant that the data are either
220 If the lock is not taken, it will only warrant that the data are either
221 committed (written) or rolled back (invalidated) when exiting the top
221 committed (written) or rolled back (invalidated) when exiting the top
222 level context. The write/invalidate action must be performed by the
222 level context. The write/invalidate action must be performed by the
223 wrapped code.
223 wrapped code.
224
224
225
225
226 The expected logic is:
226 The expected logic is:
227
227
228 A: read the dirstate
228 A: read the dirstate
229 B: run status
229 B: run status
230 This might make the dirstate dirty by updating cache,
230 This might make the dirstate dirty by updating cache,
231 especially in Rust.
231 especially in Rust.
232 C: do more "post status fixup" if relevant
232 C: do more "post status fixup" if relevant
233 D: try to take the w-lock (this will invalidate the changes if they were raced)
233 D: try to take the w-lock (this will invalidate the changes if they were raced)
234 E0: if dirstate changed on disk β†’ discard change (done by dirstate internal)
234 E0: if dirstate changed on disk β†’ discard change (done by dirstate internal)
235 E1: elif lock was acquired β†’ write the changes
235 E1: elif lock was acquired β†’ write the changes
236 E2: else β†’ discard the changes
236 E2: else β†’ discard the changes
237 """
237 """
238 has_lock = repo.currentwlock() is not None
238 has_lock = repo.currentwlock() is not None
239 is_changing = self.is_changing_any
239 is_changing = self.is_changing_any
240 tr = repo.currenttransaction()
240 tr = repo.currenttransaction()
241 has_tr = tr is not None
241 has_tr = tr is not None
242 nested = bool(self._running_status)
242 nested = bool(self._running_status)
243
243
244 first_and_alone = not (is_changing or has_tr or nested)
244 first_and_alone = not (is_changing or has_tr or nested)
245
245
246 # enforce no change happened outside of a proper context.
246 # enforce no change happened outside of a proper context.
247 if first_and_alone and self._dirty:
247 if first_and_alone and self._dirty:
248 has_tr = repo.currenttransaction() is not None
248 has_tr = repo.currenttransaction() is not None
249 if not has_tr and self._changing_level == 0 and self._dirty:
249 if not has_tr and self._changing_level == 0 and self._dirty:
250 msg = "entering a status context, but dirstate is already dirty"
250 msg = "entering a status context, but dirstate is already dirty"
251 raise error.ProgrammingError(msg)
251 raise error.ProgrammingError(msg)
252
252
253 should_write = has_lock and not (nested or is_changing)
253 should_write = has_lock and not (nested or is_changing)
254
254
255 self._running_status += 1
255 self._running_status += 1
256 try:
256 try:
257 yield
257 yield
258 except Exception:
258 except Exception:
259 self.invalidate()
259 self.invalidate()
260 raise
260 raise
261 finally:
261 finally:
262 self._running_status -= 1
262 self._running_status -= 1
263 if self._invalidated_context:
263 if self._invalidated_context:
264 should_write = False
264 should_write = False
265 self.invalidate()
265 self.invalidate()
266
266
267 if should_write:
267 if should_write:
268 assert repo.currenttransaction() is tr
268 assert repo.currenttransaction() is tr
269 self.write(tr)
269 self.write(tr)
270 elif not has_lock:
271 if self._dirty:
272 msg = b'dirstate dirty while exiting an isolated status context'
273 repo.ui.develwarn(msg)
274 self.invalidate()
270
275
271 @contextlib.contextmanager
276 @contextlib.contextmanager
272 @check_invalidated
277 @check_invalidated
273 def _changing(self, repo, change_type):
278 def _changing(self, repo, change_type):
274 if repo.currentwlock() is None:
279 if repo.currentwlock() is None:
275 msg = b"trying to change the dirstate without holding the wlock"
280 msg = b"trying to change the dirstate without holding the wlock"
276 raise error.ProgrammingError(msg)
281 raise error.ProgrammingError(msg)
277
282
278 has_tr = repo.currenttransaction() is not None
283 has_tr = repo.currenttransaction() is not None
279 if not has_tr and self._changing_level == 0 and self._dirty:
284 if not has_tr and self._changing_level == 0 and self._dirty:
280 msg = "entering a changing context, but dirstate is already dirty"
285 msg = "entering a changing context, but dirstate is already dirty"
281 raise error.ProgrammingError(msg)
286 raise error.ProgrammingError(msg)
282
287
283 assert self._changing_level >= 0
288 assert self._changing_level >= 0
284 # different type of change are mutually exclusive
289 # different type of change are mutually exclusive
285 if self._change_type is None:
290 if self._change_type is None:
286 assert self._changing_level == 0
291 assert self._changing_level == 0
287 self._change_type = change_type
292 self._change_type = change_type
288 elif self._change_type != change_type:
293 elif self._change_type != change_type:
289 msg = (
294 msg = (
290 'trying to open "%s" dirstate-changing context while a "%s" is'
295 'trying to open "%s" dirstate-changing context while a "%s" is'
291 ' already open'
296 ' already open'
292 )
297 )
293 msg %= (change_type, self._change_type)
298 msg %= (change_type, self._change_type)
294 raise error.ProgrammingError(msg)
299 raise error.ProgrammingError(msg)
295 should_write = False
300 should_write = False
296 self._changing_level += 1
301 self._changing_level += 1
297 try:
302 try:
298 yield
303 yield
299 except: # re-raises
304 except: # re-raises
300 self.invalidate() # this will set `_invalidated_context`
305 self.invalidate() # this will set `_invalidated_context`
301 raise
306 raise
302 finally:
307 finally:
303 assert self._changing_level > 0
308 assert self._changing_level > 0
304 self._changing_level -= 1
309 self._changing_level -= 1
305 # If the dirstate is being invalidated, call invalidate again.
310 # If the dirstate is being invalidated, call invalidate again.
306 # This will throw away anything added by an upper context and
311 # This will throw away anything added by an upper context and
307 # reset the `_invalidated_context` flag when relevant
312 # reset the `_invalidated_context` flag when relevant
308 if self._changing_level <= 0:
313 if self._changing_level <= 0:
309 self._change_type = None
314 self._change_type = None
310 assert self._changing_level == 0
315 assert self._changing_level == 0
311 if self._invalidated_context:
316 if self._invalidated_context:
312 # make sure we invalidate anything an upper context might
317 # make sure we invalidate anything an upper context might
313 # have changed.
318 # have changed.
314 self.invalidate()
319 self.invalidate()
315 else:
320 else:
316 should_write = self._changing_level <= 0
321 should_write = self._changing_level <= 0
317 tr = repo.currenttransaction()
322 tr = repo.currenttransaction()
318 if has_tr != (tr is not None):
323 if has_tr != (tr is not None):
319 if has_tr:
324 if has_tr:
320 m = "transaction vanished while changing dirstate"
325 m = "transaction vanished while changing dirstate"
321 else:
326 else:
322 m = "transaction appeared while changing dirstate"
327 m = "transaction appeared while changing dirstate"
323 raise error.ProgrammingError(m)
328 raise error.ProgrammingError(m)
324 if should_write:
329 if should_write:
325 self.write(tr)
330 self.write(tr)
326
331
327 @contextlib.contextmanager
332 @contextlib.contextmanager
328 def changing_parents(self, repo):
333 def changing_parents(self, repo):
329 with self._changing(repo, CHANGE_TYPE_PARENTS) as c:
334 with self._changing(repo, CHANGE_TYPE_PARENTS) as c:
330 yield c
335 yield c
331
336
332 @contextlib.contextmanager
337 @contextlib.contextmanager
333 def changing_files(self, repo):
338 def changing_files(self, repo):
334 with self._changing(repo, CHANGE_TYPE_FILES) as c:
339 with self._changing(repo, CHANGE_TYPE_FILES) as c:
335 yield c
340 yield c
336
341
337 # here to help migration to the new code
342 # here to help migration to the new code
338 def parentchange(self):
343 def parentchange(self):
339 msg = (
344 msg = (
340 "Mercurial 6.4 and later requires call to "
345 "Mercurial 6.4 and later requires call to "
341 "`dirstate.changing_parents(repo)`"
346 "`dirstate.changing_parents(repo)`"
342 )
347 )
343 raise error.ProgrammingError(msg)
348 raise error.ProgrammingError(msg)
344
349
345 @property
350 @property
346 def is_changing_any(self):
351 def is_changing_any(self):
347 """Returns true if the dirstate is in the middle of a set of changes.
352 """Returns true if the dirstate is in the middle of a set of changes.
348
353
349 This returns True for any kind of change.
354 This returns True for any kind of change.
350 """
355 """
351 return self._changing_level > 0
356 return self._changing_level > 0
352
357
353 def pendingparentchange(self):
358 def pendingparentchange(self):
354 return self.is_changing_parent()
359 return self.is_changing_parent()
355
360
356 def is_changing_parent(self):
361 def is_changing_parent(self):
357 """Returns true if the dirstate is in the middle of a set of changes
362 """Returns true if the dirstate is in the middle of a set of changes
358 that modify the dirstate parent.
363 that modify the dirstate parent.
359 """
364 """
360 self._ui.deprecwarn(b"dirstate.is_changing_parents", b"6.5")
365 self._ui.deprecwarn(b"dirstate.is_changing_parents", b"6.5")
361 return self.is_changing_parents
366 return self.is_changing_parents
362
367
363 @property
368 @property
364 def is_changing_parents(self):
369 def is_changing_parents(self):
365 """Returns true if the dirstate is in the middle of a set of changes
370 """Returns true if the dirstate is in the middle of a set of changes
366 that modify the dirstate parent.
371 that modify the dirstate parent.
367 """
372 """
368 if self._changing_level <= 0:
373 if self._changing_level <= 0:
369 return False
374 return False
370 return self._change_type == CHANGE_TYPE_PARENTS
375 return self._change_type == CHANGE_TYPE_PARENTS
371
376
372 @property
377 @property
373 def is_changing_files(self):
378 def is_changing_files(self):
374 """Returns true if the dirstate is in the middle of a set of changes
379 """Returns true if the dirstate is in the middle of a set of changes
375 that modify the files tracked or their sources.
380 that modify the files tracked or their sources.
376 """
381 """
377 if self._changing_level <= 0:
382 if self._changing_level <= 0:
378 return False
383 return False
379 return self._change_type == CHANGE_TYPE_FILES
384 return self._change_type == CHANGE_TYPE_FILES
380
385
381 @propertycache
386 @propertycache
382 def _map(self):
387 def _map(self):
383 """Return the dirstate contents (see documentation for dirstatemap)."""
388 """Return the dirstate contents (see documentation for dirstatemap)."""
384 return self._mapcls(
389 return self._mapcls(
385 self._ui,
390 self._ui,
386 self._opener,
391 self._opener,
387 self._root,
392 self._root,
388 self._nodeconstants,
393 self._nodeconstants,
389 self._use_dirstate_v2,
394 self._use_dirstate_v2,
390 )
395 )
391
396
392 @property
397 @property
393 def _sparsematcher(self):
398 def _sparsematcher(self):
394 """The matcher for the sparse checkout.
399 """The matcher for the sparse checkout.
395
400
396 The working directory may not include every file from a manifest. The
401 The working directory may not include every file from a manifest. The
397 matcher obtained by this property will match a path if it is to be
402 matcher obtained by this property will match a path if it is to be
398 included in the working directory.
403 included in the working directory.
399
404
400 When sparse if disabled, return None.
405 When sparse if disabled, return None.
401 """
406 """
402 if self._sparsematchfn is None:
407 if self._sparsematchfn is None:
403 return None
408 return None
404 # TODO there is potential to cache this property. For now, the matcher
409 # TODO there is potential to cache this property. For now, the matcher
405 # is resolved on every access. (But the called function does use a
410 # is resolved on every access. (But the called function does use a
406 # cache to keep the lookup fast.)
411 # cache to keep the lookup fast.)
407 return self._sparsematchfn()
412 return self._sparsematchfn()
408
413
409 @repocache(b'branch')
414 @repocache(b'branch')
410 def _branch(self):
415 def _branch(self):
411 try:
416 try:
412 return self._opener.read(b"branch").strip() or b"default"
417 return self._opener.read(b"branch").strip() or b"default"
413 except FileNotFoundError:
418 except FileNotFoundError:
414 return b"default"
419 return b"default"
415
420
416 @property
421 @property
417 def _pl(self):
422 def _pl(self):
418 return self._map.parents()
423 return self._map.parents()
419
424
420 def hasdir(self, d):
425 def hasdir(self, d):
421 return self._map.hastrackeddir(d)
426 return self._map.hastrackeddir(d)
422
427
423 @rootcache(b'.hgignore')
428 @rootcache(b'.hgignore')
424 def _ignore(self):
429 def _ignore(self):
425 files = self._ignorefiles()
430 files = self._ignorefiles()
426 if not files:
431 if not files:
427 return matchmod.never()
432 return matchmod.never()
428
433
429 pats = [b'include:%s' % f for f in files]
434 pats = [b'include:%s' % f for f in files]
430 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
435 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
431
436
432 @propertycache
437 @propertycache
433 def _slash(self):
438 def _slash(self):
434 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
439 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
435
440
436 @propertycache
441 @propertycache
437 def _checklink(self):
442 def _checklink(self):
438 return util.checklink(self._root)
443 return util.checklink(self._root)
439
444
440 @propertycache
445 @propertycache
441 def _checkexec(self):
446 def _checkexec(self):
442 return bool(util.checkexec(self._root))
447 return bool(util.checkexec(self._root))
443
448
444 @propertycache
449 @propertycache
445 def _checkcase(self):
450 def _checkcase(self):
446 return not util.fscasesensitive(self._join(b'.hg'))
451 return not util.fscasesensitive(self._join(b'.hg'))
447
452
448 def _join(self, f):
453 def _join(self, f):
449 # much faster than os.path.join()
454 # much faster than os.path.join()
450 # it's safe because f is always a relative path
455 # it's safe because f is always a relative path
451 return self._rootdir + f
456 return self._rootdir + f
452
457
453 def flagfunc(self, buildfallback):
458 def flagfunc(self, buildfallback):
454 """build a callable that returns flags associated with a filename
459 """build a callable that returns flags associated with a filename
455
460
456 The information is extracted from three possible layers:
461 The information is extracted from three possible layers:
457 1. the file system if it supports the information
462 1. the file system if it supports the information
458 2. the "fallback" information stored in the dirstate if any
463 2. the "fallback" information stored in the dirstate if any
459 3. a more expensive mechanism inferring the flags from the parents.
464 3. a more expensive mechanism inferring the flags from the parents.
460 """
465 """
461
466
462 # small hack to cache the result of buildfallback()
467 # small hack to cache the result of buildfallback()
463 fallback_func = []
468 fallback_func = []
464
469
465 def get_flags(x):
470 def get_flags(x):
466 entry = None
471 entry = None
467 fallback_value = None
472 fallback_value = None
468 try:
473 try:
469 st = os.lstat(self._join(x))
474 st = os.lstat(self._join(x))
470 except OSError:
475 except OSError:
471 return b''
476 return b''
472
477
473 if self._checklink:
478 if self._checklink:
474 if util.statislink(st):
479 if util.statislink(st):
475 return b'l'
480 return b'l'
476 else:
481 else:
477 entry = self.get_entry(x)
482 entry = self.get_entry(x)
478 if entry.has_fallback_symlink:
483 if entry.has_fallback_symlink:
479 if entry.fallback_symlink:
484 if entry.fallback_symlink:
480 return b'l'
485 return b'l'
481 else:
486 else:
482 if not fallback_func:
487 if not fallback_func:
483 fallback_func.append(buildfallback())
488 fallback_func.append(buildfallback())
484 fallback_value = fallback_func[0](x)
489 fallback_value = fallback_func[0](x)
485 if b'l' in fallback_value:
490 if b'l' in fallback_value:
486 return b'l'
491 return b'l'
487
492
488 if self._checkexec:
493 if self._checkexec:
489 if util.statisexec(st):
494 if util.statisexec(st):
490 return b'x'
495 return b'x'
491 else:
496 else:
492 if entry is None:
497 if entry is None:
493 entry = self.get_entry(x)
498 entry = self.get_entry(x)
494 if entry.has_fallback_exec:
499 if entry.has_fallback_exec:
495 if entry.fallback_exec:
500 if entry.fallback_exec:
496 return b'x'
501 return b'x'
497 else:
502 else:
498 if fallback_value is None:
503 if fallback_value is None:
499 if not fallback_func:
504 if not fallback_func:
500 fallback_func.append(buildfallback())
505 fallback_func.append(buildfallback())
501 fallback_value = fallback_func[0](x)
506 fallback_value = fallback_func[0](x)
502 if b'x' in fallback_value:
507 if b'x' in fallback_value:
503 return b'x'
508 return b'x'
504 return b''
509 return b''
505
510
506 return get_flags
511 return get_flags
507
512
508 @propertycache
513 @propertycache
509 def _cwd(self):
514 def _cwd(self):
510 # internal config: ui.forcecwd
515 # internal config: ui.forcecwd
511 forcecwd = self._ui.config(b'ui', b'forcecwd')
516 forcecwd = self._ui.config(b'ui', b'forcecwd')
512 if forcecwd:
517 if forcecwd:
513 return forcecwd
518 return forcecwd
514 return encoding.getcwd()
519 return encoding.getcwd()
515
520
516 def getcwd(self):
521 def getcwd(self):
517 """Return the path from which a canonical path is calculated.
522 """Return the path from which a canonical path is calculated.
518
523
519 This path should be used to resolve file patterns or to convert
524 This path should be used to resolve file patterns or to convert
520 canonical paths back to file paths for display. It shouldn't be
525 canonical paths back to file paths for display. It shouldn't be
521 used to get real file paths. Use vfs functions instead.
526 used to get real file paths. Use vfs functions instead.
522 """
527 """
523 cwd = self._cwd
528 cwd = self._cwd
524 if cwd == self._root:
529 if cwd == self._root:
525 return b''
530 return b''
526 # self._root ends with a path separator if self._root is '/' or 'C:\'
531 # self._root ends with a path separator if self._root is '/' or 'C:\'
527 rootsep = self._root
532 rootsep = self._root
528 if not util.endswithsep(rootsep):
533 if not util.endswithsep(rootsep):
529 rootsep += pycompat.ossep
534 rootsep += pycompat.ossep
530 if cwd.startswith(rootsep):
535 if cwd.startswith(rootsep):
531 return cwd[len(rootsep) :]
536 return cwd[len(rootsep) :]
532 else:
537 else:
533 # we're outside the repo. return an absolute path.
538 # we're outside the repo. return an absolute path.
534 return cwd
539 return cwd
535
540
536 def pathto(self, f, cwd=None):
541 def pathto(self, f, cwd=None):
537 if cwd is None:
542 if cwd is None:
538 cwd = self.getcwd()
543 cwd = self.getcwd()
539 path = util.pathto(self._root, cwd, f)
544 path = util.pathto(self._root, cwd, f)
540 if self._slash:
545 if self._slash:
541 return util.pconvert(path)
546 return util.pconvert(path)
542 return path
547 return path
543
548
544 def get_entry(self, path):
549 def get_entry(self, path):
545 """return a DirstateItem for the associated path"""
550 """return a DirstateItem for the associated path"""
546 entry = self._map.get(path)
551 entry = self._map.get(path)
547 if entry is None:
552 if entry is None:
548 return DirstateItem()
553 return DirstateItem()
549 return entry
554 return entry
550
555
551 def __contains__(self, key):
556 def __contains__(self, key):
552 return key in self._map
557 return key in self._map
553
558
554 def __iter__(self):
559 def __iter__(self):
555 return iter(sorted(self._map))
560 return iter(sorted(self._map))
556
561
557 def items(self):
562 def items(self):
558 return self._map.items()
563 return self._map.items()
559
564
560 iteritems = items
565 iteritems = items
561
566
562 def parents(self):
567 def parents(self):
563 return [self._validate(p) for p in self._pl]
568 return [self._validate(p) for p in self._pl]
564
569
565 def p1(self):
570 def p1(self):
566 return self._validate(self._pl[0])
571 return self._validate(self._pl[0])
567
572
568 def p2(self):
573 def p2(self):
569 return self._validate(self._pl[1])
574 return self._validate(self._pl[1])
570
575
571 @property
576 @property
572 def in_merge(self):
577 def in_merge(self):
573 """True if a merge is in progress"""
578 """True if a merge is in progress"""
574 return self._pl[1] != self._nodeconstants.nullid
579 return self._pl[1] != self._nodeconstants.nullid
575
580
576 def branch(self):
581 def branch(self):
577 return encoding.tolocal(self._branch)
582 return encoding.tolocal(self._branch)
578
583
579 @requires_changing_parents
584 @requires_changing_parents
580 def setparents(self, p1, p2=None):
585 def setparents(self, p1, p2=None):
581 """Set dirstate parents to p1 and p2.
586 """Set dirstate parents to p1 and p2.
582
587
583 When moving from two parents to one, "merged" entries a
588 When moving from two parents to one, "merged" entries a
584 adjusted to normal and previous copy records discarded and
589 adjusted to normal and previous copy records discarded and
585 returned by the call.
590 returned by the call.
586
591
587 See localrepo.setparents()
592 See localrepo.setparents()
588 """
593 """
589 if p2 is None:
594 if p2 is None:
590 p2 = self._nodeconstants.nullid
595 p2 = self._nodeconstants.nullid
591 if self._changing_level == 0:
596 if self._changing_level == 0:
592 raise ValueError(
597 raise ValueError(
593 b"cannot set dirstate parent outside of "
598 b"cannot set dirstate parent outside of "
594 b"dirstate.changing_parents context manager"
599 b"dirstate.changing_parents context manager"
595 )
600 )
596
601
597 self._dirty = True
602 self._dirty = True
598 oldp2 = self._pl[1]
603 oldp2 = self._pl[1]
599 if self._origpl is None:
604 if self._origpl is None:
600 self._origpl = self._pl
605 self._origpl = self._pl
601 nullid = self._nodeconstants.nullid
606 nullid = self._nodeconstants.nullid
602 # True if we need to fold p2 related state back to a linear case
607 # True if we need to fold p2 related state back to a linear case
603 fold_p2 = oldp2 != nullid and p2 == nullid
608 fold_p2 = oldp2 != nullid and p2 == nullid
604 return self._map.setparents(p1, p2, fold_p2=fold_p2)
609 return self._map.setparents(p1, p2, fold_p2=fold_p2)
605
610
606 def setbranch(self, branch):
611 def setbranch(self, branch):
607 self.__class__._branch.set(self, encoding.fromlocal(branch))
612 self.__class__._branch.set(self, encoding.fromlocal(branch))
608 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
613 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
609 try:
614 try:
610 f.write(self._branch + b'\n')
615 f.write(self._branch + b'\n')
611 f.close()
616 f.close()
612
617
613 # make sure filecache has the correct stat info for _branch after
618 # make sure filecache has the correct stat info for _branch after
614 # replacing the underlying file
619 # replacing the underlying file
615 ce = self._filecache[b'_branch']
620 ce = self._filecache[b'_branch']
616 if ce:
621 if ce:
617 ce.refresh()
622 ce.refresh()
618 except: # re-raises
623 except: # re-raises
619 f.discard()
624 f.discard()
620 raise
625 raise
621
626
622 def invalidate(self):
627 def invalidate(self):
623 """Causes the next access to reread the dirstate.
628 """Causes the next access to reread the dirstate.
624
629
625 This is different from localrepo.invalidatedirstate() because it always
630 This is different from localrepo.invalidatedirstate() because it always
626 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
631 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
627 check whether the dirstate has changed before rereading it."""
632 check whether the dirstate has changed before rereading it."""
628
633
629 for a in ("_map", "_branch", "_ignore"):
634 for a in ("_map", "_branch", "_ignore"):
630 if a in self.__dict__:
635 if a in self.__dict__:
631 delattr(self, a)
636 delattr(self, a)
632 self._dirty = False
637 self._dirty = False
633 self._dirty_tracked_set = False
638 self._dirty_tracked_set = False
634 self._invalidated_context = bool(
639 self._invalidated_context = bool(
635 self._changing_level > 0
640 self._changing_level > 0
636 or self._attached_to_a_transaction
641 or self._attached_to_a_transaction
637 or self._running_status
642 or self._running_status
638 )
643 )
639 self._origpl = None
644 self._origpl = None
640
645
641 @requires_changing_any
646 @requires_changing_any
642 def copy(self, source, dest):
647 def copy(self, source, dest):
643 """Mark dest as a copy of source. Unmark dest if source is None."""
648 """Mark dest as a copy of source. Unmark dest if source is None."""
644 if source == dest:
649 if source == dest:
645 return
650 return
646 self._dirty = True
651 self._dirty = True
647 if source is not None:
652 if source is not None:
648 self._check_sparse(source)
653 self._check_sparse(source)
649 self._map.copymap[dest] = source
654 self._map.copymap[dest] = source
650 else:
655 else:
651 self._map.copymap.pop(dest, None)
656 self._map.copymap.pop(dest, None)
652
657
653 def copied(self, file):
658 def copied(self, file):
654 return self._map.copymap.get(file, None)
659 return self._map.copymap.get(file, None)
655
660
656 def copies(self):
661 def copies(self):
657 return self._map.copymap
662 return self._map.copymap
658
663
659 @requires_changing_files
664 @requires_changing_files
660 def set_tracked(self, filename, reset_copy=False):
665 def set_tracked(self, filename, reset_copy=False):
661 """a "public" method for generic code to mark a file as tracked
666 """a "public" method for generic code to mark a file as tracked
662
667
663 This function is to be called outside of "update/merge" case. For
668 This function is to be called outside of "update/merge" case. For
664 example by a command like `hg add X`.
669 example by a command like `hg add X`.
665
670
666 if reset_copy is set, any existing copy information will be dropped.
671 if reset_copy is set, any existing copy information will be dropped.
667
672
668 return True the file was previously untracked, False otherwise.
673 return True the file was previously untracked, False otherwise.
669 """
674 """
670 self._dirty = True
675 self._dirty = True
671 entry = self._map.get(filename)
676 entry = self._map.get(filename)
672 if entry is None or not entry.tracked:
677 if entry is None or not entry.tracked:
673 self._check_new_tracked_filename(filename)
678 self._check_new_tracked_filename(filename)
674 pre_tracked = self._map.set_tracked(filename)
679 pre_tracked = self._map.set_tracked(filename)
675 if reset_copy:
680 if reset_copy:
676 self._map.copymap.pop(filename, None)
681 self._map.copymap.pop(filename, None)
677 if pre_tracked:
682 if pre_tracked:
678 self._dirty_tracked_set = True
683 self._dirty_tracked_set = True
679 return pre_tracked
684 return pre_tracked
680
685
681 @requires_changing_files
686 @requires_changing_files
682 def set_untracked(self, filename):
687 def set_untracked(self, filename):
683 """a "public" method for generic code to mark a file as untracked
688 """a "public" method for generic code to mark a file as untracked
684
689
685 This function is to be called outside of "update/merge" case. For
690 This function is to be called outside of "update/merge" case. For
686 example by a command like `hg remove X`.
691 example by a command like `hg remove X`.
687
692
688 return True the file was previously tracked, False otherwise.
693 return True the file was previously tracked, False otherwise.
689 """
694 """
690 ret = self._map.set_untracked(filename)
695 ret = self._map.set_untracked(filename)
691 if ret:
696 if ret:
692 self._dirty = True
697 self._dirty = True
693 self._dirty_tracked_set = True
698 self._dirty_tracked_set = True
694 return ret
699 return ret
695
700
696 @requires_not_changing_parents
701 @requires_not_changing_parents
697 def set_clean(self, filename, parentfiledata):
702 def set_clean(self, filename, parentfiledata):
698 """record that the current state of the file on disk is known to be clean"""
703 """record that the current state of the file on disk is known to be clean"""
699 self._dirty = True
704 self._dirty = True
700 if not self._map[filename].tracked:
705 if not self._map[filename].tracked:
701 self._check_new_tracked_filename(filename)
706 self._check_new_tracked_filename(filename)
702 (mode, size, mtime) = parentfiledata
707 (mode, size, mtime) = parentfiledata
703 self._map.set_clean(filename, mode, size, mtime)
708 self._map.set_clean(filename, mode, size, mtime)
704
709
705 @requires_not_changing_parents
710 @requires_not_changing_parents
706 def set_possibly_dirty(self, filename):
711 def set_possibly_dirty(self, filename):
707 """record that the current state of the file on disk is unknown"""
712 """record that the current state of the file on disk is unknown"""
708 self._dirty = True
713 self._dirty = True
709 self._map.set_possibly_dirty(filename)
714 self._map.set_possibly_dirty(filename)
710
715
711 @requires_changing_parents
716 @requires_changing_parents
712 def update_file_p1(
717 def update_file_p1(
713 self,
718 self,
714 filename,
719 filename,
715 p1_tracked,
720 p1_tracked,
716 ):
721 ):
717 """Set a file as tracked in the parent (or not)
722 """Set a file as tracked in the parent (or not)
718
723
719 This is to be called when adjust the dirstate to a new parent after an history
724 This is to be called when adjust the dirstate to a new parent after an history
720 rewriting operation.
725 rewriting operation.
721
726
722 It should not be called during a merge (p2 != nullid) and only within
727 It should not be called during a merge (p2 != nullid) and only within
723 a `with dirstate.changing_parents(repo):` context.
728 a `with dirstate.changing_parents(repo):` context.
724 """
729 """
725 if self.in_merge:
730 if self.in_merge:
726 msg = b'update_file_reference should not be called when merging'
731 msg = b'update_file_reference should not be called when merging'
727 raise error.ProgrammingError(msg)
732 raise error.ProgrammingError(msg)
728 entry = self._map.get(filename)
733 entry = self._map.get(filename)
729 if entry is None:
734 if entry is None:
730 wc_tracked = False
735 wc_tracked = False
731 else:
736 else:
732 wc_tracked = entry.tracked
737 wc_tracked = entry.tracked
733 if not (p1_tracked or wc_tracked):
738 if not (p1_tracked or wc_tracked):
734 # the file is no longer relevant to anyone
739 # the file is no longer relevant to anyone
735 if self._map.get(filename) is not None:
740 if self._map.get(filename) is not None:
736 self._map.reset_state(filename)
741 self._map.reset_state(filename)
737 self._dirty = True
742 self._dirty = True
738 elif (not p1_tracked) and wc_tracked:
743 elif (not p1_tracked) and wc_tracked:
739 if entry is not None and entry.added:
744 if entry is not None and entry.added:
740 return # avoid dropping copy information (maybe?)
745 return # avoid dropping copy information (maybe?)
741
746
742 self._map.reset_state(
747 self._map.reset_state(
743 filename,
748 filename,
744 wc_tracked,
749 wc_tracked,
745 p1_tracked,
750 p1_tracked,
746 # the underlying reference might have changed, we will have to
751 # the underlying reference might have changed, we will have to
747 # check it.
752 # check it.
748 has_meaningful_mtime=False,
753 has_meaningful_mtime=False,
749 )
754 )
750
755
751 @requires_changing_parents
756 @requires_changing_parents
752 def update_file(
757 def update_file(
753 self,
758 self,
754 filename,
759 filename,
755 wc_tracked,
760 wc_tracked,
756 p1_tracked,
761 p1_tracked,
757 p2_info=False,
762 p2_info=False,
758 possibly_dirty=False,
763 possibly_dirty=False,
759 parentfiledata=None,
764 parentfiledata=None,
760 ):
765 ):
761 """update the information about a file in the dirstate
766 """update the information about a file in the dirstate
762
767
763 This is to be called when the direstates parent changes to keep track
768 This is to be called when the direstates parent changes to keep track
764 of what is the file situation in regards to the working copy and its parent.
769 of what is the file situation in regards to the working copy and its parent.
765
770
766 This function must be called within a `dirstate.changing_parents` context.
771 This function must be called within a `dirstate.changing_parents` context.
767
772
768 note: the API is at an early stage and we might need to adjust it
773 note: the API is at an early stage and we might need to adjust it
769 depending of what information ends up being relevant and useful to
774 depending of what information ends up being relevant and useful to
770 other processing.
775 other processing.
771 """
776 """
772 self._update_file(
777 self._update_file(
773 filename=filename,
778 filename=filename,
774 wc_tracked=wc_tracked,
779 wc_tracked=wc_tracked,
775 p1_tracked=p1_tracked,
780 p1_tracked=p1_tracked,
776 p2_info=p2_info,
781 p2_info=p2_info,
777 possibly_dirty=possibly_dirty,
782 possibly_dirty=possibly_dirty,
778 parentfiledata=parentfiledata,
783 parentfiledata=parentfiledata,
779 )
784 )
780
785
781 # XXX since this make the dirstate dirty, we should enforce that it is done
786 # XXX since this make the dirstate dirty, we should enforce that it is done
782 # withing an appropriate change-context that scope the change and ensure it
787 # withing an appropriate change-context that scope the change and ensure it
783 # eventually get written on disk (or rolled back)
788 # eventually get written on disk (or rolled back)
784 def hacky_extension_update_file(self, *args, **kwargs):
789 def hacky_extension_update_file(self, *args, **kwargs):
785 """NEVER USE THIS, YOU DO NOT NEED IT
790 """NEVER USE THIS, YOU DO NOT NEED IT
786
791
787 This function is a variant of "update_file" to be called by a small set
792 This function is a variant of "update_file" to be called by a small set
788 of extensions, it also adjust the internal state of file, but can be
793 of extensions, it also adjust the internal state of file, but can be
789 called outside an `changing_parents` context.
794 called outside an `changing_parents` context.
790
795
791 A very small number of extension meddle with the working copy content
796 A very small number of extension meddle with the working copy content
792 in a way that requires to adjust the dirstate accordingly. At the time
797 in a way that requires to adjust the dirstate accordingly. At the time
793 this command is written they are :
798 this command is written they are :
794 - keyword,
799 - keyword,
795 - largefile,
800 - largefile,
796 PLEASE DO NOT GROW THIS LIST ANY FURTHER.
801 PLEASE DO NOT GROW THIS LIST ANY FURTHER.
797
802
798 This function could probably be replaced by more semantic one (like
803 This function could probably be replaced by more semantic one (like
799 "adjust expected size" or "always revalidate file content", etc)
804 "adjust expected size" or "always revalidate file content", etc)
800 however at the time where this is writen, this is too much of a detour
805 however at the time where this is writen, this is too much of a detour
801 to be considered.
806 to be considered.
802 """
807 """
803 self._update_file(
808 self._update_file(
804 *args,
809 *args,
805 **kwargs,
810 **kwargs,
806 )
811 )
807
812
808 def _update_file(
813 def _update_file(
809 self,
814 self,
810 filename,
815 filename,
811 wc_tracked,
816 wc_tracked,
812 p1_tracked,
817 p1_tracked,
813 p2_info=False,
818 p2_info=False,
814 possibly_dirty=False,
819 possibly_dirty=False,
815 parentfiledata=None,
820 parentfiledata=None,
816 ):
821 ):
817
822
818 # note: I do not think we need to double check name clash here since we
823 # note: I do not think we need to double check name clash here since we
819 # are in a update/merge case that should already have taken care of
824 # are in a update/merge case that should already have taken care of
820 # this. The test agrees
825 # this. The test agrees
821
826
822 self._dirty = True
827 self._dirty = True
823 old_entry = self._map.get(filename)
828 old_entry = self._map.get(filename)
824 if old_entry is None:
829 if old_entry is None:
825 prev_tracked = False
830 prev_tracked = False
826 else:
831 else:
827 prev_tracked = old_entry.tracked
832 prev_tracked = old_entry.tracked
828 if prev_tracked != wc_tracked:
833 if prev_tracked != wc_tracked:
829 self._dirty_tracked_set = True
834 self._dirty_tracked_set = True
830
835
831 self._map.reset_state(
836 self._map.reset_state(
832 filename,
837 filename,
833 wc_tracked,
838 wc_tracked,
834 p1_tracked,
839 p1_tracked,
835 p2_info=p2_info,
840 p2_info=p2_info,
836 has_meaningful_mtime=not possibly_dirty,
841 has_meaningful_mtime=not possibly_dirty,
837 parentfiledata=parentfiledata,
842 parentfiledata=parentfiledata,
838 )
843 )
839
844
840 def _check_new_tracked_filename(self, filename):
845 def _check_new_tracked_filename(self, filename):
841 scmutil.checkfilename(filename)
846 scmutil.checkfilename(filename)
842 if self._map.hastrackeddir(filename):
847 if self._map.hastrackeddir(filename):
843 msg = _(b'directory %r already in dirstate')
848 msg = _(b'directory %r already in dirstate')
844 msg %= pycompat.bytestr(filename)
849 msg %= pycompat.bytestr(filename)
845 raise error.Abort(msg)
850 raise error.Abort(msg)
846 # shadows
851 # shadows
847 for d in pathutil.finddirs(filename):
852 for d in pathutil.finddirs(filename):
848 if self._map.hastrackeddir(d):
853 if self._map.hastrackeddir(d):
849 break
854 break
850 entry = self._map.get(d)
855 entry = self._map.get(d)
851 if entry is not None and not entry.removed:
856 if entry is not None and not entry.removed:
852 msg = _(b'file %r in dirstate clashes with %r')
857 msg = _(b'file %r in dirstate clashes with %r')
853 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
858 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
854 raise error.Abort(msg)
859 raise error.Abort(msg)
855 self._check_sparse(filename)
860 self._check_sparse(filename)
856
861
857 def _check_sparse(self, filename):
862 def _check_sparse(self, filename):
858 """Check that a filename is inside the sparse profile"""
863 """Check that a filename is inside the sparse profile"""
859 sparsematch = self._sparsematcher
864 sparsematch = self._sparsematcher
860 if sparsematch is not None and not sparsematch.always():
865 if sparsematch is not None and not sparsematch.always():
861 if not sparsematch(filename):
866 if not sparsematch(filename):
862 msg = _(b"cannot add '%s' - it is outside the sparse checkout")
867 msg = _(b"cannot add '%s' - it is outside the sparse checkout")
863 hint = _(
868 hint = _(
864 b'include file with `hg debugsparse --include <pattern>` or use '
869 b'include file with `hg debugsparse --include <pattern>` or use '
865 b'`hg add -s <file>` to include file directory while adding'
870 b'`hg add -s <file>` to include file directory while adding'
866 )
871 )
867 raise error.Abort(msg % filename, hint=hint)
872 raise error.Abort(msg % filename, hint=hint)
868
873
869 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
874 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
870 if exists is None:
875 if exists is None:
871 exists = os.path.lexists(os.path.join(self._root, path))
876 exists = os.path.lexists(os.path.join(self._root, path))
872 if not exists:
877 if not exists:
873 # Maybe a path component exists
878 # Maybe a path component exists
874 if not ignoremissing and b'/' in path:
879 if not ignoremissing and b'/' in path:
875 d, f = path.rsplit(b'/', 1)
880 d, f = path.rsplit(b'/', 1)
876 d = self._normalize(d, False, ignoremissing, None)
881 d = self._normalize(d, False, ignoremissing, None)
877 folded = d + b"/" + f
882 folded = d + b"/" + f
878 else:
883 else:
879 # No path components, preserve original case
884 # No path components, preserve original case
880 folded = path
885 folded = path
881 else:
886 else:
882 # recursively normalize leading directory components
887 # recursively normalize leading directory components
883 # against dirstate
888 # against dirstate
884 if b'/' in normed:
889 if b'/' in normed:
885 d, f = normed.rsplit(b'/', 1)
890 d, f = normed.rsplit(b'/', 1)
886 d = self._normalize(d, False, ignoremissing, True)
891 d = self._normalize(d, False, ignoremissing, True)
887 r = self._root + b"/" + d
892 r = self._root + b"/" + d
888 folded = d + b"/" + util.fspath(f, r)
893 folded = d + b"/" + util.fspath(f, r)
889 else:
894 else:
890 folded = util.fspath(normed, self._root)
895 folded = util.fspath(normed, self._root)
891 storemap[normed] = folded
896 storemap[normed] = folded
892
897
893 return folded
898 return folded
894
899
895 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
900 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
896 normed = util.normcase(path)
901 normed = util.normcase(path)
897 folded = self._map.filefoldmap.get(normed, None)
902 folded = self._map.filefoldmap.get(normed, None)
898 if folded is None:
903 if folded is None:
899 if isknown:
904 if isknown:
900 folded = path
905 folded = path
901 else:
906 else:
902 folded = self._discoverpath(
907 folded = self._discoverpath(
903 path, normed, ignoremissing, exists, self._map.filefoldmap
908 path, normed, ignoremissing, exists, self._map.filefoldmap
904 )
909 )
905 return folded
910 return folded
906
911
907 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
912 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
908 normed = util.normcase(path)
913 normed = util.normcase(path)
909 folded = self._map.filefoldmap.get(normed, None)
914 folded = self._map.filefoldmap.get(normed, None)
910 if folded is None:
915 if folded is None:
911 folded = self._map.dirfoldmap.get(normed, None)
916 folded = self._map.dirfoldmap.get(normed, None)
912 if folded is None:
917 if folded is None:
913 if isknown:
918 if isknown:
914 folded = path
919 folded = path
915 else:
920 else:
916 # store discovered result in dirfoldmap so that future
921 # store discovered result in dirfoldmap so that future
917 # normalizefile calls don't start matching directories
922 # normalizefile calls don't start matching directories
918 folded = self._discoverpath(
923 folded = self._discoverpath(
919 path, normed, ignoremissing, exists, self._map.dirfoldmap
924 path, normed, ignoremissing, exists, self._map.dirfoldmap
920 )
925 )
921 return folded
926 return folded
922
927
923 def normalize(self, path, isknown=False, ignoremissing=False):
928 def normalize(self, path, isknown=False, ignoremissing=False):
924 """
929 """
925 normalize the case of a pathname when on a casefolding filesystem
930 normalize the case of a pathname when on a casefolding filesystem
926
931
927 isknown specifies whether the filename came from walking the
932 isknown specifies whether the filename came from walking the
928 disk, to avoid extra filesystem access.
933 disk, to avoid extra filesystem access.
929
934
930 If ignoremissing is True, missing path are returned
935 If ignoremissing is True, missing path are returned
931 unchanged. Otherwise, we try harder to normalize possibly
936 unchanged. Otherwise, we try harder to normalize possibly
932 existing path components.
937 existing path components.
933
938
934 The normalized case is determined based on the following precedence:
939 The normalized case is determined based on the following precedence:
935
940
936 - version of name already stored in the dirstate
941 - version of name already stored in the dirstate
937 - version of name stored on disk
942 - version of name stored on disk
938 - version provided via command arguments
943 - version provided via command arguments
939 """
944 """
940
945
941 if self._checkcase:
946 if self._checkcase:
942 return self._normalize(path, isknown, ignoremissing)
947 return self._normalize(path, isknown, ignoremissing)
943 return path
948 return path
944
949
945 # XXX this method is barely used, as a result:
950 # XXX this method is barely used, as a result:
946 # - its semantic is unclear
951 # - its semantic is unclear
947 # - do we really needs it ?
952 # - do we really needs it ?
948 @requires_changing_parents
953 @requires_changing_parents
949 def clear(self):
954 def clear(self):
950 self._map.clear()
955 self._map.clear()
951 self._dirty = True
956 self._dirty = True
952
957
953 @requires_changing_parents
958 @requires_changing_parents
954 def rebuild(self, parent, allfiles, changedfiles=None):
959 def rebuild(self, parent, allfiles, changedfiles=None):
955 matcher = self._sparsematcher
960 matcher = self._sparsematcher
956 if matcher is not None and not matcher.always():
961 if matcher is not None and not matcher.always():
957 # should not add non-matching files
962 # should not add non-matching files
958 allfiles = [f for f in allfiles if matcher(f)]
963 allfiles = [f for f in allfiles if matcher(f)]
959 if changedfiles:
964 if changedfiles:
960 changedfiles = [f for f in changedfiles if matcher(f)]
965 changedfiles = [f for f in changedfiles if matcher(f)]
961
966
962 if changedfiles is not None:
967 if changedfiles is not None:
963 # these files will be deleted from the dirstate when they are
968 # these files will be deleted from the dirstate when they are
964 # not found to be in allfiles
969 # not found to be in allfiles
965 dirstatefilestoremove = {f for f in self if not matcher(f)}
970 dirstatefilestoremove = {f for f in self if not matcher(f)}
966 changedfiles = dirstatefilestoremove.union(changedfiles)
971 changedfiles = dirstatefilestoremove.union(changedfiles)
967
972
968 if changedfiles is None:
973 if changedfiles is None:
969 # Rebuild entire dirstate
974 # Rebuild entire dirstate
970 to_lookup = allfiles
975 to_lookup = allfiles
971 to_drop = []
976 to_drop = []
972 self.clear()
977 self.clear()
973 elif len(changedfiles) < 10:
978 elif len(changedfiles) < 10:
974 # Avoid turning allfiles into a set, which can be expensive if it's
979 # Avoid turning allfiles into a set, which can be expensive if it's
975 # large.
980 # large.
976 to_lookup = []
981 to_lookup = []
977 to_drop = []
982 to_drop = []
978 for f in changedfiles:
983 for f in changedfiles:
979 if f in allfiles:
984 if f in allfiles:
980 to_lookup.append(f)
985 to_lookup.append(f)
981 else:
986 else:
982 to_drop.append(f)
987 to_drop.append(f)
983 else:
988 else:
984 changedfilesset = set(changedfiles)
989 changedfilesset = set(changedfiles)
985 to_lookup = changedfilesset & set(allfiles)
990 to_lookup = changedfilesset & set(allfiles)
986 to_drop = changedfilesset - to_lookup
991 to_drop = changedfilesset - to_lookup
987
992
988 if self._origpl is None:
993 if self._origpl is None:
989 self._origpl = self._pl
994 self._origpl = self._pl
990 self._map.setparents(parent, self._nodeconstants.nullid)
995 self._map.setparents(parent, self._nodeconstants.nullid)
991
996
992 for f in to_lookup:
997 for f in to_lookup:
993 if self.in_merge:
998 if self.in_merge:
994 self.set_tracked(f)
999 self.set_tracked(f)
995 else:
1000 else:
996 self._map.reset_state(
1001 self._map.reset_state(
997 f,
1002 f,
998 wc_tracked=True,
1003 wc_tracked=True,
999 p1_tracked=True,
1004 p1_tracked=True,
1000 )
1005 )
1001 for f in to_drop:
1006 for f in to_drop:
1002 self._map.reset_state(f)
1007 self._map.reset_state(f)
1003
1008
1004 self._dirty = True
1009 self._dirty = True
1005
1010
1006 def identity(self):
1011 def identity(self):
1007 """Return identity of dirstate itself to detect changing in storage
1012 """Return identity of dirstate itself to detect changing in storage
1008
1013
1009 If identity of previous dirstate is equal to this, writing
1014 If identity of previous dirstate is equal to this, writing
1010 changes based on the former dirstate out can keep consistency.
1015 changes based on the former dirstate out can keep consistency.
1011 """
1016 """
1012 return self._map.identity
1017 return self._map.identity
1013
1018
1014 def write(self, tr):
1019 def write(self, tr):
1015 if not self._dirty:
1020 if not self._dirty:
1016 return
1021 return
1017 # make sure we don't request a write of invalidated content
1022 # make sure we don't request a write of invalidated content
1018 # XXX move before the dirty check once `unlock` stop calling `write`
1023 # XXX move before the dirty check once `unlock` stop calling `write`
1019 assert not self._invalidated_context
1024 assert not self._invalidated_context
1020
1025
1021 write_key = self._use_tracked_hint and self._dirty_tracked_set
1026 write_key = self._use_tracked_hint and self._dirty_tracked_set
1022 if tr:
1027 if tr:
1023
1028
1024 def on_abort(tr):
1029 def on_abort(tr):
1025 self._attached_to_a_transaction = False
1030 self._attached_to_a_transaction = False
1026 self.invalidate()
1031 self.invalidate()
1027
1032
1028 # make sure we invalidate the current change on abort
1033 # make sure we invalidate the current change on abort
1029 if tr is not None:
1034 if tr is not None:
1030 tr.addabort(
1035 tr.addabort(
1031 b'dirstate-invalidate%s' % self._tr_key_suffix,
1036 b'dirstate-invalidate%s' % self._tr_key_suffix,
1032 on_abort,
1037 on_abort,
1033 )
1038 )
1034
1039
1035 self._attached_to_a_transaction = True
1040 self._attached_to_a_transaction = True
1036
1041
1037 def on_success(f):
1042 def on_success(f):
1038 self._attached_to_a_transaction = False
1043 self._attached_to_a_transaction = False
1039 self._writedirstate(tr, f),
1044 self._writedirstate(tr, f),
1040
1045
1041 # delay writing in-memory changes out
1046 # delay writing in-memory changes out
1042 tr.addfilegenerator(
1047 tr.addfilegenerator(
1043 b'dirstate-1-main%s' % self._tr_key_suffix,
1048 b'dirstate-1-main%s' % self._tr_key_suffix,
1044 (self._filename,),
1049 (self._filename,),
1045 on_success,
1050 on_success,
1046 location=b'plain',
1051 location=b'plain',
1047 post_finalize=True,
1052 post_finalize=True,
1048 )
1053 )
1049 if write_key:
1054 if write_key:
1050 tr.addfilegenerator(
1055 tr.addfilegenerator(
1051 b'dirstate-2-key-post%s' % self._tr_key_suffix,
1056 b'dirstate-2-key-post%s' % self._tr_key_suffix,
1052 (self._filename_th,),
1057 (self._filename_th,),
1053 lambda f: self._write_tracked_hint(tr, f),
1058 lambda f: self._write_tracked_hint(tr, f),
1054 location=b'plain',
1059 location=b'plain',
1055 post_finalize=True,
1060 post_finalize=True,
1056 )
1061 )
1057 return
1062 return
1058
1063
1059 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
1064 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
1060 with file(self._filename) as f:
1065 with file(self._filename) as f:
1061 self._writedirstate(tr, f)
1066 self._writedirstate(tr, f)
1062 if write_key:
1067 if write_key:
1063 # we update the key-file after writing to make sure reader have a
1068 # we update the key-file after writing to make sure reader have a
1064 # key that match the newly written content
1069 # key that match the newly written content
1065 with file(self._filename_th) as f:
1070 with file(self._filename_th) as f:
1066 self._write_tracked_hint(tr, f)
1071 self._write_tracked_hint(tr, f)
1067
1072
1068 def delete_tracked_hint(self):
1073 def delete_tracked_hint(self):
1069 """remove the tracked_hint file
1074 """remove the tracked_hint file
1070
1075
1071 To be used by format downgrades operation"""
1076 To be used by format downgrades operation"""
1072 self._opener.unlink(self._filename_th)
1077 self._opener.unlink(self._filename_th)
1073 self._use_tracked_hint = False
1078 self._use_tracked_hint = False
1074
1079
1075 def addparentchangecallback(self, category, callback):
1080 def addparentchangecallback(self, category, callback):
1076 """add a callback to be called when the wd parents are changed
1081 """add a callback to be called when the wd parents are changed
1077
1082
1078 Callback will be called with the following arguments:
1083 Callback will be called with the following arguments:
1079 dirstate, (oldp1, oldp2), (newp1, newp2)
1084 dirstate, (oldp1, oldp2), (newp1, newp2)
1080
1085
1081 Category is a unique identifier to allow overwriting an old callback
1086 Category is a unique identifier to allow overwriting an old callback
1082 with a newer callback.
1087 with a newer callback.
1083 """
1088 """
1084 self._plchangecallbacks[category] = callback
1089 self._plchangecallbacks[category] = callback
1085
1090
    def _writedirstate(self, tr, st):
        """Serialize the dirstate map to the already-open file object *st*.

        *tr* is the active transaction (or None outside of one); it is
        forwarded to the map's own write routine.
        """
        # make sure we don't write invalidated content
        assert not self._invalidated_context
        # notify callbacks about parents change
        if self._origpl is not None and self._origpl != self._pl:
            # sorted() keeps the callback invocation order deterministic
            for c, callback in sorted(self._plchangecallbacks.items()):
                callback(self, self._origpl, self._pl)
            self._origpl = None
        self._map.write(tr, st)
        # on-disk state now matches memory: clear both dirty flags
        self._dirty = False
        self._dirty_tracked_set = False
1097
1102
1098 def _write_tracked_hint(self, tr, f):
1103 def _write_tracked_hint(self, tr, f):
1099 key = node.hex(uuid.uuid4().bytes)
1104 key = node.hex(uuid.uuid4().bytes)
1100 f.write(b"1\n%s\n" % key) # 1 is the format version
1105 f.write(b"1\n%s\n" % key) # 1 is the format version
1101
1106
1102 def _dirignore(self, f):
1107 def _dirignore(self, f):
1103 if self._ignore(f):
1108 if self._ignore(f):
1104 return True
1109 return True
1105 for p in pathutil.finddirs(f):
1110 for p in pathutil.finddirs(f):
1106 if self._ignore(p):
1111 if self._ignore(p):
1107 return True
1112 return True
1108 return False
1113 return False
1109
1114
1110 def _ignorefiles(self):
1115 def _ignorefiles(self):
1111 files = []
1116 files = []
1112 if os.path.exists(self._join(b'.hgignore')):
1117 if os.path.exists(self._join(b'.hgignore')):
1113 files.append(self._join(b'.hgignore'))
1118 files.append(self._join(b'.hgignore'))
1114 for name, path in self._ui.configitems(b"ui"):
1119 for name, path in self._ui.configitems(b"ui"):
1115 if name == b'ignore' or name.startswith(b'ignore.'):
1120 if name == b'ignore' or name.startswith(b'ignore.'):
1116 # we need to use os.path.join here rather than self._join
1121 # we need to use os.path.join here rather than self._join
1117 # because path is arbitrary and user-specified
1122 # because path is arbitrary and user-specified
1118 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1123 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1119 return files
1124 return files
1120
1125
1121 def _ignorefileandline(self, f):
1126 def _ignorefileandline(self, f):
1122 files = collections.deque(self._ignorefiles())
1127 files = collections.deque(self._ignorefiles())
1123 visited = set()
1128 visited = set()
1124 while files:
1129 while files:
1125 i = files.popleft()
1130 i = files.popleft()
1126 patterns = matchmod.readpatternfile(
1131 patterns = matchmod.readpatternfile(
1127 i, self._ui.warn, sourceinfo=True
1132 i, self._ui.warn, sourceinfo=True
1128 )
1133 )
1129 for pattern, lineno, line in patterns:
1134 for pattern, lineno, line in patterns:
1130 kind, p = matchmod._patsplit(pattern, b'glob')
1135 kind, p = matchmod._patsplit(pattern, b'glob')
1131 if kind == b"subinclude":
1136 if kind == b"subinclude":
1132 if p not in visited:
1137 if p not in visited:
1133 files.append(p)
1138 files.append(p)
1134 continue
1139 continue
1135 m = matchmod.match(
1140 m = matchmod.match(
1136 self._root, b'', [], [pattern], warn=self._ui.warn
1141 self._root, b'', [], [pattern], warn=self._ui.warn
1137 )
1142 )
1138 if m(f):
1143 if m(f):
1139 return (i, lineno, line)
1144 return (i, lineno, line)
1140 visited.add(i)
1145 visited.add(i)
1141 return (None, -1, b"")
1146 return (None, -1, b"")
1142
1147
    def _walkexplicit(self, match, subrepos):
        """Get stat data about the files explicitly specified by match.

        Return a triple (results, dirsfound, dirsnotfound).
        - results is a mapping from filename to stat result. It also contains
          listings mapping subrepos and .hg to None.
        - dirsfound is a list of files found to be directories.
        - dirsnotfound is a list of files that the dirstate thinks are
          directories and that were not found."""

        # build a human-readable error message for paths that are neither
        # regular files, symlinks nor directories
        def badtype(mode):
            kind = _(b'unknown')
            if stat.S_ISCHR(mode):
                kind = _(b'character device')
            elif stat.S_ISBLK(mode):
                kind = _(b'block device')
            elif stat.S_ISFIFO(mode):
                kind = _(b'fifo')
            elif stat.S_ISSOCK(mode):
                kind = _(b'socket')
            elif stat.S_ISDIR(mode):
                kind = _(b'directory')
            return _(b'unsupported file type (type is %s)') % kind

        # hoist frequently-used attributes/functions into locals
        badfn = match.bad
        dmap = self._map
        lstat = os.lstat
        getkind = stat.S_IFMT
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join
        dirsfound = []
        foundadd = dirsfound.append
        dirsnotfound = []
        notfoundadd = dirsnotfound.append

        if not match.isexact() and self._checkcase:
            normalize = self._normalize
        else:
            normalize = None

        # drop explicit files that live inside a listed subrepo; both lists
        # are sorted so a single merge-style pass suffices
        files = sorted(match.files())
        subrepos.sort()
        i, j = 0, 0
        while i < len(files) and j < len(subrepos):
            subpath = subrepos[j] + b"/"
            if files[i] < subpath:
                i += 1
                continue
            while i < len(files) and files[i].startswith(subpath):
                del files[i]
            j += 1

        if not files or b'' in files:
            files = [b'']
            # constructing the foldmap is expensive, so don't do it for the
            # common case where files is ['']
            normalize = None
        # subrepos and .hg are pre-seeded with None sentinel values
        results = dict.fromkeys(subrepos)
        results[b'.hg'] = None

        for ff in files:
            if normalize:
                nf = normalize(ff, False, True)
            else:
                nf = ff
            if nf in results:
                continue

            try:
                st = lstat(join(nf))
                kind = getkind(st.st_mode)
                if kind == dirkind:
                    if nf in dmap:
                        # file replaced by dir on disk but still in dirstate
                        results[nf] = None
                    foundadd((nf, ff))
                elif kind == regkind or kind == lnkkind:
                    results[nf] = st
                else:
                    badfn(ff, badtype(kind))
                    if nf in dmap:
                        results[nf] = None
            except (OSError) as inst:
                # nf not found on disk - it is dirstate only
                if nf in dmap:  # does it exactly match a missing file?
                    results[nf] = None
                else:  # does it match a missing directory?
                    if self._map.hasdir(nf):
                        notfoundadd(nf)
                    else:
                        badfn(ff, encoding.strtolocal(inst.strerror))

        # match.files() may contain explicitly-specified paths that shouldn't
        # be taken; drop them from the list of files found. dirsfound/notfound
        # aren't filtered here because they will be tested later.
        if match.anypats():
            for f in list(results):
                if f == b'.hg' or f in subrepos:
                    # keep sentinel to disable further out-of-repo walks
                    continue
                if not match(f):
                    del results[f]

        # Case insensitive filesystems cannot rely on lstat() failing to detect
        # a case-only rename. Prune the stat object for any file that does not
        # match the case in the filesystem, if there are multiple files that
        # normalize to the same path.
        if match.isexact() and self._checkcase:
            normed = {}

            # group the found files by their case-normalized form
            for f, st in results.items():
                if st is None:
                    continue

                nc = util.normcase(f)
                paths = normed.get(nc)

                if paths is None:
                    paths = set()
                    normed[nc] = paths

                paths.add(f)

            # for ambiguous groups, keep only the spelling actually on disk
            for norm, paths in normed.items():
                if len(paths) > 1:
                    for path in paths:
                        folded = self._discoverpath(
                            path, norm, True, None, self._map.dirfoldmap
                        )
                        if path != folded:
                            results[path] = None

        return results, dirsfound, dirsnotfound
1278
1283
    def walk(self, match, subrepos, unknown, ignored, full=True):
        """
        Walk recursively through the directory tree, finding all files
        matched by match.

        ``unknown`` and ``ignored`` control whether unknown resp. ignored
        files are reported (they select the ignore predicates used below).

        If full is False, maybe skip some known-clean files.

        Return a dict mapping filename to stat-like object (either
        mercurial.osutil.stat instance or return value of os.stat()).

        """
        # full is a flag that extensions that hook into walk can use -- this
        # implementation doesn't use it at all. This satisfies the contract
        # because we only guarantee a "maybe".

        if ignored:
            ignore = util.never
            dirignore = util.never
        elif unknown:
            ignore = self._ignore
            dirignore = self._dirignore
        else:
            # if not unknown and not ignored, drop dir recursion and step 2
            ignore = util.always
            dirignore = util.always

        if self._sparsematchfn is not None:
            # restrict the walk to the sparse profile, but keep files the
            # caller named explicitly
            em = matchmod.exact(match.files())
            sm = matchmod.unionmatcher([self._sparsematcher, em])
            match = matchmod.intersectmatchers(match, sm)

        # hoist hot attributes/functions into locals for the traversal loop
        matchfn = match.matchfn
        matchalways = match.always()
        matchtdir = match.traversedir
        dmap = self._map
        listdir = util.listdir
        lstat = os.lstat
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join

        exact = skipstep3 = False
        if match.isexact():  # match.exact
            exact = True
            dirignore = util.always  # skip step 2
        elif match.prefix():  # match.match, no patterns
            skipstep3 = True

        if not exact and self._checkcase:
            normalize = self._normalize
            normalizefile = self._normalizefile
            skipstep3 = False
        else:
            normalize = self._normalize
            normalizefile = None

        # step 1: find all explicit files
        results, work, dirsnotfound = self._walkexplicit(match, subrepos)
        if matchtdir:
            for d in work:
                matchtdir(d[0])
            for d in dirsnotfound:
                matchtdir(d)

        skipstep3 = skipstep3 and not (work or dirsnotfound)
        work = [d for d in work if not dirignore(d[0])]

        # step 2: visit subdirectories
        def traverse(work, alreadynormed):
            wadd = work.append
            while work:
                tracing.counter('dirstate.walk work', len(work))
                nd = work.pop()
                visitentries = match.visitchildrenset(nd)
                if not visitentries:
                    continue
                if visitentries == b'this' or visitentries == b'all':
                    visitentries = None
                skip = None
                if nd != b'':
                    skip = b'.hg'
                try:
                    with tracing.log('dirstate.walk.traverse listdir %s', nd):
                        entries = listdir(join(nd), stat=True, skip=skip)
                except (PermissionError, FileNotFoundError) as inst:
                    # unreadable directory: report it and keep walking the rest
                    match.bad(
                        self.pathto(nd), encoding.strtolocal(inst.strerror)
                    )
                    continue
                for f, kind, st in entries:
                    # Some matchers may return files in the visitentries set,
                    # instead of 'this', if the matcher explicitly mentions them
                    # and is not an exactmatcher. This is acceptable; we do not
                    # make any hard assumptions about file-or-directory below
                    # based on the presence of `f` in visitentries. If
                    # visitchildrenset returned a set, we can always skip the
                    # entries *not* in the set it provided regardless of whether
                    # they're actually a file or a directory.
                    if visitentries and f not in visitentries:
                        continue
                    if normalizefile:
                        # even though f might be a directory, we're only
                        # interested in comparing it to files currently in the
                        # dmap -- therefore normalizefile is enough
                        nf = normalizefile(
                            nd and (nd + b"/" + f) or f, True, True
                        )
                    else:
                        nf = nd and (nd + b"/" + f) or f
                    if nf not in results:
                        if kind == dirkind:
                            if not ignore(nf):
                                if matchtdir:
                                    matchtdir(nf)
                                wadd(nf)
                            if nf in dmap and (matchalways or matchfn(nf)):
                                results[nf] = None
                        elif kind == regkind or kind == lnkkind:
                            if nf in dmap:
                                if matchalways or matchfn(nf):
                                    results[nf] = st
                            elif (matchalways or matchfn(nf)) and not ignore(
                                nf
                            ):
                                # unknown file -- normalize if necessary
                                if not alreadynormed:
                                    nf = normalize(nf, False, True)
                                results[nf] = st
                        elif nf in dmap and (matchalways or matchfn(nf)):
                            results[nf] = None

        for nd, d in work:
            # alreadynormed means that processwork doesn't have to do any
            # expensive directory normalization
            alreadynormed = not normalize or nd == d
            traverse([d], alreadynormed)

        # remove the sentinel entries seeded by _walkexplicit
        for s in subrepos:
            del results[s]
        del results[b'.hg']

        # step 3: visit remaining files from dmap
        if not skipstep3 and not exact:
            # If a dmap file is not in results yet, it was either
            # a) not matching matchfn b) ignored, c) missing, or d) under a
            # symlink directory.
            if not results and matchalways:
                visit = [f for f in dmap]
            else:
                visit = [f for f in dmap if f not in results and matchfn(f)]
            visit.sort()

            if unknown:
                # unknown == True means we walked all dirs under the roots
                # that wasn't ignored, and everything that matched was stat'ed
                # and is already in results.
                # The rest must thus be ignored or under a symlink.
                audit_path = pathutil.pathauditor(self._root, cached=True)

                for nf in iter(visit):
                    # If a stat for the same file was already added with a
                    # different case, don't add one for this, since that would
                    # make it appear as if the file exists under both names
                    # on disk.
                    if (
                        normalizefile
                        and normalizefile(nf, True, True) in results
                    ):
                        results[nf] = None
                    # Report ignored items in the dmap as long as they are not
                    # under a symlink directory.
                    elif audit_path.check(nf):
                        try:
                            results[nf] = lstat(join(nf))
                            # file was just ignored, no links, and exists
                        except OSError:
                            # file doesn't exist
                            results[nf] = None
                    else:
                        # It's either missing or under a symlink directory
                        # which we in this case report as missing
                        results[nf] = None
            else:
                # We may not have walked the full directory tree above,
                # so stat and check everything we missed.
                iv = iter(visit)
                for st in util.statfiles([join(i) for i in visit]):
                    results[next(iv)] = st
        return results
1469
1474
    def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
        """Compute status through the Rust fast path.

        Returns a ``(lookup, scmutil.status)`` pair mirroring the pure-Python
        status code path.
        """
        if self._sparsematchfn is not None:
            # restrict the walk to the sparse profile, but keep files the
            # caller named explicitly
            em = matchmod.exact(matcher.files())
            sm = matchmod.unionmatcher([self._sparsematcher, em])
            matcher = matchmod.intersectmatchers(matcher, sm)
        # Force Rayon (Rust parallelism library) to respect the number of
        # workers. This is a temporary workaround until Rust code knows
        # how to read the config file.
        numcpus = self._ui.configint(b"worker", b"numcpus")
        if numcpus is not None:
            encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)

        workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
        if not workers_enabled:
            encoding.environ[b"RAYON_NUM_THREADS"] = b"1"

        # the Rust call returns a fixed 12-tuple; the unpack order here must
        # match the Rust side exactly
        (
            lookup,
            modified,
            added,
            removed,
            deleted,
            clean,
            ignored,
            unknown,
            warnings,
            bad,
            traversed,
            dirty,
        ) = rustmod.status(
            self._map._map,
            matcher,
            self._rootdir,
            self._ignorefiles(),
            self._checkexec,
            bool(list_clean),
            bool(list_ignored),
            bool(list_unknown),
            bool(matcher.traversedir),
        )

        # the Rust walk may have mutated the dirstate map; propagate its
        # dirty flag so the change eventually gets written out
        self._dirty |= dirty

        if matcher.traversedir:
            for dir in traversed:
                matcher.traversedir(dir)

        if self._ui.warn:
            for item in warnings:
                if isinstance(item, tuple):
                    # (file_path, syntax) pair: invalid syntax in a pattern file
                    file_path, syntax = item
                    msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
                        file_path,
                        syntax,
                    )
                    self._ui.warn(msg)
                else:
                    # bare path: the pattern file could not be read
                    msg = _(b"skipping unreadable pattern file '%s': %s\n")
                    self._ui.warn(
                        msg
                        % (
                            pathutil.canonpath(
                                self._rootdir, self._rootdir, item
                            ),
                            b"No such file or directory",
                        )
                    )

        for fn, message in bad:
            matcher.bad(fn, encoding.strtolocal(message))

        status = scmutil.status(
            modified=modified,
            added=added,
            removed=removed,
            deleted=deleted,
            unknown=unknown,
            ignored=ignored,
            clean=clean,
        )
        return (lookup, status)
1551
1556
1552 # XXX since this can make the dirstate dirty (through rust), we should
1557 # XXX since this can make the dirstate dirty (through rust), we should
1553 # enforce that it is done withing an appropriate change-context that scope
    # enforce that it is done within an appropriate change-context that scopes
    # the change and ensures it eventually gets written on disk (or rolled back)
    def status(self, match, subrepos, ignored, clean, unknown):
        """Determine the status of the working copy relative to the
        dirstate and return a pair of (unsure, status), where status is of type
        scmutil.status and:

          unsure:
            files that might have been modified since the dirstate was
            written, but need to be read to be sure (size is the same
            but mtime differs)
          status.modified:
            files that have definitely been modified since the dirstate
            was written (different size or mode)
          status.clean:
            files that have definitely not been modified since the
            dirstate was written

        The actual return value is a 3-tuple ``(unsure, status,
        mtime_boundary)``; ``mtime_boundary`` is the filesystem clock reading
        taken before the walk (or None when it could not be read).
        """
        listignored, listclean, listunknown = ignored, clean, unknown
        # Rebind the parameter names to the result lists they correspond to.
        lookup, modified, added, unknown, ignored = [], [], [], [], []
        removed, deleted, clean = [], [], []

        dmap = self._map
        dmap.preload()

        # Decide whether the fast Rust implementation can be used; it only
        # supports a subset of the matcher types and configurations.
        use_rust = True

        allowed_matchers = (
            matchmod.alwaysmatcher,
            matchmod.differencematcher,
            matchmod.exactmatcher,
            matchmod.includematcher,
            matchmod.intersectionmatcher,
            matchmod.nevermatcher,
            matchmod.unionmatcher,
        )

        if rustmod is None:
            use_rust = False
        elif self._checkcase:
            # Case-insensitive filesystems are not handled yet
            use_rust = False
        elif subrepos:
            use_rust = False
        elif not isinstance(match, allowed_matchers):
            # Some matchers have yet to be implemented
            use_rust = False

        # Get the time from the filesystem so we can disambiguate files that
        # appear modified in the present or future.
        try:
            mtime_boundary = timestamp.get_fs_now(self._opener)
        except OSError:
            # In largefiles or readonly context
            mtime_boundary = None

        if use_rust:
            try:
                res = self._rust_status(
                    match, listclean, listignored, listunknown
                )
                return res + (mtime_boundary,)
            except rustmod.FallbackError:
                # The Rust implementation could not handle this case; fall
                # through to the pure-Python walk below.
                pass

        def noop(f):
            pass

        # Bind hot-loop lookups to locals for speed; the walk below can
        # iterate over very large working directories.
        dcontains = dmap.__contains__
        dget = dmap.__getitem__
        ladd = lookup.append  # aka "unsure"
        madd = modified.append
        aadd = added.append
        uadd = unknown.append if listunknown else noop
        iadd = ignored.append if listignored else noop
        radd = removed.append
        dadd = deleted.append
        cadd = clean.append if listclean else noop
        mexact = match.exact
        dirignore = self._dirignore
        checkexec = self._checkexec
        checklink = self._checklink
        copymap = self._map.copymap

        # We need to do full walks when either
        # - we're listing all clean files, or
        # - match.traversedir does something, because match.traversedir should
        # be called for every dir in the working dir
        full = listclean or match.traversedir is not None
        for fn, st in self.walk(
            match, subrepos, listunknown, listignored, full=full
        ).items():
            if not dcontains(fn):
                # File on disk but not in the dirstate: ignored or unknown.
                if (listignored or mexact(fn)) and dirignore(fn):
                    if listignored:
                        iadd(fn)
                else:
                    uadd(fn)
                continue

            t = dget(fn)
            mode = t.mode
            size = t.size

            if not st and t.tracked:
                # Tracked in the dirstate but gone from disk: deleted.
                dadd(fn)
            elif t.p2_info:
                madd(fn)
            elif t.added:
                aadd(fn)
            elif t.removed:
                radd(fn)
            elif t.tracked:
                if not checklink and t.has_fallback_symlink:
                    # If the file system does not support symlink, the mode
                    # might not be correctly stored in the dirstate, so do not
                    # trust it.
                    ladd(fn)
                elif not checkexec and t.has_fallback_exec:
                    # If the file system does not support exec bits, the mode
                    # might not be correctly stored in the dirstate, so do not
                    # trust it.
                    ladd(fn)
                elif (
                    size >= 0
                    and (
                        (size != st.st_size and size != st.st_size & _rangemask)
                        or ((mode ^ st.st_mode) & 0o100 and checkexec)
                    )
                    or fn in copymap
                ):
                    if stat.S_ISLNK(st.st_mode) and size != st.st_size:
                        # issue6456: Size returned may be longer due to
                        # encryption on EXT-4 fscrypt, undecided.
                        ladd(fn)
                    else:
                        madd(fn)
                elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
                    # There might be a change in the future if for example the
                    # internal clock is off, but this is a case where the issues
                    # the user would face would be a lot worse and there is
                    # nothing we can really do.
                    ladd(fn)
                elif listclean:
                    cadd(fn)
        status = scmutil.status(
            modified, added, removed, deleted, unknown, ignored, clean
        )
        return (lookup, status, mtime_boundary)
1702
1707
1703 def matches(self, match):
1708 def matches(self, match):
1704 """
1709 """
1705 return files in the dirstate (in whatever state) filtered by match
1710 return files in the dirstate (in whatever state) filtered by match
1706 """
1711 """
1707 dmap = self._map
1712 dmap = self._map
1708 if rustmod is not None:
1713 if rustmod is not None:
1709 dmap = self._map._map
1714 dmap = self._map._map
1710
1715
1711 if match.always():
1716 if match.always():
1712 return dmap.keys()
1717 return dmap.keys()
1713 files = match.files()
1718 files = match.files()
1714 if match.isexact():
1719 if match.isexact():
1715 # fast path -- filter the other way around, since typically files is
1720 # fast path -- filter the other way around, since typically files is
1716 # much smaller than dmap
1721 # much smaller than dmap
1717 return [f for f in files if f in dmap]
1722 return [f for f in files if f in dmap]
1718 if match.prefix() and all(fn in dmap for fn in files):
1723 if match.prefix() and all(fn in dmap for fn in files):
1719 # fast path -- all the values are known to be files, so just return
1724 # fast path -- all the values are known to be files, so just return
1720 # that
1725 # that
1721 return list(files)
1726 return list(files)
1722 return [f for f in dmap if match(f)]
1727 return [f for f in dmap if match(f)]
1723
1728
1724 def _actualfilename(self, tr):
1729 def _actualfilename(self, tr):
1725 if tr:
1730 if tr:
1726 return self._pendingfilename
1731 return self._pendingfilename
1727 else:
1732 else:
1728 return self._filename
1733 return self._filename
1729
1734
1730 def all_file_names(self):
1735 def all_file_names(self):
1731 """list all filename currently used by this dirstate
1736 """list all filename currently used by this dirstate
1732
1737
1733 This is only used to do `hg rollback` related backup in the transaction
1738 This is only used to do `hg rollback` related backup in the transaction
1734 """
1739 """
1735 if not self._opener.exists(self._filename):
1740 if not self._opener.exists(self._filename):
1736 # no data every written to disk yet
1741 # no data every written to disk yet
1737 return ()
1742 return ()
1738 elif self._use_dirstate_v2:
1743 elif self._use_dirstate_v2:
1739 return (
1744 return (
1740 self._filename,
1745 self._filename,
1741 self._map.docket.data_filename(),
1746 self._map.docket.data_filename(),
1742 )
1747 )
1743 else:
1748 else:
1744 return (self._filename,)
1749 return (self._filename,)
1745
1750
    def verify(self, m1, m2, p1, narrow_matcher=None):
        """
        check the dirstate contents against the parent manifest and yield errors

        ``m1``/``m2`` are the first and second parent manifests, ``p1`` the
        first parent node (only used in error messages).  When
        ``narrow_matcher`` is given, manifest files it rejects are skipped.
        Yields one formatted byte-string message per inconsistency found.
        """
        missing_from_p1 = _(
            b"%s marked as tracked in p1 (%s) but not in manifest1\n"
        )
        unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
        missing_from_ps = _(
            b"%s marked as modified, but not in either manifest\n"
        )
        missing_from_ds = _(
            b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
        )
        # First pass: every dirstate entry must be consistent with the
        # parent manifests.
        for f, entry in self.items():
            if entry.p1_tracked:
                if entry.modified and f not in m1 and f not in m2:
                    yield missing_from_ps % f
                elif f not in m1:
                    yield missing_from_p1 % (f, node.short(p1))
            if entry.added and f in m1:
                yield unexpected_in_p1 % f
        # Second pass: every file in manifest1 must be tracked-in-p1 in the
        # dirstate (unless excluded by the narrow matcher).
        for f in m1:
            if narrow_matcher is not None and not narrow_matcher(f):
                continue
            entry = self.get_entry(f)
            if not entry.p1_tracked:
                yield missing_from_ds % (f, node.short(p1))
General Comments 0
You need to be logged in to leave comments. Login now