##// END OF EJS Templates
dirstate: have `running_status` write the dirstate when holding the lock...
marmoute -
r51042:0be70c7b default
parent child Browse files
Show More
@@ -1,1764 +1,1773 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import collections
9 import collections
10 import contextlib
10 import contextlib
11 import os
11 import os
12 import stat
12 import stat
13 import uuid
13 import uuid
14
14
15 from .i18n import _
15 from .i18n import _
16 from .pycompat import delattr
16 from .pycompat import delattr
17
17
18 from hgdemandimport import tracing
18 from hgdemandimport import tracing
19
19
20 from . import (
20 from . import (
21 dirstatemap,
21 dirstatemap,
22 encoding,
22 encoding,
23 error,
23 error,
24 match as matchmod,
24 match as matchmod,
25 node,
25 node,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 util,
30 util,
31 )
31 )
32
32
33 from .dirstateutils import (
33 from .dirstateutils import (
34 timestamp,
34 timestamp,
35 )
35 )
36
36
37 from .interfaces import (
37 from .interfaces import (
38 dirstate as intdirstate,
38 dirstate as intdirstate,
39 util as interfaceutil,
39 util as interfaceutil,
40 )
40 )
41
41
42 parsers = policy.importmod('parsers')
42 parsers = policy.importmod('parsers')
43 rustmod = policy.importrust('dirstate')
43 rustmod = policy.importrust('dirstate')
44
44
45 HAS_FAST_DIRSTATE_V2 = rustmod is not None
45 HAS_FAST_DIRSTATE_V2 = rustmod is not None
46
46
47 propertycache = util.propertycache
47 propertycache = util.propertycache
48 filecache = scmutil.filecache
48 filecache = scmutil.filecache
49 _rangemask = dirstatemap.rangemask
49 _rangemask = dirstatemap.rangemask
50
50
51 DirstateItem = dirstatemap.DirstateItem
51 DirstateItem = dirstatemap.DirstateItem
52
52
53
53
54 class repocache(filecache):
54 class repocache(filecache):
55 """filecache for files in .hg/"""
55 """filecache for files in .hg/"""
56
56
57 def join(self, obj, fname):
57 def join(self, obj, fname):
58 return obj._opener.join(fname)
58 return obj._opener.join(fname)
59
59
60
60
61 class rootcache(filecache):
61 class rootcache(filecache):
62 """filecache for files in the repository root"""
62 """filecache for files in the repository root"""
63
63
64 def join(self, obj, fname):
64 def join(self, obj, fname):
65 return obj._join(fname)
65 return obj._join(fname)
66
66
67
67
68 def check_invalidated(func):
68 def check_invalidated(func):
69 """check we func is called a non-invalidated dirstate
69 """check we func is called a non-invalidated dirstate
70
70
71 The dirstate is in an "invalidated state" after an error occured during its
71 The dirstate is in an "invalidated state" after an error occured during its
72 modification and remains so until we exited the top level scope that framed
72 modification and remains so until we exited the top level scope that framed
73 such change.
73 such change.
74 """
74 """
75
75
76 def wrap(self, *args, **kwargs):
76 def wrap(self, *args, **kwargs):
77 if self._invalidated_context:
77 if self._invalidated_context:
78 msg = 'calling `%s` after the dirstate was invalidated'
78 msg = 'calling `%s` after the dirstate was invalidated'
79 msg %= func.__name__
79 msg %= func.__name__
80 raise error.ProgrammingError(msg)
80 raise error.ProgrammingError(msg)
81 return func(self, *args, **kwargs)
81 return func(self, *args, **kwargs)
82
82
83 return wrap
83 return wrap
84
84
85
85
86 def requires_changing_parents(func):
86 def requires_changing_parents(func):
87 def wrap(self, *args, **kwargs):
87 def wrap(self, *args, **kwargs):
88 if not self.is_changing_parents:
88 if not self.is_changing_parents:
89 msg = 'calling `%s` outside of a changing_parents context'
89 msg = 'calling `%s` outside of a changing_parents context'
90 msg %= func.__name__
90 msg %= func.__name__
91 raise error.ProgrammingError(msg)
91 raise error.ProgrammingError(msg)
92 return func(self, *args, **kwargs)
92 return func(self, *args, **kwargs)
93
93
94 return check_invalidated(wrap)
94 return check_invalidated(wrap)
95
95
96
96
97 def requires_changing_files(func):
97 def requires_changing_files(func):
98 def wrap(self, *args, **kwargs):
98 def wrap(self, *args, **kwargs):
99 if not self.is_changing_files:
99 if not self.is_changing_files:
100 msg = 'calling `%s` outside of a `changing_files`'
100 msg = 'calling `%s` outside of a `changing_files`'
101 msg %= func.__name__
101 msg %= func.__name__
102 raise error.ProgrammingError(msg)
102 raise error.ProgrammingError(msg)
103 return func(self, *args, **kwargs)
103 return func(self, *args, **kwargs)
104
104
105 return check_invalidated(wrap)
105 return check_invalidated(wrap)
106
106
107
107
108 def requires_changing_any(func):
108 def requires_changing_any(func):
109 def wrap(self, *args, **kwargs):
109 def wrap(self, *args, **kwargs):
110 if not self.is_changing_any:
110 if not self.is_changing_any:
111 msg = 'calling `%s` outside of a changing context'
111 msg = 'calling `%s` outside of a changing context'
112 msg %= func.__name__
112 msg %= func.__name__
113 raise error.ProgrammingError(msg)
113 raise error.ProgrammingError(msg)
114 return func(self, *args, **kwargs)
114 return func(self, *args, **kwargs)
115
115
116 return check_invalidated(wrap)
116 return check_invalidated(wrap)
117
117
118
118
119 def requires_not_changing_parents(func):
119 def requires_not_changing_parents(func):
120 def wrap(self, *args, **kwargs):
120 def wrap(self, *args, **kwargs):
121 if self.is_changing_parents:
121 if self.is_changing_parents:
122 msg = 'calling `%s` inside of a changing_parents context'
122 msg = 'calling `%s` inside of a changing_parents context'
123 msg %= func.__name__
123 msg %= func.__name__
124 raise error.ProgrammingError(msg)
124 raise error.ProgrammingError(msg)
125 return func(self, *args, **kwargs)
125 return func(self, *args, **kwargs)
126
126
127 return check_invalidated(wrap)
127 return check_invalidated(wrap)
128
128
129
129
130 CHANGE_TYPE_PARENTS = "parents"
130 CHANGE_TYPE_PARENTS = "parents"
131 CHANGE_TYPE_FILES = "files"
131 CHANGE_TYPE_FILES = "files"
132
132
133
133
134 @interfaceutil.implementer(intdirstate.idirstate)
134 @interfaceutil.implementer(intdirstate.idirstate)
135 class dirstate:
135 class dirstate:
136
136
137 # used by largefile to avoid overwritting transaction callbacK
137 # used by largefile to avoid overwritting transaction callbacK
138 _tr_key_suffix = b''
138 _tr_key_suffix = b''
139
139
140 def __init__(
140 def __init__(
141 self,
141 self,
142 opener,
142 opener,
143 ui,
143 ui,
144 root,
144 root,
145 validate,
145 validate,
146 sparsematchfn,
146 sparsematchfn,
147 nodeconstants,
147 nodeconstants,
148 use_dirstate_v2,
148 use_dirstate_v2,
149 use_tracked_hint=False,
149 use_tracked_hint=False,
150 ):
150 ):
151 """Create a new dirstate object.
151 """Create a new dirstate object.
152
152
153 opener is an open()-like callable that can be used to open the
153 opener is an open()-like callable that can be used to open the
154 dirstate file; root is the root of the directory tracked by
154 dirstate file; root is the root of the directory tracked by
155 the dirstate.
155 the dirstate.
156 """
156 """
157 self._use_dirstate_v2 = use_dirstate_v2
157 self._use_dirstate_v2 = use_dirstate_v2
158 self._use_tracked_hint = use_tracked_hint
158 self._use_tracked_hint = use_tracked_hint
159 self._nodeconstants = nodeconstants
159 self._nodeconstants = nodeconstants
160 self._opener = opener
160 self._opener = opener
161 self._validate = validate
161 self._validate = validate
162 self._root = root
162 self._root = root
163 # Either build a sparse-matcher or None if sparse is disabled
163 # Either build a sparse-matcher or None if sparse is disabled
164 self._sparsematchfn = sparsematchfn
164 self._sparsematchfn = sparsematchfn
165 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
165 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
166 # UNC path pointing to root share (issue4557)
166 # UNC path pointing to root share (issue4557)
167 self._rootdir = pathutil.normasprefix(root)
167 self._rootdir = pathutil.normasprefix(root)
168 # True is any internal state may be different
168 # True is any internal state may be different
169 self._dirty = False
169 self._dirty = False
170 # True if the set of tracked file may be different
170 # True if the set of tracked file may be different
171 self._dirty_tracked_set = False
171 self._dirty_tracked_set = False
172 self._ui = ui
172 self._ui = ui
173 self._filecache = {}
173 self._filecache = {}
174 # nesting level of `changing_parents` context
174 # nesting level of `changing_parents` context
175 self._changing_level = 0
175 self._changing_level = 0
176 # the change currently underway
176 # the change currently underway
177 self._change_type = None
177 self._change_type = None
178 # number of open _running_status context
178 # number of open _running_status context
179 self._running_status = 0
179 self._running_status = 0
180 # True if the current dirstate changing operations have been
180 # True if the current dirstate changing operations have been
181 # invalidated (used to make sure all nested contexts have been exited)
181 # invalidated (used to make sure all nested contexts have been exited)
182 self._invalidated_context = False
182 self._invalidated_context = False
183 self._attached_to_a_transaction = False
183 self._attached_to_a_transaction = False
184 self._filename = b'dirstate'
184 self._filename = b'dirstate'
185 self._filename_th = b'dirstate-tracked-hint'
185 self._filename_th = b'dirstate-tracked-hint'
186 self._pendingfilename = b'%s.pending' % self._filename
186 self._pendingfilename = b'%s.pending' % self._filename
187 self._plchangecallbacks = {}
187 self._plchangecallbacks = {}
188 self._origpl = None
188 self._origpl = None
189 self._mapcls = dirstatemap.dirstatemap
189 self._mapcls = dirstatemap.dirstatemap
190 # Access and cache cwd early, so we don't access it for the first time
190 # Access and cache cwd early, so we don't access it for the first time
191 # after a working-copy update caused it to not exist (accessing it then
191 # after a working-copy update caused it to not exist (accessing it then
192 # raises an exception).
192 # raises an exception).
193 self._cwd
193 self._cwd
194
194
195 def refresh(self):
195 def refresh(self):
196 if '_branch' in vars(self):
196 if '_branch' in vars(self):
197 del self._branch
197 del self._branch
198 if '_map' in vars(self) and self._map.may_need_refresh():
198 if '_map' in vars(self) and self._map.may_need_refresh():
199 self.invalidate()
199 self.invalidate()
200
200
201 def prefetch_parents(self):
201 def prefetch_parents(self):
202 """make sure the parents are loaded
202 """make sure the parents are loaded
203
203
204 Used to avoid a race condition.
204 Used to avoid a race condition.
205 """
205 """
206 self._pl
206 self._pl
207
207
208 @contextlib.contextmanager
208 @contextlib.contextmanager
209 @check_invalidated
209 @check_invalidated
210 def running_status(self, repo):
210 def running_status(self, repo):
211 """Wrap a status operation
211 """Wrap a status operation
212
212
213 This context is not mutally exclusive with the `changing_*` context. It
213 This context is not mutally exclusive with the `changing_*` context. It
214 also do not warrant for the `wlock` to be taken.
214 also do not warrant for the `wlock` to be taken.
215
215
216 If the wlock is taken, this context will (in the future) behave in a
216 If the wlock is taken, this context will behave in a simple way, and
217 simple way, and ensure the data are scheduled for write when leaving
217 ensure the data are scheduled for write when leaving the top level
218 the top level context.
218 context.
219
219
220 If the lock is not taken, it will only warrant that the data are either
220 If the lock is not taken, it will only warrant that the data are either
221 committed (written) and rolled back (invalidated) when exiting the top
221 committed (written) and rolled back (invalidated) when exiting the top
222 level context. The write/invalidate action must be performed by the
222 level context. The write/invalidate action must be performed by the
223 wrapped code.
223 wrapped code.
224
224
225
225
226 The expected logic is:
226 The expected logic is:
227
227
228 A: read the dirstate
228 A: read the dirstate
229 B: run status
229 B: run status
230 This might make the dirstate dirty by updating cache,
230 This might make the dirstate dirty by updating cache,
231 especially in Rust.
231 especially in Rust.
232 C: do more "post status fixup if relevant
232 C: do more "post status fixup if relevant
233 D: try to take the w-lock (this will invalidate the changes if they were raced)
233 D: try to take the w-lock (this will invalidate the changes if they were raced)
234 E0: if dirstate changed on disk β†’ discard change (done by dirstate internal)
234 E0: if dirstate changed on disk β†’ discard change (done by dirstate internal)
235 E1: elif lock was acquired β†’ write the changes
235 E1: elif lock was acquired β†’ write the changes
236 E2: else β†’ discard the changes
236 E2: else β†’ discard the changes
237 """
237 """
238 has_lock = repo.currentwlock() is not None
238 is_changing = self.is_changing_any
239 is_changing = self.is_changing_any
239 has_tr = repo.currenttransaction is not None
240 tr = repo.currenttransaction()
241 has_tr = tr is not None
240 nested = bool(self._running_status)
242 nested = bool(self._running_status)
241
243
242 first_and_alone = not (is_changing or has_tr or nested)
244 first_and_alone = not (is_changing or has_tr or nested)
243
245
244 # enforce no change happened outside of a proper context.
246 # enforce no change happened outside of a proper context.
245 if first_and_alone and self._dirty:
247 if first_and_alone and self._dirty:
246 has_tr = repo.currenttransaction() is not None
248 has_tr = repo.currenttransaction() is not None
247 if not has_tr and self._changing_level == 0 and self._dirty:
249 if not has_tr and self._changing_level == 0 and self._dirty:
248 msg = "entering a status context, but dirstate is already dirty"
250 msg = "entering a status context, but dirstate is already dirty"
249 raise error.ProgrammingError(msg)
251 raise error.ProgrammingError(msg)
250
252
253 should_write = has_lock and not (nested or is_changing)
254
251 self._running_status += 1
255 self._running_status += 1
252 try:
256 try:
253 yield
257 yield
254 except Exception:
258 except Exception:
255 self.invalidate()
259 self.invalidate()
256 raise
260 raise
257 finally:
261 finally:
258 self._running_status -= 1
262 self._running_status -= 1
259 if self._invalidated_context:
263 if self._invalidated_context:
264 should_write = False
260 self.invalidate()
265 self.invalidate()
261
266
267 if should_write:
268 assert repo.currenttransaction() is tr
269 self.write(tr)
270
262 @contextlib.contextmanager
271 @contextlib.contextmanager
263 @check_invalidated
272 @check_invalidated
264 def _changing(self, repo, change_type):
273 def _changing(self, repo, change_type):
265 if repo.currentwlock() is None:
274 if repo.currentwlock() is None:
266 msg = b"trying to change the dirstate without holding the wlock"
275 msg = b"trying to change the dirstate without holding the wlock"
267 raise error.ProgrammingError(msg)
276 raise error.ProgrammingError(msg)
268
277
269 has_tr = repo.currenttransaction() is not None
278 has_tr = repo.currenttransaction() is not None
270 if not has_tr and self._changing_level == 0 and self._dirty:
279 if not has_tr and self._changing_level == 0 and self._dirty:
271 msg = "entering a changing context, but dirstate is already dirty"
280 msg = "entering a changing context, but dirstate is already dirty"
272 raise error.ProgrammingError(msg)
281 raise error.ProgrammingError(msg)
273
282
274 assert self._changing_level >= 0
283 assert self._changing_level >= 0
275 # different type of change are mutually exclusive
284 # different type of change are mutually exclusive
276 if self._change_type is None:
285 if self._change_type is None:
277 assert self._changing_level == 0
286 assert self._changing_level == 0
278 self._change_type = change_type
287 self._change_type = change_type
279 elif self._change_type != change_type:
288 elif self._change_type != change_type:
280 msg = (
289 msg = (
281 'trying to open "%s" dirstate-changing context while a "%s" is'
290 'trying to open "%s" dirstate-changing context while a "%s" is'
282 ' already open'
291 ' already open'
283 )
292 )
284 msg %= (change_type, self._change_type)
293 msg %= (change_type, self._change_type)
285 raise error.ProgrammingError(msg)
294 raise error.ProgrammingError(msg)
286 should_write = False
295 should_write = False
287 self._changing_level += 1
296 self._changing_level += 1
288 try:
297 try:
289 yield
298 yield
290 except: # re-raises
299 except: # re-raises
291 self.invalidate() # this will set `_invalidated_context`
300 self.invalidate() # this will set `_invalidated_context`
292 raise
301 raise
293 finally:
302 finally:
294 assert self._changing_level > 0
303 assert self._changing_level > 0
295 self._changing_level -= 1
304 self._changing_level -= 1
296 # If the dirstate is being invalidated, call invalidate again.
305 # If the dirstate is being invalidated, call invalidate again.
297 # This will throw away anything added by a upper context and
306 # This will throw away anything added by a upper context and
298 # reset the `_invalidated_context` flag when relevant
307 # reset the `_invalidated_context` flag when relevant
299 if self._changing_level <= 0:
308 if self._changing_level <= 0:
300 self._change_type = None
309 self._change_type = None
301 assert self._changing_level == 0
310 assert self._changing_level == 0
302 if self._invalidated_context:
311 if self._invalidated_context:
303 # make sure we invalidate anything an upper context might
312 # make sure we invalidate anything an upper context might
304 # have changed.
313 # have changed.
305 self.invalidate()
314 self.invalidate()
306 else:
315 else:
307 should_write = self._changing_level <= 0
316 should_write = self._changing_level <= 0
308 tr = repo.currenttransaction()
317 tr = repo.currenttransaction()
309 if has_tr != (tr is not None):
318 if has_tr != (tr is not None):
310 if has_tr:
319 if has_tr:
311 m = "transaction vanished while changing dirstate"
320 m = "transaction vanished while changing dirstate"
312 else:
321 else:
313 m = "transaction appeared while changing dirstate"
322 m = "transaction appeared while changing dirstate"
314 raise error.ProgrammingError(m)
323 raise error.ProgrammingError(m)
315 if should_write:
324 if should_write:
316 self.write(tr)
325 self.write(tr)
317
326
318 @contextlib.contextmanager
327 @contextlib.contextmanager
319 def changing_parents(self, repo):
328 def changing_parents(self, repo):
320 with self._changing(repo, CHANGE_TYPE_PARENTS) as c:
329 with self._changing(repo, CHANGE_TYPE_PARENTS) as c:
321 yield c
330 yield c
322
331
323 @contextlib.contextmanager
332 @contextlib.contextmanager
324 def changing_files(self, repo):
333 def changing_files(self, repo):
325 with self._changing(repo, CHANGE_TYPE_FILES) as c:
334 with self._changing(repo, CHANGE_TYPE_FILES) as c:
326 yield c
335 yield c
327
336
328 # here to help migration to the new code
337 # here to help migration to the new code
329 def parentchange(self):
338 def parentchange(self):
330 msg = (
339 msg = (
331 "Mercurial 6.4 and later requires call to "
340 "Mercurial 6.4 and later requires call to "
332 "`dirstate.changing_parents(repo)`"
341 "`dirstate.changing_parents(repo)`"
333 )
342 )
334 raise error.ProgrammingError(msg)
343 raise error.ProgrammingError(msg)
335
344
336 @property
345 @property
337 def is_changing_any(self):
346 def is_changing_any(self):
338 """Returns true if the dirstate is in the middle of a set of changes.
347 """Returns true if the dirstate is in the middle of a set of changes.
339
348
340 This returns True for any kind of change.
349 This returns True for any kind of change.
341 """
350 """
342 return self._changing_level > 0
351 return self._changing_level > 0
343
352
344 def pendingparentchange(self):
353 def pendingparentchange(self):
345 return self.is_changing_parent()
354 return self.is_changing_parent()
346
355
347 def is_changing_parent(self):
356 def is_changing_parent(self):
348 """Returns true if the dirstate is in the middle of a set of changes
357 """Returns true if the dirstate is in the middle of a set of changes
349 that modify the dirstate parent.
358 that modify the dirstate parent.
350 """
359 """
351 self._ui.deprecwarn(b"dirstate.is_changing_parents", b"6.5")
360 self._ui.deprecwarn(b"dirstate.is_changing_parents", b"6.5")
352 return self.is_changing_parents
361 return self.is_changing_parents
353
362
354 @property
363 @property
355 def is_changing_parents(self):
364 def is_changing_parents(self):
356 """Returns true if the dirstate is in the middle of a set of changes
365 """Returns true if the dirstate is in the middle of a set of changes
357 that modify the dirstate parent.
366 that modify the dirstate parent.
358 """
367 """
359 if self._changing_level <= 0:
368 if self._changing_level <= 0:
360 return False
369 return False
361 return self._change_type == CHANGE_TYPE_PARENTS
370 return self._change_type == CHANGE_TYPE_PARENTS
362
371
363 @property
372 @property
364 def is_changing_files(self):
373 def is_changing_files(self):
365 """Returns true if the dirstate is in the middle of a set of changes
374 """Returns true if the dirstate is in the middle of a set of changes
366 that modify the files tracked or their sources.
375 that modify the files tracked or their sources.
367 """
376 """
368 if self._changing_level <= 0:
377 if self._changing_level <= 0:
369 return False
378 return False
370 return self._change_type == CHANGE_TYPE_FILES
379 return self._change_type == CHANGE_TYPE_FILES
371
380
372 @propertycache
381 @propertycache
373 def _map(self):
382 def _map(self):
374 """Return the dirstate contents (see documentation for dirstatemap)."""
383 """Return the dirstate contents (see documentation for dirstatemap)."""
375 return self._mapcls(
384 return self._mapcls(
376 self._ui,
385 self._ui,
377 self._opener,
386 self._opener,
378 self._root,
387 self._root,
379 self._nodeconstants,
388 self._nodeconstants,
380 self._use_dirstate_v2,
389 self._use_dirstate_v2,
381 )
390 )
382
391
383 @property
392 @property
384 def _sparsematcher(self):
393 def _sparsematcher(self):
385 """The matcher for the sparse checkout.
394 """The matcher for the sparse checkout.
386
395
387 The working directory may not include every file from a manifest. The
396 The working directory may not include every file from a manifest. The
388 matcher obtained by this property will match a path if it is to be
397 matcher obtained by this property will match a path if it is to be
389 included in the working directory.
398 included in the working directory.
390
399
391 When sparse if disabled, return None.
400 When sparse if disabled, return None.
392 """
401 """
393 if self._sparsematchfn is None:
402 if self._sparsematchfn is None:
394 return None
403 return None
395 # TODO there is potential to cache this property. For now, the matcher
404 # TODO there is potential to cache this property. For now, the matcher
396 # is resolved on every access. (But the called function does use a
405 # is resolved on every access. (But the called function does use a
397 # cache to keep the lookup fast.)
406 # cache to keep the lookup fast.)
398 return self._sparsematchfn()
407 return self._sparsematchfn()
399
408
400 @repocache(b'branch')
409 @repocache(b'branch')
401 def _branch(self):
410 def _branch(self):
402 try:
411 try:
403 return self._opener.read(b"branch").strip() or b"default"
412 return self._opener.read(b"branch").strip() or b"default"
404 except FileNotFoundError:
413 except FileNotFoundError:
405 return b"default"
414 return b"default"
406
415
407 @property
416 @property
408 def _pl(self):
417 def _pl(self):
409 return self._map.parents()
418 return self._map.parents()
410
419
411 def hasdir(self, d):
420 def hasdir(self, d):
412 return self._map.hastrackeddir(d)
421 return self._map.hastrackeddir(d)
413
422
414 @rootcache(b'.hgignore')
423 @rootcache(b'.hgignore')
415 def _ignore(self):
424 def _ignore(self):
416 files = self._ignorefiles()
425 files = self._ignorefiles()
417 if not files:
426 if not files:
418 return matchmod.never()
427 return matchmod.never()
419
428
420 pats = [b'include:%s' % f for f in files]
429 pats = [b'include:%s' % f for f in files]
421 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
430 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
422
431
423 @propertycache
432 @propertycache
424 def _slash(self):
433 def _slash(self):
425 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
434 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
426
435
427 @propertycache
436 @propertycache
428 def _checklink(self):
437 def _checklink(self):
429 return util.checklink(self._root)
438 return util.checklink(self._root)
430
439
431 @propertycache
440 @propertycache
432 def _checkexec(self):
441 def _checkexec(self):
433 return bool(util.checkexec(self._root))
442 return bool(util.checkexec(self._root))
434
443
435 @propertycache
444 @propertycache
436 def _checkcase(self):
445 def _checkcase(self):
437 return not util.fscasesensitive(self._join(b'.hg'))
446 return not util.fscasesensitive(self._join(b'.hg'))
438
447
439 def _join(self, f):
448 def _join(self, f):
440 # much faster than os.path.join()
449 # much faster than os.path.join()
441 # it's safe because f is always a relative path
450 # it's safe because f is always a relative path
442 return self._rootdir + f
451 return self._rootdir + f
443
452
444 def flagfunc(self, buildfallback):
453 def flagfunc(self, buildfallback):
445 """build a callable that returns flags associated with a filename
454 """build a callable that returns flags associated with a filename
446
455
447 The information is extracted from three possible layers:
456 The information is extracted from three possible layers:
448 1. the file system if it supports the information
457 1. the file system if it supports the information
449 2. the "fallback" information stored in the dirstate if any
458 2. the "fallback" information stored in the dirstate if any
450 3. a more expensive mechanism inferring the flags from the parents.
459 3. a more expensive mechanism inferring the flags from the parents.
451 """
460 """
452
461
453 # small hack to cache the result of buildfallback()
462 # small hack to cache the result of buildfallback()
454 fallback_func = []
463 fallback_func = []
455
464
456 def get_flags(x):
465 def get_flags(x):
457 entry = None
466 entry = None
458 fallback_value = None
467 fallback_value = None
459 try:
468 try:
460 st = os.lstat(self._join(x))
469 st = os.lstat(self._join(x))
461 except OSError:
470 except OSError:
462 return b''
471 return b''
463
472
464 if self._checklink:
473 if self._checklink:
465 if util.statislink(st):
474 if util.statislink(st):
466 return b'l'
475 return b'l'
467 else:
476 else:
468 entry = self.get_entry(x)
477 entry = self.get_entry(x)
469 if entry.has_fallback_symlink:
478 if entry.has_fallback_symlink:
470 if entry.fallback_symlink:
479 if entry.fallback_symlink:
471 return b'l'
480 return b'l'
472 else:
481 else:
473 if not fallback_func:
482 if not fallback_func:
474 fallback_func.append(buildfallback())
483 fallback_func.append(buildfallback())
475 fallback_value = fallback_func[0](x)
484 fallback_value = fallback_func[0](x)
476 if b'l' in fallback_value:
485 if b'l' in fallback_value:
477 return b'l'
486 return b'l'
478
487
479 if self._checkexec:
488 if self._checkexec:
480 if util.statisexec(st):
489 if util.statisexec(st):
481 return b'x'
490 return b'x'
482 else:
491 else:
483 if entry is None:
492 if entry is None:
484 entry = self.get_entry(x)
493 entry = self.get_entry(x)
485 if entry.has_fallback_exec:
494 if entry.has_fallback_exec:
486 if entry.fallback_exec:
495 if entry.fallback_exec:
487 return b'x'
496 return b'x'
488 else:
497 else:
489 if fallback_value is None:
498 if fallback_value is None:
490 if not fallback_func:
499 if not fallback_func:
491 fallback_func.append(buildfallback())
500 fallback_func.append(buildfallback())
492 fallback_value = fallback_func[0](x)
501 fallback_value = fallback_func[0](x)
493 if b'x' in fallback_value:
502 if b'x' in fallback_value:
494 return b'x'
503 return b'x'
495 return b''
504 return b''
496
505
497 return get_flags
506 return get_flags
498
507
499 @propertycache
508 @propertycache
500 def _cwd(self):
509 def _cwd(self):
501 # internal config: ui.forcecwd
510 # internal config: ui.forcecwd
502 forcecwd = self._ui.config(b'ui', b'forcecwd')
511 forcecwd = self._ui.config(b'ui', b'forcecwd')
503 if forcecwd:
512 if forcecwd:
504 return forcecwd
513 return forcecwd
505 return encoding.getcwd()
514 return encoding.getcwd()
506
515
507 def getcwd(self):
516 def getcwd(self):
508 """Return the path from which a canonical path is calculated.
517 """Return the path from which a canonical path is calculated.
509
518
510 This path should be used to resolve file patterns or to convert
519 This path should be used to resolve file patterns or to convert
511 canonical paths back to file paths for display. It shouldn't be
520 canonical paths back to file paths for display. It shouldn't be
512 used to get real file paths. Use vfs functions instead.
521 used to get real file paths. Use vfs functions instead.
513 """
522 """
514 cwd = self._cwd
523 cwd = self._cwd
515 if cwd == self._root:
524 if cwd == self._root:
516 return b''
525 return b''
517 # self._root ends with a path separator if self._root is '/' or 'C:\'
526 # self._root ends with a path separator if self._root is '/' or 'C:\'
518 rootsep = self._root
527 rootsep = self._root
519 if not util.endswithsep(rootsep):
528 if not util.endswithsep(rootsep):
520 rootsep += pycompat.ossep
529 rootsep += pycompat.ossep
521 if cwd.startswith(rootsep):
530 if cwd.startswith(rootsep):
522 return cwd[len(rootsep) :]
531 return cwd[len(rootsep) :]
523 else:
532 else:
524 # we're outside the repo. return an absolute path.
533 # we're outside the repo. return an absolute path.
525 return cwd
534 return cwd
526
535
527 def pathto(self, f, cwd=None):
536 def pathto(self, f, cwd=None):
528 if cwd is None:
537 if cwd is None:
529 cwd = self.getcwd()
538 cwd = self.getcwd()
530 path = util.pathto(self._root, cwd, f)
539 path = util.pathto(self._root, cwd, f)
531 if self._slash:
540 if self._slash:
532 return util.pconvert(path)
541 return util.pconvert(path)
533 return path
542 return path
534
543
535 def get_entry(self, path):
544 def get_entry(self, path):
536 """return a DirstateItem for the associated path"""
545 """return a DirstateItem for the associated path"""
537 entry = self._map.get(path)
546 entry = self._map.get(path)
538 if entry is None:
547 if entry is None:
539 return DirstateItem()
548 return DirstateItem()
540 return entry
549 return entry
541
550
542 def __contains__(self, key):
551 def __contains__(self, key):
543 return key in self._map
552 return key in self._map
544
553
545 def __iter__(self):
554 def __iter__(self):
546 return iter(sorted(self._map))
555 return iter(sorted(self._map))
547
556
def items(self):
    """Return the (filename, DirstateItem) items view of the map."""
    return self._map.items()

# py2-era alias kept for backward compatibility
iteritems = items
552
561
def parents(self):
    """Return the two validated working-directory parents as a list."""
    validate = self._validate
    return [validate(p) for p in self._pl]
555
564
def p1(self):
    """Return the validated first working-directory parent."""
    first = self._pl[0]
    return self._validate(first)
558
567
def p2(self):
    """Return the validated second working-directory parent."""
    second = self._pl[1]
    return self._validate(second)
561
570
@property
def in_merge(self):
    """True while a merge is in progress (second parent is not null)."""
    nullid = self._nodeconstants.nullid
    return self._pl[1] != nullid
566
575
def branch(self):
    """Return the current branch name, converted to local encoding."""
    return encoding.tolocal(self._branch)
569
578
@requires_changing_parents
def setparents(self, p1, p2=None):
    """Set dirstate parents to p1 and p2.

    When moving from two parents to one, "merged" entries are adjusted
    to normal and previous copy records discarded and returned by the
    call.

    See localrepo.setparents()
    """
    nullid = self._nodeconstants.nullid
    if p2 is None:
        p2 = nullid
    if self._changing_level == 0:
        msg = (
            b"cannot set dirstate parent outside of "
            b"dirstate.changing_parents context manager"
        )
        raise ValueError(msg)

    self._dirty = True
    previous_p2 = self._pl[1]
    if self._origpl is None:
        # remember the pre-change parents for the change callbacks
        self._origpl = self._pl
    # fold_p2: leaving a merge, so p2-related state must be folded back
    # into a linear (single parent) shape
    fold_p2 = previous_p2 != nullid and p2 == nullid
    return self._map.setparents(p1, p2, fold_p2=fold_p2)
596
605
def setbranch(self, branch):
    """Persist *branch* (given in local encoding) as the current branch.

    The in-memory value is updated first, then the ``branch`` file is
    replaced atomically; on any failure the temporary file is discarded
    and the error re-raised.
    """
    self.__class__._branch.set(self, encoding.fromlocal(branch))
    fp = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
    try:
        fp.write(self._branch + b'\n')
        fp.close()

        # make sure filecache has the correct stat info for _branch
        # after replacing the underlying file
        cache_entry = self._filecache[b'_branch']
        if cache_entry:
            cache_entry.refresh()
    except:  # re-raises
        fp.discard()
        raise
612
621
def invalidate(self):
    """Cause the next access to reread the dirstate.

    This is different from localrepo.invalidatedirstate() because it
    always rereads the dirstate. Use localrepo.invalidatedirstate() if
    you want to check whether the dirstate has changed before rereading
    it.
    """
    for cached in ("_map", "_branch", "_ignore"):
        # drop lazily-computed attributes so they are rebuilt on access
        self.__dict__.pop(cached, None)
    self._dirty = False
    self._dirty_tracked_set = False
    # record that in-memory content was dropped while some change
    # context (change level, transaction, status run) is still open
    self._invalidated_context = bool(
        self._changing_level > 0
        or self._attached_to_a_transaction
        or self._running_status
    )
    self._origpl = None
631
640
@requires_changing_any
def copy(self, source, dest):
    """Mark dest as a copy of source. Unmark dest if source is None."""
    if source == dest:
        return
    self._dirty = True
    if source is None:
        self._map.copymap.pop(dest, None)
    else:
        self._check_sparse(source)
        self._map.copymap[dest] = source
643
652
def copied(self, file):
    """Return the recorded copy source of *file*, or None."""
    return self._map.copymap.get(file)
646
655
def copies(self):
    """Return the full destination -> source copy mapping."""
    return self._map.copymap
649
658
@requires_changing_files
def set_tracked(self, filename, reset_copy=False):
    """a "public" method for generic code to mark a file as tracked

    This function is to be called outside of "update/merge" case. For
    example by a command like `hg add X`.

    if reset_copy is set, any existing copy information will be dropped.

    return True the file was previously untracked, False otherwise.
    """
    self._dirty = True
    entry = self._map.get(filename)
    if entry is None or not entry.tracked:
        # validate the name before it enters the tracked set
        self._check_new_tracked_filename(filename)
    newly_tracked = self._map.set_tracked(filename)
    if reset_copy:
        self._map.copymap.pop(filename, None)
    if newly_tracked:
        self._dirty_tracked_set = True
    return newly_tracked
671
680
@requires_changing_files
def set_untracked(self, filename):
    """a "public" method for generic code to mark a file as untracked

    This function is to be called outside of "update/merge" case. For
    example by a command like `hg remove X`.

    return True the file was previously tracked, False otherwise.
    """
    was_tracked = self._map.set_untracked(filename)
    if was_tracked:
        self._dirty = True
        self._dirty_tracked_set = True
    return was_tracked
686
695
@requires_not_changing_parents
def set_clean(self, filename, parentfiledata):
    """record that the current state of the file on disk is known to be clean"""
    self._dirty = True
    if not self._map[filename].tracked:
        self._check_new_tracked_filename(filename)
    mode, size, mtime = parentfiledata
    self._map.set_clean(filename, mode, size, mtime)
695
704
@requires_not_changing_parents
def set_possibly_dirty(self, filename):
    """record that the current state of the file on disk is unknown"""
    self._dirty = True
    self._map.set_possibly_dirty(filename)
701
710
@requires_changing_parents
def update_file_p1(
    self,
    filename,
    p1_tracked,
):
    """Set a file as tracked in the parent (or not)

    This is to be called when adjusting the dirstate to a new parent
    after a history rewriting operation.

    It should not be called during a merge (p2 != nullid) and only
    within a `with dirstate.changing_parents(repo):` context.
    """
    if self.in_merge:
        msg = b'update_file_reference should not be called when merging'
        raise error.ProgrammingError(msg)
    entry = self._map.get(filename)
    wc_tracked = False if entry is None else entry.tracked
    if not (p1_tracked or wc_tracked):
        # the file is no longer relevant to anyone
        if self._map.get(filename) is not None:
            self._map.reset_state(filename)
            self._dirty = True
    elif (not p1_tracked) and wc_tracked:
        if entry is not None and entry.added:
            return  # avoid dropping copy information (maybe?)

    self._map.reset_state(
        filename,
        wc_tracked,
        p1_tracked,
        # the underlying reference might have changed, we will have to
        # check it.
        has_meaningful_mtime=False,
    )
741
750
@requires_changing_parents
def update_file(
    self,
    filename,
    wc_tracked,
    p1_tracked,
    p2_info=False,
    possibly_dirty=False,
    parentfiledata=None,
):
    """update the information about a file in the dirstate

    This is to be called when the dirstate's parent changes to keep
    track of what is the file situation in regards to the working copy
    and its parent.

    This function must be called within a `dirstate.changing_parents`
    context.

    note: the API is at an early stage and we might need to adjust it
    depending of what information ends up being relevant and useful to
    other processing.
    """
    # thin wrapper: the shared implementation also serves the "hacky"
    # extension entry point below
    self._update_file(
        filename=filename,
        wc_tracked=wc_tracked,
        p1_tracked=p1_tracked,
        p2_info=p2_info,
        possibly_dirty=possibly_dirty,
        parentfiledata=parentfiledata,
    )
771
780
# XXX since this makes the dirstate dirty, we should enforce that it is
# done within an appropriate change-context that scopes the change and
# ensures it eventually gets written to disk (or rolled back)
def hacky_extension_update_file(self, *args, **kwargs):
    """NEVER USE THIS, YOU DO NOT NEED IT

    This function is a variant of "update_file" to be called by a small
    set of extensions, it also adjusts the internal state of file, but
    can be called outside an `changing_parents` context.

    A very small number of extensions meddle with the working copy
    content in a way that requires adjusting the dirstate accordingly.
    At the time this comment is written they are:
    - keyword,
    - largefile,
    PLEASE DO NOT GROW THIS LIST ANY FURTHER.

    This function could probably be replaced by a more semantic one
    (like "adjust expected size" or "always revalidate file content",
    etc) however at the time where this is written, this is too much of
    a detour to be considered.
    """
    self._update_file(*args, **kwargs)
798
807
def _update_file(
    self,
    filename,
    wc_tracked,
    p1_tracked,
    p2_info=False,
    possibly_dirty=False,
    parentfiledata=None,
):
    """Shared implementation behind update_file and its hacky variant.

    Records the new state of *filename* in the map and flags the
    dirstate (and the tracked-key, when the tracked set changed) as
    dirty.
    """
    # note: I do not think we need to double check name clash here since
    # we are in a update/merge case that should already have taken care
    # of this. The test agrees.

    self._dirty = True
    old_entry = self._map.get(filename)
    previously_tracked = old_entry is not None and old_entry.tracked
    if previously_tracked != wc_tracked:
        self._dirty_tracked_set = True

    self._map.reset_state(
        filename,
        wc_tracked,
        p1_tracked,
        p2_info=p2_info,
        has_meaningful_mtime=not possibly_dirty,
        parentfiledata=parentfiledata,
    )
830
839
def _check_new_tracked_filename(self, filename):
    """Abort if *filename* cannot become a newly tracked file.

    Rejects invalid names, directory/file clashes with existing
    dirstate entries, and files outside the sparse checkout.
    """
    scmutil.checkfilename(filename)
    if self._map.hastrackeddir(filename):
        msg = _(b'directory %r already in dirstate')
        msg %= pycompat.bytestr(filename)
        raise error.Abort(msg)
    # shadows: no ancestor directory may already be tracked as a file
    for parent_dir in pathutil.finddirs(filename):
        if self._map.hastrackeddir(parent_dir):
            break
        entry = self._map.get(parent_dir)
        if entry is not None and not entry.removed:
            msg = _(b'file %r in dirstate clashes with %r')
            msg %= (pycompat.bytestr(parent_dir), pycompat.bytestr(filename))
            raise error.Abort(msg)
    self._check_sparse(filename)
847
856
def _check_sparse(self, filename):
    """Check that a filename is inside the sparse profile"""
    sparsematch = self._sparsematcher
    if sparsematch is None or sparsematch.always():
        return
    if sparsematch(filename):
        return
    msg = _(b"cannot add '%s' - it is outside the sparse checkout")
    hint = _(
        b'include file with `hg debugsparse --include <pattern>` or use '
        b'`hg add -s <file>` to include file directory while adding'
    )
    raise error.Abort(msg % filename, hint=hint)
859
868
def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
    """Resolve the on-disk (folded) spelling of *path* and cache it.

    *normed* is the case-normalized form of *path*; when the path exists
    on disk the discovered folding is stored in *storemap* keyed by it.
    For missing paths the leading components are still normalized
    (unless *ignoremissing* is set).
    """
    if exists is None:
        exists = os.path.lexists(os.path.join(self._root, path))
    if not exists:
        # Maybe a path component exists
        if not ignoremissing and b'/' in path:
            dirpart, filepart = path.rsplit(b'/', 1)
            dirpart = self._normalize(dirpart, False, ignoremissing, None)
            folded = dirpart + b"/" + filepart
        else:
            # No path components, preserve original case
            folded = path
    else:
        # recursively normalize leading directory components
        # against dirstate
        if b'/' in normed:
            dirpart, filepart = normed.rsplit(b'/', 1)
            dirpart = self._normalize(dirpart, False, ignoremissing, True)
            realdir = self._root + b"/" + dirpart
            folded = dirpart + b"/" + util.fspath(filepart, realdir)
        else:
            folded = util.fspath(normed, self._root)
        storemap[normed] = folded

    return folded
885
894
def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
    """Return the folded (canonical-case) form of file *path*."""
    normed = util.normcase(path)
    cached = self._map.filefoldmap.get(normed, None)
    if cached is not None:
        return cached
    if isknown:
        # came from a disk walk: already in canonical case
        return path
    return self._discoverpath(
        path, normed, ignoremissing, exists, self._map.filefoldmap
    )
897
906
def _normalize(self, path, isknown, ignoremissing=False, exists=None):
    """Return the folded form of *path*, trying files before dirs."""
    normed = util.normcase(path)
    folded = self._map.filefoldmap.get(normed, None)
    if folded is None:
        folded = self._map.dirfoldmap.get(normed, None)
    if folded is None:
        if isknown:
            folded = path
        else:
            # store discovered result in dirfoldmap so that future
            # normalizefile calls don't start matching directories
            folded = self._discoverpath(
                path, normed, ignoremissing, exists, self._map.dirfoldmap
            )
    return folded
913
922
def normalize(self, path, isknown=False, ignoremissing=False):
    """
    normalize the case of a pathname when on a casefolding filesystem

    isknown specifies whether the filename came from walking the
    disk, to avoid extra filesystem access.

    If ignoremissing is True, missing path are returned
    unchanged. Otherwise, we try harder to normalize possibly
    existing path components.

    The normalized case is determined based on the following precedence:

    - version of name already stored in the dirstate
    - version of name stored on disk
    - version provided via command arguments
    """
    if not self._checkcase:
        # case-sensitive filesystem: nothing to normalize
        return path
    return self._normalize(path, isknown, ignoremissing)
935
944
# XXX this method is barely used, as a result:
# - its semantic is unclear
# - do we really needs it ?
@requires_changing_parents
def clear(self):
    """Drop every entry from the dirstate map and mark it dirty."""
    self._map.clear()
    self._dirty = True
943
952
@requires_changing_parents
def rebuild(self, parent, allfiles, changedfiles=None):
    """Reset the dirstate to describe *parent* for *allfiles*.

    When *changedfiles* is given, only those entries are refreshed
    (looked up again, or dropped when absent from *allfiles*);
    otherwise the whole dirstate is rebuilt from scratch.  Files
    outside the sparse profile are filtered out.
    """
    matcher = self._sparsematcher
    if matcher is not None and not matcher.always():
        # should not add non-matching files
        allfiles = [f for f in allfiles if matcher(f)]
        if changedfiles:
            changedfiles = [f for f in changedfiles if matcher(f)]

        if changedfiles is not None:
            # these files will be deleted from the dirstate when they
            # are not found to be in allfiles
            dirstatefilestoremove = {f for f in self if not matcher(f)}
            changedfiles = dirstatefilestoremove.union(changedfiles)

    if changedfiles is None:
        # Rebuild entire dirstate
        to_lookup = allfiles
        to_drop = []
        self.clear()
    elif len(changedfiles) < 10:
        # Avoid turning allfiles into a set, which can be expensive if
        # it's large.
        to_lookup = []
        to_drop = []
        for f in changedfiles:
            if f in allfiles:
                to_lookup.append(f)
            else:
                to_drop.append(f)
    else:
        changedfilesset = set(changedfiles)
        to_lookup = changedfilesset & set(allfiles)
        to_drop = changedfilesset - to_lookup

    if self._origpl is None:
        # remember the pre-change parents for the change callbacks
        self._origpl = self._pl
    self._map.setparents(parent, self._nodeconstants.nullid)

    for f in to_lookup:
        if self.in_merge:
            self.set_tracked(f)
        else:
            self._map.reset_state(
                f,
                wc_tracked=True,
                p1_tracked=True,
            )
    for f in to_drop:
        self._map.reset_state(f)

    self._dirty = True
996
1005
def identity(self):
    """Return identity of dirstate itself to detect changing in storage

    If identity of previous dirstate is equal to this, writing
    changes based on the former dirstate out can keep consistency.
    """
    return self._map.identity
1004
1013
def write(self, tr):
    """Write the dirty dirstate to disk, now or through transaction *tr*.

    With a transaction, the main write (and the optional tracked-hint
    key update) are registered as post-finalize file generators, and an
    abort hook invalidates the in-memory state.  Without a transaction,
    the files are written immediately and atomically.
    """
    if not self._dirty:
        return
    # make sure we don't request a write of invalidated content
    # XXX move before the dirty check once `unlock` stop calling `write`
    assert not self._invalidated_context

    # the hint key only needs rewriting when the tracked set changed
    write_key = self._use_tracked_hint and self._dirty_tracked_set
    if tr:

        def on_abort(tr):
            self._attached_to_a_transaction = False
            self.invalidate()

        # make sure we invalidate the current change on abort
        # (fix: the former extra `if tr is not None:` guard was
        # redundant — `tr` is known truthy in this branch)
        tr.addabort(
            b'dirstate-invalidate%s' % self._tr_key_suffix,
            on_abort,
        )

        self._attached_to_a_transaction = True

        def on_success(f):
            self._attached_to_a_transaction = False
            # fix: dropped a stray trailing comma that built and
            # discarded a one-element tuple here
            self._writedirstate(tr, f)

        # delay writing in-memory changes out
        tr.addfilegenerator(
            b'dirstate-1-main%s' % self._tr_key_suffix,
            (self._filename,),
            on_success,
            location=b'plain',
            post_finalize=True,
        )
        if write_key:
            tr.addfilegenerator(
                b'dirstate-2-key-post%s' % self._tr_key_suffix,
                (self._filename_th,),
                lambda f: self._write_tracked_hint(tr, f),
                location=b'plain',
                post_finalize=True,
            )
        return

    # no transaction: write both files immediately and atomically
    # (fix: named helper instead of a lambda bound to the name `file`,
    # which shadowed the builtin)
    def open_file(name):
        return self._opener(name, b"w", atomictemp=True, checkambig=True)

    with open_file(self._filename) as f:
        self._writedirstate(tr, f)
    if write_key:
        # we update the key-file after writing to make sure reader have
        # a key that match the newly written content
        with open_file(self._filename_th) as f:
            self._write_tracked_hint(tr, f)
1058
1067
def delete_tracked_hint(self):
    """Remove the tracked-hint file and stop using the feature.

    To be used by format downgrade operations.
    """
    # unlink first: if it fails we keep advertising the hint as in use
    self._opener.unlink(self._filename_th)
    self._use_tracked_hint = False
1065
1074
1066 def addparentchangecallback(self, category, callback):
1075 def addparentchangecallback(self, category, callback):
1067 """add a callback to be called when the wd parents are changed
1076 """add a callback to be called when the wd parents are changed
1068
1077
1069 Callback will be called with the following arguments:
1078 Callback will be called with the following arguments:
1070 dirstate, (oldp1, oldp2), (newp1, newp2)
1079 dirstate, (oldp1, oldp2), (newp1, newp2)
1071
1080
1072 Category is a unique identifier to allow overwriting an old callback
1081 Category is a unique identifier to allow overwriting an old callback
1073 with a newer callback.
1082 with a newer callback.
1074 """
1083 """
1075 self._plchangecallbacks[category] = callback
1084 self._plchangecallbacks[category] = callback
1076
1085
1077 def _writedirstate(self, tr, st):
1086 def _writedirstate(self, tr, st):
1078 # make sure we don't write invalidated content
1087 # make sure we don't write invalidated content
1079 assert not self._invalidated_context
1088 assert not self._invalidated_context
1080 # notify callbacks about parents change
1089 # notify callbacks about parents change
1081 if self._origpl is not None and self._origpl != self._pl:
1090 if self._origpl is not None and self._origpl != self._pl:
1082 for c, callback in sorted(self._plchangecallbacks.items()):
1091 for c, callback in sorted(self._plchangecallbacks.items()):
1083 callback(self, self._origpl, self._pl)
1092 callback(self, self._origpl, self._pl)
1084 self._origpl = None
1093 self._origpl = None
1085 self._map.write(tr, st)
1094 self._map.write(tr, st)
1086 self._dirty = False
1095 self._dirty = False
1087 self._dirty_tracked_set = False
1096 self._dirty_tracked_set = False
1088
1097
1089 def _write_tracked_hint(self, tr, f):
1098 def _write_tracked_hint(self, tr, f):
1090 key = node.hex(uuid.uuid4().bytes)
1099 key = node.hex(uuid.uuid4().bytes)
1091 f.write(b"1\n%s\n" % key) # 1 is the format version
1100 f.write(b"1\n%s\n" % key) # 1 is the format version
1092
1101
1093 def _dirignore(self, f):
1102 def _dirignore(self, f):
1094 if self._ignore(f):
1103 if self._ignore(f):
1095 return True
1104 return True
1096 for p in pathutil.finddirs(f):
1105 for p in pathutil.finddirs(f):
1097 if self._ignore(p):
1106 if self._ignore(p):
1098 return True
1107 return True
1099 return False
1108 return False
1100
1109
1101 def _ignorefiles(self):
1110 def _ignorefiles(self):
1102 files = []
1111 files = []
1103 if os.path.exists(self._join(b'.hgignore')):
1112 if os.path.exists(self._join(b'.hgignore')):
1104 files.append(self._join(b'.hgignore'))
1113 files.append(self._join(b'.hgignore'))
1105 for name, path in self._ui.configitems(b"ui"):
1114 for name, path in self._ui.configitems(b"ui"):
1106 if name == b'ignore' or name.startswith(b'ignore.'):
1115 if name == b'ignore' or name.startswith(b'ignore.'):
1107 # we need to use os.path.join here rather than self._join
1116 # we need to use os.path.join here rather than self._join
1108 # because path is arbitrary and user-specified
1117 # because path is arbitrary and user-specified
1109 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1118 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1110 return files
1119 return files
1111
1120
    def _ignorefileandline(self, f):
        """Return ``(file, lineno, line)`` for the ignore rule matching ``f``.

        Scans every ignore file (breadth-first through any ``subinclude``d
        pattern files) and returns the source of the first pattern that
        matches ``f``.  Returns ``(None, -1, b"")`` when nothing matches.
        """
        # Work queue of pattern files still to scan; starts with the
        # top-level ignore files and grows as subincludes are discovered.
        files = collections.deque(self._ignorefiles())
        visited = set()
        while files:
            i = files.popleft()
            patterns = matchmod.readpatternfile(
                i, self._ui.warn, sourceinfo=True
            )
            for pattern, lineno, line in patterns:
                kind, p = matchmod._patsplit(pattern, b'glob')
                if kind == b"subinclude":
                    # Queue the referenced pattern file (once) instead of
                    # matching against the subinclude line itself.
                    if p not in visited:
                        files.append(p)
                    continue
                m = matchmod.match(
                    self._root, b'', [], [pattern], warn=self._ui.warn
                )
                if m(f):
                    # First matching pattern wins; report where it came from.
                    return (i, lineno, line)
            visited.add(i)
        return (None, -1, b"")
1133
1142
    def _walkexplicit(self, match, subrepos):
        """Get stat data about the files explicitly specified by match.

        Return a triple (results, dirsfound, dirsnotfound).
        - results is a mapping from filename to stat result. It also contains
          listings mapping subrepos and .hg to None.
        - dirsfound is a list of files found to be directories.
        - dirsnotfound is a list of files that the dirstate thinks are
          directories and that were not found."""

        def badtype(mode):
            # Human-readable description for a path that is neither a
            # regular file, a symlink, nor a directory.
            kind = _(b'unknown')
            if stat.S_ISCHR(mode):
                kind = _(b'character device')
            elif stat.S_ISBLK(mode):
                kind = _(b'block device')
            elif stat.S_ISFIFO(mode):
                kind = _(b'fifo')
            elif stat.S_ISSOCK(mode):
                kind = _(b'socket')
            elif stat.S_ISDIR(mode):
                kind = _(b'directory')
            return _(b'unsupported file type (type is %s)') % kind

        # Local aliases to avoid repeated attribute lookups in the loop.
        badfn = match.bad
        dmap = self._map
        lstat = os.lstat
        getkind = stat.S_IFMT
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join
        dirsfound = []
        foundadd = dirsfound.append
        dirsnotfound = []
        notfoundadd = dirsnotfound.append

        if not match.isexact() and self._checkcase:
            normalize = self._normalize
        else:
            normalize = None

        # Merge-walk the sorted file list against the sorted subrepo list,
        # dropping any explicit file that lives inside a subrepo (those are
        # handled by the subrepo itself).
        files = sorted(match.files())
        subrepos.sort()
        i, j = 0, 0
        while i < len(files) and j < len(subrepos):
            subpath = subrepos[j] + b"/"
            if files[i] < subpath:
                i += 1
                continue
            while i < len(files) and files[i].startswith(subpath):
                del files[i]
            j += 1

        if not files or b'' in files:
            files = [b'']
            # constructing the foldmap is expensive, so don't do it for the
            # common case where files is ['']
            normalize = None
        results = dict.fromkeys(subrepos)
        results[b'.hg'] = None

        # Stat each explicitly requested path and classify it.
        for ff in files:
            if normalize:
                nf = normalize(ff, False, True)
            else:
                nf = ff
            if nf in results:
                continue

            try:
                st = lstat(join(nf))
                kind = getkind(st.st_mode)
                if kind == dirkind:
                    if nf in dmap:
                        # file replaced by dir on disk but still in dirstate
                        results[nf] = None
                    foundadd((nf, ff))
                elif kind == regkind or kind == lnkkind:
                    results[nf] = st
                else:
                    badfn(ff, badtype(kind))
                    if nf in dmap:
                        results[nf] = None
            except (OSError) as inst:
                # nf not found on disk - it is dirstate only
                if nf in dmap:  # does it exactly match a missing file?
                    results[nf] = None
                else:  # does it match a missing directory?
                    if self._map.hasdir(nf):
                        notfoundadd(nf)
                    else:
                        badfn(ff, encoding.strtolocal(inst.strerror))

        # match.files() may contain explicitly-specified paths that shouldn't
        # be taken; drop them from the list of files found. dirsfound/notfound
        # aren't filtered here because they will be tested later.
        if match.anypats():
            for f in list(results):
                if f == b'.hg' or f in subrepos:
                    # keep sentinel to disable further out-of-repo walks
                    continue
                if not match(f):
                    del results[f]

        # Case insensitive filesystems cannot rely on lstat() failing to detect
        # a case-only rename. Prune the stat object for any file that does not
        # match the case in the filesystem, if there are multiple files that
        # normalize to the same path.
        if match.isexact() and self._checkcase:
            normed = {}

            # Group the stat'ed results by their case-normalized form.
            for f, st in results.items():
                if st is None:
                    continue

                nc = util.normcase(f)
                paths = normed.get(nc)

                if paths is None:
                    paths = set()
                    normed[nc] = paths

                paths.add(f)

            # Within each collision group, keep the stat only for the path
            # whose case matches what the filesystem actually stores.
            for norm, paths in normed.items():
                if len(paths) > 1:
                    for path in paths:
                        folded = self._discoverpath(
                            path, norm, True, None, self._map.dirfoldmap
                        )
                        if path != folded:
                            results[path] = None

        return results, dirsfound, dirsnotfound
1269
1278
    def walk(self, match, subrepos, unknown, ignored, full=True):
        """
        Walk recursively through the directory tree, finding all files
        matched by match.

        If full is False, maybe skip some known-clean files.

        Return a dict mapping filename to stat-like object (either
        mercurial.osutil.stat instance or return value of os.stat()).

        """
        # full is a flag that extensions that hook into walk can use -- this
        # implementation doesn't use it at all. This satisfies the contract
        # because we only guarantee a "maybe".

        # Pick the ignore predicates according to what the caller wants
        # listed: util.never disables ignoring entirely, util.always makes
        # everything "ignored" so the walk skips it.
        if ignored:
            ignore = util.never
            dirignore = util.never
        elif unknown:
            ignore = self._ignore
            dirignore = self._dirignore
        else:
            # if not unknown and not ignored, drop dir recursion and step 2
            ignore = util.always
            dirignore = util.always

        # Restrict the walk to the sparse checkout (explicit files are
        # always allowed through via the exact matcher).
        if self._sparsematchfn is not None:
            em = matchmod.exact(match.files())
            sm = matchmod.unionmatcher([self._sparsematcher, em])
            match = matchmod.intersectmatchers(match, sm)

        # Local aliases to avoid repeated attribute lookups in the hot loop.
        matchfn = match.matchfn
        matchalways = match.always()
        matchtdir = match.traversedir
        dmap = self._map
        listdir = util.listdir
        lstat = os.lstat
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join

        exact = skipstep3 = False
        if match.isexact():  # match.exact
            exact = True
            dirignore = util.always  # skip step 2
        elif match.prefix():  # match.match, no patterns
            skipstep3 = True

        if not exact and self._checkcase:
            normalize = self._normalize
            normalizefile = self._normalizefile
            skipstep3 = False
        else:
            normalize = self._normalize
            normalizefile = None

        # step 1: find all explicit files
        results, work, dirsnotfound = self._walkexplicit(match, subrepos)
        if matchtdir:
            for d in work:
                matchtdir(d[0])
            for d in dirsnotfound:
                matchtdir(d)

        skipstep3 = skipstep3 and not (work or dirsnotfound)
        work = [d for d in work if not dirignore(d[0])]

        # step 2: visit subdirectories
        def traverse(work, alreadynormed):
            # Depth-first traversal using an explicit stack (``work``).
            wadd = work.append
            while work:
                tracing.counter('dirstate.walk work', len(work))
                nd = work.pop()
                visitentries = match.visitchildrenset(nd)
                if not visitentries:
                    continue
                if visitentries == b'this' or visitentries == b'all':
                    visitentries = None
                skip = None
                if nd != b'':
                    skip = b'.hg'
                try:
                    with tracing.log('dirstate.walk.traverse listdir %s', nd):
                        entries = listdir(join(nd), stat=True, skip=skip)
                except (PermissionError, FileNotFoundError) as inst:
                    match.bad(
                        self.pathto(nd), encoding.strtolocal(inst.strerror)
                    )
                    continue
                for f, kind, st in entries:
                    # Some matchers may return files in the visitentries set,
                    # instead of 'this', if the matcher explicitly mentions them
                    # and is not an exactmatcher. This is acceptable; we do not
                    # make any hard assumptions about file-or-directory below
                    # based on the presence of `f` in visitentries. If
                    # visitchildrenset returned a set, we can always skip the
                    # entries *not* in the set it provided regardless of whether
                    # they're actually a file or a directory.
                    if visitentries and f not in visitentries:
                        continue
                    if normalizefile:
                        # even though f might be a directory, we're only
                        # interested in comparing it to files currently in the
                        # dmap -- therefore normalizefile is enough
                        nf = normalizefile(
                            nd and (nd + b"/" + f) or f, True, True
                        )
                    else:
                        nf = nd and (nd + b"/" + f) or f
                    if nf not in results:
                        if kind == dirkind:
                            if not ignore(nf):
                                if matchtdir:
                                    matchtdir(nf)
                                wadd(nf)
                            if nf in dmap and (matchalways or matchfn(nf)):
                                results[nf] = None
                        elif kind == regkind or kind == lnkkind:
                            if nf in dmap:
                                if matchalways or matchfn(nf):
                                    results[nf] = st
                            elif (matchalways or matchfn(nf)) and not ignore(
                                nf
                            ):
                                # unknown file -- normalize if necessary
                                if not alreadynormed:
                                    nf = normalize(nf, False, True)
                                results[nf] = st
                        elif nf in dmap and (matchalways or matchfn(nf)):
                            results[nf] = None

        for nd, d in work:
            # alreadynormed means that processwork doesn't have to do any
            # expensive directory normalization
            alreadynormed = not normalize or nd == d
            traverse([d], alreadynormed)

        # Drop the sentinel entries installed by _walkexplicit.
        for s in subrepos:
            del results[s]
        del results[b'.hg']

        # step 3: visit remaining files from dmap
        if not skipstep3 and not exact:
            # If a dmap file is not in results yet, it was either
            # a) not matching matchfn b) ignored, c) missing, or d) under a
            # symlink directory.
            if not results and matchalways:
                visit = [f for f in dmap]
            else:
                visit = [f for f in dmap if f not in results and matchfn(f)]
            visit.sort()

            if unknown:
                # unknown == True means we walked all dirs under the roots
                # that wasn't ignored, and everything that matched was stat'ed
                # and is already in results.
                # The rest must thus be ignored or under a symlink.
                audit_path = pathutil.pathauditor(self._root, cached=True)

                for nf in iter(visit):
                    # If a stat for the same file was already added with a
                    # different case, don't add one for this, since that would
                    # make it appear as if the file exists under both names
                    # on disk.
                    if (
                        normalizefile
                        and normalizefile(nf, True, True) in results
                    ):
                        results[nf] = None
                    # Report ignored items in the dmap as long as they are not
                    # under a symlink directory.
                    elif audit_path.check(nf):
                        try:
                            results[nf] = lstat(join(nf))
                            # file was just ignored, no links, and exists
                        except OSError:
                            # file doesn't exist
                            results[nf] = None
                    else:
                        # It's either missing or under a symlink directory
                        # which we in this case report as missing
                        results[nf] = None
            else:
                # We may not have walked the full directory tree above,
                # so stat and check everything we missed.
                iv = iter(visit)
                for st in util.statfiles([join(i) for i in visit]):
                    results[next(iv)] = st
        return results
1460
1469
    def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
        """Compute working-copy status via the Rust ``rustmod.status`` call.

        Returns a ``(lookup, status)`` pair where ``lookup`` is the list of
        files whose content must be read to decide whether they changed, and
        ``status`` is a ``scmutil.status`` object.  May mark the dirstate
        dirty as a side effect (the Rust side can refresh cached mtimes).
        Raises ``rustmod.FallbackError`` when the Rust path cannot handle
        the request (caught by the caller, which falls back to Python).
        """
        # Apply the sparse-checkout restriction, keeping explicitly named
        # files reachable through the exact matcher.
        if self._sparsematchfn is not None:
            em = matchmod.exact(matcher.files())
            sm = matchmod.unionmatcher([self._sparsematcher, em])
            matcher = matchmod.intersectmatchers(matcher, sm)
        # Force Rayon (Rust parallelism library) to respect the number of
        # workers. This is a temporary workaround until Rust code knows
        # how to read the config file.
        numcpus = self._ui.configint(b"worker", b"numcpus")
        if numcpus is not None:
            encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)

        workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
        if not workers_enabled:
            encoding.environ[b"RAYON_NUM_THREADS"] = b"1"

        # NOTE: the unpacking order below must match the tuple returned by
        # the Rust implementation exactly.
        (
            lookup,
            modified,
            added,
            removed,
            deleted,
            clean,
            ignored,
            unknown,
            warnings,
            bad,
            traversed,
            dirty,
        ) = rustmod.status(
            self._map._map,
            matcher,
            self._rootdir,
            self._ignorefiles(),
            self._checkexec,
            bool(list_clean),
            bool(list_ignored),
            bool(list_unknown),
            bool(matcher.traversedir),
        )

        # The Rust call may have refreshed cached entries; remember that the
        # dirstate needs writing if so.
        self._dirty |= dirty

        if matcher.traversedir:
            for dir in traversed:
                matcher.traversedir(dir)

        if self._ui.warn:
            for item in warnings:
                if isinstance(item, tuple):
                    # (file_path, syntax) pair: invalid syntax in a pattern
                    # file.
                    file_path, syntax = item
                    msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
                        file_path,
                        syntax,
                    )
                    self._ui.warn(msg)
                else:
                    # bare path: the pattern file could not be read at all
                    msg = _(b"skipping unreadable pattern file '%s': %s\n")
                    self._ui.warn(
                        msg
                        % (
                            pathutil.canonpath(
                                self._rootdir, self._rootdir, item
                            ),
                            b"No such file or directory",
                        )
                    )

        for fn, message in bad:
            matcher.bad(fn, encoding.strtolocal(message))

        status = scmutil.status(
            modified=modified,
            added=added,
            removed=removed,
            deleted=deleted,
            unknown=unknown,
            ignored=ignored,
            clean=clean,
        )
        return (lookup, status)
1542
1551
1543 # XXX since this can make the dirstate dirty (through rust), we should
1552 # XXX since this can make the dirstate dirty (through rust), we should
1544 # enforce that it is done withing an appropriate change-context that scope
1553 # enforce that it is done withing an appropriate change-context that scope
1545 # the change and ensure it eventually get written on disk (or rolled back)
1554 # the change and ensure it eventually get written on disk (or rolled back)
1546 def status(self, match, subrepos, ignored, clean, unknown):
1555 def status(self, match, subrepos, ignored, clean, unknown):
1547 """Determine the status of the working copy relative to the
1556 """Determine the status of the working copy relative to the
1548 dirstate and return a pair of (unsure, status), where status is of type
1557 dirstate and return a pair of (unsure, status), where status is of type
1549 scmutil.status and:
1558 scmutil.status and:
1550
1559
1551 unsure:
1560 unsure:
1552 files that might have been modified since the dirstate was
1561 files that might have been modified since the dirstate was
1553 written, but need to be read to be sure (size is the same
1562 written, but need to be read to be sure (size is the same
1554 but mtime differs)
1563 but mtime differs)
1555 status.modified:
1564 status.modified:
1556 files that have definitely been modified since the dirstate
1565 files that have definitely been modified since the dirstate
1557 was written (different size or mode)
1566 was written (different size or mode)
1558 status.clean:
1567 status.clean:
1559 files that have definitely not been modified since the
1568 files that have definitely not been modified since the
1560 dirstate was written
1569 dirstate was written
1561 """
1570 """
1562 listignored, listclean, listunknown = ignored, clean, unknown
1571 listignored, listclean, listunknown = ignored, clean, unknown
1563 lookup, modified, added, unknown, ignored = [], [], [], [], []
1572 lookup, modified, added, unknown, ignored = [], [], [], [], []
1564 removed, deleted, clean = [], [], []
1573 removed, deleted, clean = [], [], []
1565
1574
1566 dmap = self._map
1575 dmap = self._map
1567 dmap.preload()
1576 dmap.preload()
1568
1577
1569 use_rust = True
1578 use_rust = True
1570
1579
1571 allowed_matchers = (
1580 allowed_matchers = (
1572 matchmod.alwaysmatcher,
1581 matchmod.alwaysmatcher,
1573 matchmod.differencematcher,
1582 matchmod.differencematcher,
1574 matchmod.exactmatcher,
1583 matchmod.exactmatcher,
1575 matchmod.includematcher,
1584 matchmod.includematcher,
1576 matchmod.intersectionmatcher,
1585 matchmod.intersectionmatcher,
1577 matchmod.nevermatcher,
1586 matchmod.nevermatcher,
1578 matchmod.unionmatcher,
1587 matchmod.unionmatcher,
1579 )
1588 )
1580
1589
1581 if rustmod is None:
1590 if rustmod is None:
1582 use_rust = False
1591 use_rust = False
1583 elif self._checkcase:
1592 elif self._checkcase:
1584 # Case-insensitive filesystems are not handled yet
1593 # Case-insensitive filesystems are not handled yet
1585 use_rust = False
1594 use_rust = False
1586 elif subrepos:
1595 elif subrepos:
1587 use_rust = False
1596 use_rust = False
1588 elif not isinstance(match, allowed_matchers):
1597 elif not isinstance(match, allowed_matchers):
1589 # Some matchers have yet to be implemented
1598 # Some matchers have yet to be implemented
1590 use_rust = False
1599 use_rust = False
1591
1600
1592 # Get the time from the filesystem so we can disambiguate files that
1601 # Get the time from the filesystem so we can disambiguate files that
1593 # appear modified in the present or future.
1602 # appear modified in the present or future.
1594 try:
1603 try:
1595 mtime_boundary = timestamp.get_fs_now(self._opener)
1604 mtime_boundary = timestamp.get_fs_now(self._opener)
1596 except OSError:
1605 except OSError:
1597 # In largefiles or readonly context
1606 # In largefiles or readonly context
1598 mtime_boundary = None
1607 mtime_boundary = None
1599
1608
1600 if use_rust:
1609 if use_rust:
1601 try:
1610 try:
1602 res = self._rust_status(
1611 res = self._rust_status(
1603 match, listclean, listignored, listunknown
1612 match, listclean, listignored, listunknown
1604 )
1613 )
1605 return res + (mtime_boundary,)
1614 return res + (mtime_boundary,)
1606 except rustmod.FallbackError:
1615 except rustmod.FallbackError:
1607 pass
1616 pass
1608
1617
1609 def noop(f):
1618 def noop(f):
1610 pass
1619 pass
1611
1620
1612 dcontains = dmap.__contains__
1621 dcontains = dmap.__contains__
1613 dget = dmap.__getitem__
1622 dget = dmap.__getitem__
1614 ladd = lookup.append # aka "unsure"
1623 ladd = lookup.append # aka "unsure"
1615 madd = modified.append
1624 madd = modified.append
1616 aadd = added.append
1625 aadd = added.append
1617 uadd = unknown.append if listunknown else noop
1626 uadd = unknown.append if listunknown else noop
1618 iadd = ignored.append if listignored else noop
1627 iadd = ignored.append if listignored else noop
1619 radd = removed.append
1628 radd = removed.append
1620 dadd = deleted.append
1629 dadd = deleted.append
1621 cadd = clean.append if listclean else noop
1630 cadd = clean.append if listclean else noop
1622 mexact = match.exact
1631 mexact = match.exact
1623 dirignore = self._dirignore
1632 dirignore = self._dirignore
1624 checkexec = self._checkexec
1633 checkexec = self._checkexec
1625 checklink = self._checklink
1634 checklink = self._checklink
1626 copymap = self._map.copymap
1635 copymap = self._map.copymap
1627
1636
1628 # We need to do full walks when either
1637 # We need to do full walks when either
1629 # - we're listing all clean files, or
1638 # - we're listing all clean files, or
1630 # - match.traversedir does something, because match.traversedir should
1639 # - match.traversedir does something, because match.traversedir should
1631 # be called for every dir in the working dir
1640 # be called for every dir in the working dir
1632 full = listclean or match.traversedir is not None
1641 full = listclean or match.traversedir is not None
1633 for fn, st in self.walk(
1642 for fn, st in self.walk(
1634 match, subrepos, listunknown, listignored, full=full
1643 match, subrepos, listunknown, listignored, full=full
1635 ).items():
1644 ).items():
1636 if not dcontains(fn):
1645 if not dcontains(fn):
1637 if (listignored or mexact(fn)) and dirignore(fn):
1646 if (listignored or mexact(fn)) and dirignore(fn):
1638 if listignored:
1647 if listignored:
1639 iadd(fn)
1648 iadd(fn)
1640 else:
1649 else:
1641 uadd(fn)
1650 uadd(fn)
1642 continue
1651 continue
1643
1652
1644 t = dget(fn)
1653 t = dget(fn)
1645 mode = t.mode
1654 mode = t.mode
1646 size = t.size
1655 size = t.size
1647
1656
1648 if not st and t.tracked:
1657 if not st and t.tracked:
1649 dadd(fn)
1658 dadd(fn)
1650 elif t.p2_info:
1659 elif t.p2_info:
1651 madd(fn)
1660 madd(fn)
1652 elif t.added:
1661 elif t.added:
1653 aadd(fn)
1662 aadd(fn)
1654 elif t.removed:
1663 elif t.removed:
1655 radd(fn)
1664 radd(fn)
1656 elif t.tracked:
1665 elif t.tracked:
1657 if not checklink and t.has_fallback_symlink:
1666 if not checklink and t.has_fallback_symlink:
1658 # If the file system does not support symlink, the mode
1667 # If the file system does not support symlink, the mode
1659 # might not be correctly stored in the dirstate, so do not
1668 # might not be correctly stored in the dirstate, so do not
1660 # trust it.
1669 # trust it.
1661 ladd(fn)
1670 ladd(fn)
1662 elif not checkexec and t.has_fallback_exec:
1671 elif not checkexec and t.has_fallback_exec:
1663 # If the file system does not support exec bits, the mode
1672 # If the file system does not support exec bits, the mode
1664 # might not be correctly stored in the dirstate, so do not
1673 # might not be correctly stored in the dirstate, so do not
1665 # trust it.
1674 # trust it.
1666 ladd(fn)
1675 ladd(fn)
1667 elif (
1676 elif (
1668 size >= 0
1677 size >= 0
1669 and (
1678 and (
1670 (size != st.st_size and size != st.st_size & _rangemask)
1679 (size != st.st_size and size != st.st_size & _rangemask)
1671 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1680 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1672 )
1681 )
1673 or fn in copymap
1682 or fn in copymap
1674 ):
1683 ):
1675 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1684 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1676 # issue6456: Size returned may be longer due to
1685 # issue6456: Size returned may be longer due to
1677 # encryption on EXT-4 fscrypt, undecided.
1686 # encryption on EXT-4 fscrypt, undecided.
1678 ladd(fn)
1687 ladd(fn)
1679 else:
1688 else:
1680 madd(fn)
1689 madd(fn)
1681 elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
1690 elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
1682 # There might be a change in the future if for example the
1691 # There might be a change in the future if for example the
1683 # internal clock is off, but this is a case where the issues
1692 # internal clock is off, but this is a case where the issues
1684 # the user would face would be a lot worse and there is
1693 # the user would face would be a lot worse and there is
1685 # nothing we can really do.
1694 # nothing we can really do.
1686 ladd(fn)
1695 ladd(fn)
1687 elif listclean:
1696 elif listclean:
1688 cadd(fn)
1697 cadd(fn)
1689 status = scmutil.status(
1698 status = scmutil.status(
1690 modified, added, removed, deleted, unknown, ignored, clean
1699 modified, added, removed, deleted, unknown, ignored, clean
1691 )
1700 )
1692 return (lookup, status, mtime_boundary)
1701 return (lookup, status, mtime_boundary)
1693
1702
1694 def matches(self, match):
1703 def matches(self, match):
1695 """
1704 """
1696 return files in the dirstate (in whatever state) filtered by match
1705 return files in the dirstate (in whatever state) filtered by match
1697 """
1706 """
1698 dmap = self._map
1707 dmap = self._map
1699 if rustmod is not None:
1708 if rustmod is not None:
1700 dmap = self._map._map
1709 dmap = self._map._map
1701
1710
1702 if match.always():
1711 if match.always():
1703 return dmap.keys()
1712 return dmap.keys()
1704 files = match.files()
1713 files = match.files()
1705 if match.isexact():
1714 if match.isexact():
1706 # fast path -- filter the other way around, since typically files is
1715 # fast path -- filter the other way around, since typically files is
1707 # much smaller than dmap
1716 # much smaller than dmap
1708 return [f for f in files if f in dmap]
1717 return [f for f in files if f in dmap]
1709 if match.prefix() and all(fn in dmap for fn in files):
1718 if match.prefix() and all(fn in dmap for fn in files):
1710 # fast path -- all the values are known to be files, so just return
1719 # fast path -- all the values are known to be files, so just return
1711 # that
1720 # that
1712 return list(files)
1721 return list(files)
1713 return [f for f in dmap if match(f)]
1722 return [f for f in dmap if match(f)]
1714
1723
1715 def _actualfilename(self, tr):
1724 def _actualfilename(self, tr):
1716 if tr:
1725 if tr:
1717 return self._pendingfilename
1726 return self._pendingfilename
1718 else:
1727 else:
1719 return self._filename
1728 return self._filename
1720
1729
1721 def all_file_names(self):
1730 def all_file_names(self):
1722 """list all filename currently used by this dirstate
1731 """list all filename currently used by this dirstate
1723
1732
1724 This is only used to do `hg rollback` related backup in the transaction
1733 This is only used to do `hg rollback` related backup in the transaction
1725 """
1734 """
1726 if not self._opener.exists(self._filename):
1735 if not self._opener.exists(self._filename):
1727 # no data every written to disk yet
1736 # no data every written to disk yet
1728 return ()
1737 return ()
1729 elif self._use_dirstate_v2:
1738 elif self._use_dirstate_v2:
1730 return (
1739 return (
1731 self._filename,
1740 self._filename,
1732 self._map.docket.data_filename(),
1741 self._map.docket.data_filename(),
1733 )
1742 )
1734 else:
1743 else:
1735 return (self._filename,)
1744 return (self._filename,)
1736
1745
1737 def verify(self, m1, m2, p1, narrow_matcher=None):
1746 def verify(self, m1, m2, p1, narrow_matcher=None):
1738 """
1747 """
1739 check the dirstate contents against the parent manifest and yield errors
1748 check the dirstate contents against the parent manifest and yield errors
1740 """
1749 """
1741 missing_from_p1 = _(
1750 missing_from_p1 = _(
1742 b"%s marked as tracked in p1 (%s) but not in manifest1\n"
1751 b"%s marked as tracked in p1 (%s) but not in manifest1\n"
1743 )
1752 )
1744 unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
1753 unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
1745 missing_from_ps = _(
1754 missing_from_ps = _(
1746 b"%s marked as modified, but not in either manifest\n"
1755 b"%s marked as modified, but not in either manifest\n"
1747 )
1756 )
1748 missing_from_ds = _(
1757 missing_from_ds = _(
1749 b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
1758 b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
1750 )
1759 )
1751 for f, entry in self.items():
1760 for f, entry in self.items():
1752 if entry.p1_tracked:
1761 if entry.p1_tracked:
1753 if entry.modified and f not in m1 and f not in m2:
1762 if entry.modified and f not in m1 and f not in m2:
1754 yield missing_from_ps % f
1763 yield missing_from_ps % f
1755 elif f not in m1:
1764 elif f not in m1:
1756 yield missing_from_p1 % (f, node.short(p1))
1765 yield missing_from_p1 % (f, node.short(p1))
1757 if entry.added and f in m1:
1766 if entry.added and f in m1:
1758 yield unexpected_in_p1 % f
1767 yield unexpected_in_p1 % f
1759 for f in m1:
1768 for f in m1:
1760 if narrow_matcher is not None and not narrow_matcher(f):
1769 if narrow_matcher is not None and not narrow_matcher(f):
1761 continue
1770 continue
1762 entry = self.get_entry(f)
1771 entry = self.get_entry(f)
1763 if not entry.p1_tracked:
1772 if not entry.p1_tracked:
1764 yield missing_from_ds % (f, node.short(p1))
1773 yield missing_from_ds % (f, node.short(p1))
General Comments 0
You need to be logged in to leave comments. Login now