##// END OF EJS Templates
cleanup: drop `dirstate.is_changing_parent` deprecated since 6.5...
marmoute -
r52028:88ef8021 default
parent child Browse files
Show More
@@ -1,1842 +1,1832 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import collections
9 import collections
10 import contextlib
10 import contextlib
11 import os
11 import os
12 import stat
12 import stat
13 import uuid
13 import uuid
14
14
15 from .i18n import _
15 from .i18n import _
16
16
17 from hgdemandimport import tracing
17 from hgdemandimport import tracing
18
18
19 from . import (
19 from . import (
20 dirstatemap,
20 dirstatemap,
21 encoding,
21 encoding,
22 error,
22 error,
23 match as matchmod,
23 match as matchmod,
24 node,
24 node,
25 pathutil,
25 pathutil,
26 policy,
26 policy,
27 pycompat,
27 pycompat,
28 scmutil,
28 scmutil,
29 txnutil,
29 txnutil,
30 util,
30 util,
31 )
31 )
32
32
33 from .dirstateutils import (
33 from .dirstateutils import (
34 timestamp,
34 timestamp,
35 )
35 )
36
36
37 from .interfaces import (
37 from .interfaces import (
38 dirstate as intdirstate,
38 dirstate as intdirstate,
39 util as interfaceutil,
39 util as interfaceutil,
40 )
40 )
41
41
parsers = policy.importmod('parsers')
rustmod = policy.importrust('dirstate')

# use to detect lack of a parameter (as opposed to an explicit None)
SENTINEL = object()

# True when the Rust extension providing the fast dirstate-v2 code is loaded
HAS_FAST_DIRSTATE_V2 = rustmod is not None

# short local aliases for frequently used helpers
propertycache = util.propertycache
filecache = scmutil.filecache
_rangemask = dirstatemap.rangemask

DirstateItem = dirstatemap.DirstateItem
55
55
56
56
class repocache(filecache):
    """filecache for files in .hg/"""

    def join(self, obj, fname):
        # resolve ``fname`` relative to the repository's .hg/ directory
        return obj._opener.join(fname)
62
62
63
63
class rootcache(filecache):
    """filecache for files in the repository root"""

    def join(self, obj, fname):
        # resolve ``fname`` relative to the working directory root
        return obj._join(fname)
69
69
70
70
def check_invalidated(func):
    """check that the func is called with a non-invalidated dirstate

    The dirstate is in an "invalidated state" after an error occurred during
    its modification and remains so until we exit the top level scope that
    framed such change.
    """

    def wrap(self, *args, **kwargs):
        # fast path: nothing was invalidated, forward the call untouched
        if not self._invalidated_context:
            return func(self, *args, **kwargs)
        raise error.ProgrammingError(
            'calling `%s` after the dirstate was invalidated' % func.__name__
        )

    return wrap
87
87
88
88
def requires_changing_parents(func):
    # Guard: the wrapped method may only run inside a `changing_parents`
    # context, and never on an invalidated dirstate.
    def wrap(self, *args, **kwargs):
        if self.is_changing_parents:
            return func(self, *args, **kwargs)
        raise error.ProgrammingError(
            'calling `%s` outside of a changing_parents context'
            % func.__name__
        )

    return check_invalidated(wrap)
98
98
99
99
def requires_changing_files(func):
    # Guard: the wrapped method may only run inside a `changing_files`
    # context, and never on an invalidated dirstate.
    def wrap(self, *args, **kwargs):
        if self.is_changing_files:
            return func(self, *args, **kwargs)
        raise error.ProgrammingError(
            'calling `%s` outside of a `changing_files`' % func.__name__
        )

    return check_invalidated(wrap)
109
109
110
110
def requires_changing_any(func):
    # Guard: the wrapped method may only run inside some changing context
    # (parents or files), and never on an invalidated dirstate.
    def wrap(self, *args, **kwargs):
        if self.is_changing_any:
            return func(self, *args, **kwargs)
        raise error.ProgrammingError(
            'calling `%s` outside of a changing context' % func.__name__
        )

    return check_invalidated(wrap)
120
120
121
121
def requires_changing_files_or_status(func):
    # Guard: the wrapped method needs either an open `changing_files`
    # context or an active `running_status` context.
    def wrap(self, *args, **kwargs):
        if self.is_changing_files or self._running_status > 0:
            return func(self, *args, **kwargs)
        msg = (
            'calling `%s` outside of a changing_files '
            'or running_status context'
        ) % func.__name__
        raise error.ProgrammingError(msg)

    return check_invalidated(wrap)
134
134
135
135
# Identifiers for the two mutually exclusive kinds of dirstate-changing
# contexts (see `changing_parents` and `changing_files`).
CHANGE_TYPE_PARENTS = "parents"
CHANGE_TYPE_FILES = "files"
138
138
139
139
140 @interfaceutil.implementer(intdirstate.idirstate)
140 @interfaceutil.implementer(intdirstate.idirstate)
141 class dirstate:
141 class dirstate:
142
142
143 # used by largefile to avoid overwritting transaction callback
143 # used by largefile to avoid overwritting transaction callback
144 _tr_key_suffix = b''
144 _tr_key_suffix = b''
145
145
    def __init__(
        self,
        opener,
        ui,
        root,
        validate,
        sparsematchfn,
        nodeconstants,
        use_dirstate_v2,
        use_tracked_hint=False,
    ):
        """Create a new dirstate object.

        opener is an open()-like callable that can be used to open the
        dirstate file; root is the root of the directory tracked by
        the dirstate.
        """
        self._use_dirstate_v2 = use_dirstate_v2
        self._use_tracked_hint = use_tracked_hint
        self._nodeconstants = nodeconstants
        self._opener = opener
        self._validate = validate
        self._root = root
        # Either build a sparse-matcher or None if sparse is disabled
        self._sparsematchfn = sparsematchfn
        # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
        # UNC path pointing to root share (issue4557)
        self._rootdir = pathutil.normasprefix(root)
        # True if any internal state may be different
        self._dirty = False
        # True if the set of tracked files may be different
        self._dirty_tracked_set = False
        self._ui = ui
        self._filecache = {}
        # nesting level of `changing_parents` context
        self._changing_level = 0
        # the change currently underway
        self._change_type = None
        # number of open _running_status context
        self._running_status = 0
        # True if the current dirstate changing operations have been
        # invalidated (used to make sure all nested contexts have been exited)
        self._invalidated_context = False
        self._attached_to_a_transaction = False
        self._filename = b'dirstate'
        self._filename_th = b'dirstate-tracked-hint'
        self._pendingfilename = b'%s.pending' % self._filename
        self._plchangecallbacks = {}
        self._origpl = None
        self._mapcls = dirstatemap.dirstatemap
        # Access and cache cwd early, so we don't access it for the first time
        # after a working-copy update caused it to not exist (accessing it then
        # raises an exception).
        self._cwd
200
200
    def refresh(self):
        """Drop cached branch/map state so it is re-read from disk.

        Must not be called while a changing context is open.
        """
        # XXX if this happens, you likely did not enter the `changing_xxx`
        # using `repo.dirstate`, so a later `repo.dirstate` access might call
        # `refresh`.
        if self.is_changing_any:
            msg = "refreshing the dirstate in the middle of a change"
            raise error.ProgrammingError(msg)
        if '_branch' in vars(self):
            # drop the cached branch value so the @repocache property re-reads
            del self._branch
        if '_map' in vars(self) and self._map.may_need_refresh():
            self.invalidate()
212
212
    def prefetch_parents(self):
        """make sure the parents are loaded

        Used to avoid a race condition.
        """
        # touching the property is enough to populate the parents cache
        self._pl
219
219
    @contextlib.contextmanager
    @check_invalidated
    def running_status(self, repo):
        """Wrap a status operation

        This context is not mutually exclusive with the `changing_*` context.
        It also does not warrant for the `wlock` to be taken.

        If the wlock is taken, this context will behave in a simple way, and
        ensure the data are scheduled for write when leaving the top level
        context.

        If the lock is not taken, it will only warrant that the data are either
        committed (written) and rolled back (invalidated) when exiting the top
        level context. The write/invalidate action must be performed by the
        wrapped code.


        The expected logic is:

        A: read the dirstate
        B: run status
           This might make the dirstate dirty by updating cache,
           especially in Rust.
        C: do more "post status" fixup if relevant
        D: try to take the w-lock (this will invalidate the changes if they were raced)
        E0: if dirstate changed on disk → discard change (done by dirstate internal)
        E1: elif lock was acquired → write the changes
        E2: else → discard the changes
        """
        has_lock = repo.currentwlock() is not None
        is_changing = self.is_changing_any
        tr = repo.currenttransaction()
        has_tr = tr is not None
        # True when another running_status context is already open
        nested = bool(self._running_status)

        first_and_alone = not (is_changing or has_tr or nested)

        # enforce no change happened outside of a proper context.
        if first_and_alone and self._dirty:
            has_tr = repo.currenttransaction() is not None
            if not has_tr and self._changing_level == 0 and self._dirty:
                msg = "entering a status context, but dirstate is already dirty"
                raise error.ProgrammingError(msg)

        # only the outermost, lock-holding context writes on exit
        should_write = has_lock and not (nested or is_changing)

        self._running_status += 1
        try:
            yield
        except Exception:
            self.invalidate()
            raise
        finally:
            self._running_status -= 1
            if self._invalidated_context:
                should_write = False
                self.invalidate()

        if should_write:
            assert repo.currenttransaction() is tr
            self.write(tr)
        elif not has_lock:
            if self._dirty:
                msg = b'dirstate dirty while exiting an isolated status context'
                repo.ui.develwarn(msg)
                self.invalidate()
287
287
    @contextlib.contextmanager
    @check_invalidated
    def _changing(self, repo, change_type):
        """Scope a dirstate-mutating operation of the given ``change_type``.

        Requires the wlock to be held; enforces that only one kind of change
        (parents vs files) is open at a time, and writes or invalidates the
        dirstate when the top-most context exits.
        """
        if repo.currentwlock() is None:
            msg = b"trying to change the dirstate without holding the wlock"
            raise error.ProgrammingError(msg)

        has_tr = repo.currenttransaction() is not None
        if not has_tr and self._changing_level == 0 and self._dirty:
            msg = b"entering a changing context, but dirstate is already dirty"
            repo.ui.develwarn(msg)

        assert self._changing_level >= 0
        # different type of change are mutually exclusive
        if self._change_type is None:
            assert self._changing_level == 0
            self._change_type = change_type
        elif self._change_type != change_type:
            msg = (
                'trying to open "%s" dirstate-changing context while a "%s" is'
                ' already open'
            )
            msg %= (change_type, self._change_type)
            raise error.ProgrammingError(msg)
        should_write = False
        self._changing_level += 1
        try:
            yield
        except:  # re-raises
            self.invalidate()  # this will set `_invalidated_context`
            raise
        finally:
            assert self._changing_level > 0
            self._changing_level -= 1
            # If the dirstate is being invalidated, call invalidate again.
            # This will throw away anything added by an upper context and
            # reset the `_invalidated_context` flag when relevant
            if self._changing_level <= 0:
                self._change_type = None
                assert self._changing_level == 0
                if self._invalidated_context:
                    # make sure we invalidate anything an upper context might
                    # have changed.
                    self.invalidate()
                else:
                    should_write = self._changing_level <= 0

        # NOTE(review): reconstructed from a diff view — the exact nesting of
        # this tail relative to the try/finally above should be confirmed
        # against the upstream file.
        tr = repo.currenttransaction()
        if has_tr != (tr is not None):
            if has_tr:
                m = "transaction vanished while changing dirstate"
            else:
                m = "transaction appeared while changing dirstate"
            raise error.ProgrammingError(m)
        if should_write:
            self.write(tr)
343
343
    @contextlib.contextmanager
    def changing_parents(self, repo):
        """Wrap a dirstate change related to a change of working copy parents

        This context scopes a series of dirstate modifications that match an
        update of the working copy parents (typically `hg update`, `hg merge`
        etc).

        The dirstate's methods that perform this kind of modifications require
        this context to be present before being called.
        Such methods are decorated with `@requires_changing_parents`.

        The new dirstate contents will be written to disk when the top-most
        `changing_parents` context exits successfully. If an exception is
        raised during a `changing_parents` context of any level, all changes
        are invalidated. If this context is open within an open transaction,
        the dirstate writing is delayed until that transaction is successfully
        committed (and the dirstate is invalidated on transaction abort).

        The `changing_parents` operation is mutually exclusive with the
        `changing_files` one.
        """
        with self._changing(repo, CHANGE_TYPE_PARENTS) as c:
            yield c
368
368
    @contextlib.contextmanager
    def changing_files(self, repo):
        """Wrap a dirstate change related to the set of tracked files

        This context scopes a series of dirstate modifications that change the
        set of tracked files (typically `hg add`, `hg remove` etc) or some
        dirstate stored information (like `hg rename --after`) but preserve
        the working copy parents.

        The dirstate's methods that perform this kind of modifications require
        this context to be present before being called.
        Such methods are decorated with `@requires_changing_files`.

        The new dirstate contents will be written to disk when the top-most
        `changing_files` context exits successfully. If an exception is raised
        during a `changing_files` context of any level, all changes are
        invalidated. If this context is open within an open transaction, the
        dirstate writing is delayed until that transaction is successfully
        committed (and the dirstate is invalidated on transaction abort).

        The `changing_files` operation is mutually exclusive with the
        `changing_parents` one.
        """
        with self._changing(repo, CHANGE_TYPE_FILES) as c:
            yield c
394
394
395 # here to help migration to the new code
395 # here to help migration to the new code
396 def parentchange(self):
396 def parentchange(self):
397 msg = (
397 msg = (
398 "Mercurial 6.4 and later requires call to "
398 "Mercurial 6.4 and later requires call to "
399 "`dirstate.changing_parents(repo)`"
399 "`dirstate.changing_parents(repo)`"
400 )
400 )
401 raise error.ProgrammingError(msg)
401 raise error.ProgrammingError(msg)
402
402
    @property
    def is_changing_any(self):
        """Returns true if the dirstate is in the middle of a set of changes.

        This returns True for any kind of change.
        """
        return self._changing_level > 0
410
410
    @property
    def is_changing_parents(self):
        """Returns true if the dirstate is in the middle of a set of changes
        that modify the dirstate parent.
        """
        if self._changing_level <= 0:
            return False
        return self._change_type == CHANGE_TYPE_PARENTS
429
419
    @property
    def is_changing_files(self):
        """Returns true if the dirstate is in the middle of a set of changes
        that modify the files tracked or their sources.
        """
        if self._changing_level <= 0:
            return False
        return self._change_type == CHANGE_TYPE_FILES
438
428
    @propertycache
    def _map(self):
        """Return the dirstate contents (see documentation for dirstatemap)."""
        return self._mapcls(
            self._ui,
            self._opener,
            self._root,
            self._nodeconstants,
            self._use_dirstate_v2,
        )
449
439
    @property
    def _sparsematcher(self):
        """The matcher for the sparse checkout.

        The working directory may not include every file from a manifest. The
        matcher obtained by this property will match a path if it is to be
        included in the working directory.

        When sparse is disabled, return None.
        """
        if self._sparsematchfn is None:
            return None
        # TODO there is potential to cache this property. For now, the matcher
        # is resolved on every access. (But the called function does use a
        # cache to keep the lookup fast.)
        return self._sparsematchfn()
466
456
    @repocache(b'branch')
    def _branch(self):
        # Read the current branch name from `.hg/branch`, honoring any
        # transaction-pending version of the file; fall back to b"default"
        # when the file is missing or empty.
        f = None
        data = b''
        try:
            f, mode = txnutil.trypending(self._root, self._opener, b'branch')
            data = f.read().strip()
        except FileNotFoundError:
            pass
        finally:
            if f is not None:
                f.close()
        if not data:
            return b"default"
        return data
482
472
    @property
    def _pl(self):
        # working directory parents, as reported by the dirstate map
        return self._map.parents()
486
476
    def hasdir(self, d):
        # delegate to the map's tracked-directory index
        return self._map.hastrackeddir(d)
489
479
    @rootcache(b'.hgignore')
    def _ignore(self):
        # Build the combined ignore matcher from all configured ignore files;
        # match nothing when there are no ignore files at all.
        files = self._ignorefiles()
        if not files:
            return matchmod.never()

        pats = [b'include:%s' % f for f in files]
        return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
498
488
    @propertycache
    def _slash(self):
        # True when ui.slash is set and the OS path separator is not '/'
        return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
502
492
    @propertycache
    def _checklink(self):
        # whether the filesystem at the repo root supports symlinks
        return util.checklink(self._root)
506
496
    @propertycache
    def _checkexec(self):
        # whether the filesystem at the repo root preserves the exec bit
        return bool(util.checkexec(self._root))
510
500
    @propertycache
    def _checkcase(self):
        # True on case-insensitive filesystems (probed via the .hg directory)
        return not util.fscasesensitive(self._join(b'.hg'))
514
504
    def _join(self, f):
        # much faster than os.path.join()
        # it's safe because f is always a relative path
        return self._rootdir + f
519
509
520 def flagfunc(self, buildfallback):
510 def flagfunc(self, buildfallback):
521 """build a callable that returns flags associated with a filename
511 """build a callable that returns flags associated with a filename
522
512
523 The information is extracted from three possible layers:
513 The information is extracted from three possible layers:
524 1. the file system if it supports the information
514 1. the file system if it supports the information
525 2. the "fallback" information stored in the dirstate if any
515 2. the "fallback" information stored in the dirstate if any
526 3. a more expensive mechanism inferring the flags from the parents.
516 3. a more expensive mechanism inferring the flags from the parents.
527 """
517 """
528
518
529 # small hack to cache the result of buildfallback()
519 # small hack to cache the result of buildfallback()
530 fallback_func = []
520 fallback_func = []
531
521
532 def get_flags(x):
522 def get_flags(x):
533 entry = None
523 entry = None
534 fallback_value = None
524 fallback_value = None
535 try:
525 try:
536 st = os.lstat(self._join(x))
526 st = os.lstat(self._join(x))
537 except OSError:
527 except OSError:
538 return b''
528 return b''
539
529
540 if self._checklink:
530 if self._checklink:
541 if util.statislink(st):
531 if util.statislink(st):
542 return b'l'
532 return b'l'
543 else:
533 else:
544 entry = self.get_entry(x)
534 entry = self.get_entry(x)
545 if entry.has_fallback_symlink:
535 if entry.has_fallback_symlink:
546 if entry.fallback_symlink:
536 if entry.fallback_symlink:
547 return b'l'
537 return b'l'
548 else:
538 else:
549 if not fallback_func:
539 if not fallback_func:
550 fallback_func.append(buildfallback())
540 fallback_func.append(buildfallback())
551 fallback_value = fallback_func[0](x)
541 fallback_value = fallback_func[0](x)
552 if b'l' in fallback_value:
542 if b'l' in fallback_value:
553 return b'l'
543 return b'l'
554
544
555 if self._checkexec:
545 if self._checkexec:
556 if util.statisexec(st):
546 if util.statisexec(st):
557 return b'x'
547 return b'x'
558 else:
548 else:
559 if entry is None:
549 if entry is None:
560 entry = self.get_entry(x)
550 entry = self.get_entry(x)
561 if entry.has_fallback_exec:
551 if entry.has_fallback_exec:
562 if entry.fallback_exec:
552 if entry.fallback_exec:
563 return b'x'
553 return b'x'
564 else:
554 else:
565 if fallback_value is None:
555 if fallback_value is None:
566 if not fallback_func:
556 if not fallback_func:
567 fallback_func.append(buildfallback())
557 fallback_func.append(buildfallback())
568 fallback_value = fallback_func[0](x)
558 fallback_value = fallback_func[0](x)
569 if b'x' in fallback_value:
559 if b'x' in fallback_value:
570 return b'x'
560 return b'x'
571 return b''
561 return b''
572
562
573 return get_flags
563 return get_flags
574
564
575 @propertycache
565 @propertycache
576 def _cwd(self):
566 def _cwd(self):
577 # internal config: ui.forcecwd
567 # internal config: ui.forcecwd
578 forcecwd = self._ui.config(b'ui', b'forcecwd')
568 forcecwd = self._ui.config(b'ui', b'forcecwd')
579 if forcecwd:
569 if forcecwd:
580 return forcecwd
570 return forcecwd
581 return encoding.getcwd()
571 return encoding.getcwd()
582
572
583 def getcwd(self):
573 def getcwd(self):
584 """Return the path from which a canonical path is calculated.
574 """Return the path from which a canonical path is calculated.
585
575
586 This path should be used to resolve file patterns or to convert
576 This path should be used to resolve file patterns or to convert
587 canonical paths back to file paths for display. It shouldn't be
577 canonical paths back to file paths for display. It shouldn't be
588 used to get real file paths. Use vfs functions instead.
578 used to get real file paths. Use vfs functions instead.
589 """
579 """
590 cwd = self._cwd
580 cwd = self._cwd
591 if cwd == self._root:
581 if cwd == self._root:
592 return b''
582 return b''
593 # self._root ends with a path separator if self._root is '/' or 'C:\'
583 # self._root ends with a path separator if self._root is '/' or 'C:\'
594 rootsep = self._root
584 rootsep = self._root
595 if not util.endswithsep(rootsep):
585 if not util.endswithsep(rootsep):
596 rootsep += pycompat.ossep
586 rootsep += pycompat.ossep
597 if cwd.startswith(rootsep):
587 if cwd.startswith(rootsep):
598 return cwd[len(rootsep) :]
588 return cwd[len(rootsep) :]
599 else:
589 else:
600 # we're outside the repo. return an absolute path.
590 # we're outside the repo. return an absolute path.
601 return cwd
591 return cwd
602
592
603 def pathto(self, f, cwd=None):
593 def pathto(self, f, cwd=None):
604 if cwd is None:
594 if cwd is None:
605 cwd = self.getcwd()
595 cwd = self.getcwd()
606 path = util.pathto(self._root, cwd, f)
596 path = util.pathto(self._root, cwd, f)
607 if self._slash:
597 if self._slash:
608 return util.pconvert(path)
598 return util.pconvert(path)
609 return path
599 return path
610
600
611 def get_entry(self, path):
601 def get_entry(self, path):
612 """return a DirstateItem for the associated path"""
602 """return a DirstateItem for the associated path"""
613 entry = self._map.get(path)
603 entry = self._map.get(path)
614 if entry is None:
604 if entry is None:
615 return DirstateItem()
605 return DirstateItem()
616 return entry
606 return entry
617
607
618 def __contains__(self, key):
608 def __contains__(self, key):
619 return key in self._map
609 return key in self._map
620
610
621 def __iter__(self):
611 def __iter__(self):
622 return iter(sorted(self._map))
612 return iter(sorted(self._map))
623
613
624 def items(self):
614 def items(self):
625 return self._map.items()
615 return self._map.items()
626
616
627 iteritems = items
617 iteritems = items
628
618
629 def parents(self):
619 def parents(self):
630 return [self._validate(p) for p in self._pl]
620 return [self._validate(p) for p in self._pl]
631
621
632 def p1(self):
622 def p1(self):
633 return self._validate(self._pl[0])
623 return self._validate(self._pl[0])
634
624
635 def p2(self):
625 def p2(self):
636 return self._validate(self._pl[1])
626 return self._validate(self._pl[1])
637
627
638 @property
628 @property
639 def in_merge(self):
629 def in_merge(self):
640 """True if a merge is in progress"""
630 """True if a merge is in progress"""
641 return self._pl[1] != self._nodeconstants.nullid
631 return self._pl[1] != self._nodeconstants.nullid
642
632
643 def branch(self):
633 def branch(self):
644 return encoding.tolocal(self._branch)
634 return encoding.tolocal(self._branch)
645
635
646 @requires_changing_parents
636 @requires_changing_parents
647 def setparents(self, p1, p2=None):
637 def setparents(self, p1, p2=None):
648 """Set dirstate parents to p1 and p2.
638 """Set dirstate parents to p1 and p2.
649
639
650 When moving from two parents to one, "merged" entries a
640 When moving from two parents to one, "merged" entries a
651 adjusted to normal and previous copy records discarded and
641 adjusted to normal and previous copy records discarded and
652 returned by the call.
642 returned by the call.
653
643
654 See localrepo.setparents()
644 See localrepo.setparents()
655 """
645 """
656 if p2 is None:
646 if p2 is None:
657 p2 = self._nodeconstants.nullid
647 p2 = self._nodeconstants.nullid
658 if self._changing_level == 0:
648 if self._changing_level == 0:
659 raise ValueError(
649 raise ValueError(
660 b"cannot set dirstate parent outside of "
650 b"cannot set dirstate parent outside of "
661 b"dirstate.changing_parents context manager"
651 b"dirstate.changing_parents context manager"
662 )
652 )
663
653
664 self._dirty = True
654 self._dirty = True
665 oldp2 = self._pl[1]
655 oldp2 = self._pl[1]
666 if self._origpl is None:
656 if self._origpl is None:
667 self._origpl = self._pl
657 self._origpl = self._pl
668 nullid = self._nodeconstants.nullid
658 nullid = self._nodeconstants.nullid
669 # True if we need to fold p2 related state back to a linear case
659 # True if we need to fold p2 related state back to a linear case
670 fold_p2 = oldp2 != nullid and p2 == nullid
660 fold_p2 = oldp2 != nullid and p2 == nullid
671 return self._map.setparents(p1, p2, fold_p2=fold_p2)
661 return self._map.setparents(p1, p2, fold_p2=fold_p2)
672
662
673 def setbranch(self, branch, transaction=SENTINEL):
663 def setbranch(self, branch, transaction=SENTINEL):
674 self.__class__._branch.set(self, encoding.fromlocal(branch))
664 self.__class__._branch.set(self, encoding.fromlocal(branch))
675 if transaction is SENTINEL:
665 if transaction is SENTINEL:
676 msg = b"setbranch needs a `transaction` argument"
666 msg = b"setbranch needs a `transaction` argument"
677 self._ui.deprecwarn(msg, b'6.5')
667 self._ui.deprecwarn(msg, b'6.5')
678 transaction = None
668 transaction = None
679 if transaction is not None:
669 if transaction is not None:
680 self._setup_tr_abort(transaction)
670 self._setup_tr_abort(transaction)
681 transaction.addfilegenerator(
671 transaction.addfilegenerator(
682 b'dirstate-3-branch%s' % self._tr_key_suffix,
672 b'dirstate-3-branch%s' % self._tr_key_suffix,
683 (b'branch',),
673 (b'branch',),
684 self._write_branch,
674 self._write_branch,
685 location=b'plain',
675 location=b'plain',
686 post_finalize=True,
676 post_finalize=True,
687 )
677 )
688 return
678 return
689
679
690 vfs = self._opener
680 vfs = self._opener
691 with vfs(b'branch', b'w', atomictemp=True, checkambig=True) as f:
681 with vfs(b'branch', b'w', atomictemp=True, checkambig=True) as f:
692 self._write_branch(f)
682 self._write_branch(f)
693 # make sure filecache has the correct stat info for _branch after
683 # make sure filecache has the correct stat info for _branch after
694 # replacing the underlying file
684 # replacing the underlying file
695 #
685 #
696 # XXX do we actually need this,
686 # XXX do we actually need this,
697 # refreshing the attribute is quite cheap
687 # refreshing the attribute is quite cheap
698 ce = self._filecache[b'_branch']
688 ce = self._filecache[b'_branch']
699 if ce:
689 if ce:
700 ce.refresh()
690 ce.refresh()
701
691
702 def _write_branch(self, file_obj):
692 def _write_branch(self, file_obj):
703 file_obj.write(self._branch + b'\n')
693 file_obj.write(self._branch + b'\n')
704
694
705 def invalidate(self):
695 def invalidate(self):
706 """Causes the next access to reread the dirstate.
696 """Causes the next access to reread the dirstate.
707
697
708 This is different from localrepo.invalidatedirstate() because it always
698 This is different from localrepo.invalidatedirstate() because it always
709 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
699 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
710 check whether the dirstate has changed before rereading it."""
700 check whether the dirstate has changed before rereading it."""
711
701
712 for a in ("_map", "_branch", "_ignore"):
702 for a in ("_map", "_branch", "_ignore"):
713 if a in self.__dict__:
703 if a in self.__dict__:
714 delattr(self, a)
704 delattr(self, a)
715 self._dirty = False
705 self._dirty = False
716 self._dirty_tracked_set = False
706 self._dirty_tracked_set = False
717 self._invalidated_context = bool(
707 self._invalidated_context = bool(
718 self._changing_level > 0
708 self._changing_level > 0
719 or self._attached_to_a_transaction
709 or self._attached_to_a_transaction
720 or self._running_status
710 or self._running_status
721 )
711 )
722 self._origpl = None
712 self._origpl = None
723
713
724 @requires_changing_any
714 @requires_changing_any
725 def copy(self, source, dest):
715 def copy(self, source, dest):
726 """Mark dest as a copy of source. Unmark dest if source is None."""
716 """Mark dest as a copy of source. Unmark dest if source is None."""
727 if source == dest:
717 if source == dest:
728 return
718 return
729 self._dirty = True
719 self._dirty = True
730 if source is not None:
720 if source is not None:
731 self._check_sparse(source)
721 self._check_sparse(source)
732 self._map.copymap[dest] = source
722 self._map.copymap[dest] = source
733 else:
723 else:
734 self._map.copymap.pop(dest, None)
724 self._map.copymap.pop(dest, None)
735
725
736 def copied(self, file):
726 def copied(self, file):
737 return self._map.copymap.get(file, None)
727 return self._map.copymap.get(file, None)
738
728
739 def copies(self):
729 def copies(self):
740 return self._map.copymap
730 return self._map.copymap
741
731
742 @requires_changing_files
732 @requires_changing_files
743 def set_tracked(self, filename, reset_copy=False):
733 def set_tracked(self, filename, reset_copy=False):
744 """a "public" method for generic code to mark a file as tracked
734 """a "public" method for generic code to mark a file as tracked
745
735
746 This function is to be called outside of "update/merge" case. For
736 This function is to be called outside of "update/merge" case. For
747 example by a command like `hg add X`.
737 example by a command like `hg add X`.
748
738
749 if reset_copy is set, any existing copy information will be dropped.
739 if reset_copy is set, any existing copy information will be dropped.
750
740
751 return True the file was previously untracked, False otherwise.
741 return True the file was previously untracked, False otherwise.
752 """
742 """
753 self._dirty = True
743 self._dirty = True
754 entry = self._map.get(filename)
744 entry = self._map.get(filename)
755 if entry is None or not entry.tracked:
745 if entry is None or not entry.tracked:
756 self._check_new_tracked_filename(filename)
746 self._check_new_tracked_filename(filename)
757 pre_tracked = self._map.set_tracked(filename)
747 pre_tracked = self._map.set_tracked(filename)
758 if reset_copy:
748 if reset_copy:
759 self._map.copymap.pop(filename, None)
749 self._map.copymap.pop(filename, None)
760 if pre_tracked:
750 if pre_tracked:
761 self._dirty_tracked_set = True
751 self._dirty_tracked_set = True
762 return pre_tracked
752 return pre_tracked
763
753
764 @requires_changing_files
754 @requires_changing_files
765 def set_untracked(self, filename):
755 def set_untracked(self, filename):
766 """a "public" method for generic code to mark a file as untracked
756 """a "public" method for generic code to mark a file as untracked
767
757
768 This function is to be called outside of "update/merge" case. For
758 This function is to be called outside of "update/merge" case. For
769 example by a command like `hg remove X`.
759 example by a command like `hg remove X`.
770
760
771 return True the file was previously tracked, False otherwise.
761 return True the file was previously tracked, False otherwise.
772 """
762 """
773 ret = self._map.set_untracked(filename)
763 ret = self._map.set_untracked(filename)
774 if ret:
764 if ret:
775 self._dirty = True
765 self._dirty = True
776 self._dirty_tracked_set = True
766 self._dirty_tracked_set = True
777 return ret
767 return ret
778
768
779 @requires_changing_files_or_status
769 @requires_changing_files_or_status
780 def set_clean(self, filename, parentfiledata):
770 def set_clean(self, filename, parentfiledata):
781 """record that the current state of the file on disk is known to be clean"""
771 """record that the current state of the file on disk is known to be clean"""
782 self._dirty = True
772 self._dirty = True
783 if not self._map[filename].tracked:
773 if not self._map[filename].tracked:
784 self._check_new_tracked_filename(filename)
774 self._check_new_tracked_filename(filename)
785 (mode, size, mtime) = parentfiledata
775 (mode, size, mtime) = parentfiledata
786 self._map.set_clean(filename, mode, size, mtime)
776 self._map.set_clean(filename, mode, size, mtime)
787
777
788 @requires_changing_files_or_status
778 @requires_changing_files_or_status
789 def set_possibly_dirty(self, filename):
779 def set_possibly_dirty(self, filename):
790 """record that the current state of the file on disk is unknown"""
780 """record that the current state of the file on disk is unknown"""
791 self._dirty = True
781 self._dirty = True
792 self._map.set_possibly_dirty(filename)
782 self._map.set_possibly_dirty(filename)
793
783
794 @requires_changing_parents
784 @requires_changing_parents
795 def update_file_p1(
785 def update_file_p1(
796 self,
786 self,
797 filename,
787 filename,
798 p1_tracked,
788 p1_tracked,
799 ):
789 ):
800 """Set a file as tracked in the parent (or not)
790 """Set a file as tracked in the parent (or not)
801
791
802 This is to be called when adjust the dirstate to a new parent after an history
792 This is to be called when adjust the dirstate to a new parent after an history
803 rewriting operation.
793 rewriting operation.
804
794
805 It should not be called during a merge (p2 != nullid) and only within
795 It should not be called during a merge (p2 != nullid) and only within
806 a `with dirstate.changing_parents(repo):` context.
796 a `with dirstate.changing_parents(repo):` context.
807 """
797 """
808 if self.in_merge:
798 if self.in_merge:
809 msg = b'update_file_reference should not be called when merging'
799 msg = b'update_file_reference should not be called when merging'
810 raise error.ProgrammingError(msg)
800 raise error.ProgrammingError(msg)
811 entry = self._map.get(filename)
801 entry = self._map.get(filename)
812 if entry is None:
802 if entry is None:
813 wc_tracked = False
803 wc_tracked = False
814 else:
804 else:
815 wc_tracked = entry.tracked
805 wc_tracked = entry.tracked
816 if not (p1_tracked or wc_tracked):
806 if not (p1_tracked or wc_tracked):
817 # the file is no longer relevant to anyone
807 # the file is no longer relevant to anyone
818 if self._map.get(filename) is not None:
808 if self._map.get(filename) is not None:
819 self._map.reset_state(filename)
809 self._map.reset_state(filename)
820 self._dirty = True
810 self._dirty = True
821 elif (not p1_tracked) and wc_tracked:
811 elif (not p1_tracked) and wc_tracked:
822 if entry is not None and entry.added:
812 if entry is not None and entry.added:
823 return # avoid dropping copy information (maybe?)
813 return # avoid dropping copy information (maybe?)
824
814
825 self._map.reset_state(
815 self._map.reset_state(
826 filename,
816 filename,
827 wc_tracked,
817 wc_tracked,
828 p1_tracked,
818 p1_tracked,
829 # the underlying reference might have changed, we will have to
819 # the underlying reference might have changed, we will have to
830 # check it.
820 # check it.
831 has_meaningful_mtime=False,
821 has_meaningful_mtime=False,
832 )
822 )
833
823
834 @requires_changing_parents
824 @requires_changing_parents
835 def update_file(
825 def update_file(
836 self,
826 self,
837 filename,
827 filename,
838 wc_tracked,
828 wc_tracked,
839 p1_tracked,
829 p1_tracked,
840 p2_info=False,
830 p2_info=False,
841 possibly_dirty=False,
831 possibly_dirty=False,
842 parentfiledata=None,
832 parentfiledata=None,
843 ):
833 ):
844 """update the information about a file in the dirstate
834 """update the information about a file in the dirstate
845
835
846 This is to be called when the direstates parent changes to keep track
836 This is to be called when the direstates parent changes to keep track
847 of what is the file situation in regards to the working copy and its parent.
837 of what is the file situation in regards to the working copy and its parent.
848
838
849 This function must be called within a `dirstate.changing_parents` context.
839 This function must be called within a `dirstate.changing_parents` context.
850
840
851 note: the API is at an early stage and we might need to adjust it
841 note: the API is at an early stage and we might need to adjust it
852 depending of what information ends up being relevant and useful to
842 depending of what information ends up being relevant and useful to
853 other processing.
843 other processing.
854 """
844 """
855 self._update_file(
845 self._update_file(
856 filename=filename,
846 filename=filename,
857 wc_tracked=wc_tracked,
847 wc_tracked=wc_tracked,
858 p1_tracked=p1_tracked,
848 p1_tracked=p1_tracked,
859 p2_info=p2_info,
849 p2_info=p2_info,
860 possibly_dirty=possibly_dirty,
850 possibly_dirty=possibly_dirty,
861 parentfiledata=parentfiledata,
851 parentfiledata=parentfiledata,
862 )
852 )
863
853
864 def hacky_extension_update_file(self, *args, **kwargs):
854 def hacky_extension_update_file(self, *args, **kwargs):
865 """NEVER USE THIS, YOU DO NOT NEED IT
855 """NEVER USE THIS, YOU DO NOT NEED IT
866
856
867 This function is a variant of "update_file" to be called by a small set
857 This function is a variant of "update_file" to be called by a small set
868 of extensions, it also adjust the internal state of file, but can be
858 of extensions, it also adjust the internal state of file, but can be
869 called outside an `changing_parents` context.
859 called outside an `changing_parents` context.
870
860
871 A very small number of extension meddle with the working copy content
861 A very small number of extension meddle with the working copy content
872 in a way that requires to adjust the dirstate accordingly. At the time
862 in a way that requires to adjust the dirstate accordingly. At the time
873 this command is written they are :
863 this command is written they are :
874 - keyword,
864 - keyword,
875 - largefile,
865 - largefile,
876 PLEASE DO NOT GROW THIS LIST ANY FURTHER.
866 PLEASE DO NOT GROW THIS LIST ANY FURTHER.
877
867
878 This function could probably be replaced by more semantic one (like
868 This function could probably be replaced by more semantic one (like
879 "adjust expected size" or "always revalidate file content", etc)
869 "adjust expected size" or "always revalidate file content", etc)
880 however at the time where this is writen, this is too much of a detour
870 however at the time where this is writen, this is too much of a detour
881 to be considered.
871 to be considered.
882 """
872 """
883 if not (self._changing_level > 0 or self._running_status > 0):
873 if not (self._changing_level > 0 or self._running_status > 0):
884 msg = "requires a changes context"
874 msg = "requires a changes context"
885 raise error.ProgrammingError(msg)
875 raise error.ProgrammingError(msg)
886 self._update_file(
876 self._update_file(
887 *args,
877 *args,
888 **kwargs,
878 **kwargs,
889 )
879 )
890
880
891 def _update_file(
881 def _update_file(
892 self,
882 self,
893 filename,
883 filename,
894 wc_tracked,
884 wc_tracked,
895 p1_tracked,
885 p1_tracked,
896 p2_info=False,
886 p2_info=False,
897 possibly_dirty=False,
887 possibly_dirty=False,
898 parentfiledata=None,
888 parentfiledata=None,
899 ):
889 ):
900
890
901 # note: I do not think we need to double check name clash here since we
891 # note: I do not think we need to double check name clash here since we
902 # are in a update/merge case that should already have taken care of
892 # are in a update/merge case that should already have taken care of
903 # this. The test agrees
893 # this. The test agrees
904
894
905 self._dirty = True
895 self._dirty = True
906 old_entry = self._map.get(filename)
896 old_entry = self._map.get(filename)
907 if old_entry is None:
897 if old_entry is None:
908 prev_tracked = False
898 prev_tracked = False
909 else:
899 else:
910 prev_tracked = old_entry.tracked
900 prev_tracked = old_entry.tracked
911 if prev_tracked != wc_tracked:
901 if prev_tracked != wc_tracked:
912 self._dirty_tracked_set = True
902 self._dirty_tracked_set = True
913
903
914 self._map.reset_state(
904 self._map.reset_state(
915 filename,
905 filename,
916 wc_tracked,
906 wc_tracked,
917 p1_tracked,
907 p1_tracked,
918 p2_info=p2_info,
908 p2_info=p2_info,
919 has_meaningful_mtime=not possibly_dirty,
909 has_meaningful_mtime=not possibly_dirty,
920 parentfiledata=parentfiledata,
910 parentfiledata=parentfiledata,
921 )
911 )
922
912
923 def _check_new_tracked_filename(self, filename):
913 def _check_new_tracked_filename(self, filename):
924 scmutil.checkfilename(filename)
914 scmutil.checkfilename(filename)
925 if self._map.hastrackeddir(filename):
915 if self._map.hastrackeddir(filename):
926 msg = _(b'directory %r already in dirstate')
916 msg = _(b'directory %r already in dirstate')
927 msg %= pycompat.bytestr(filename)
917 msg %= pycompat.bytestr(filename)
928 raise error.Abort(msg)
918 raise error.Abort(msg)
929 # shadows
919 # shadows
930 for d in pathutil.finddirs(filename):
920 for d in pathutil.finddirs(filename):
931 if self._map.hastrackeddir(d):
921 if self._map.hastrackeddir(d):
932 break
922 break
933 entry = self._map.get(d)
923 entry = self._map.get(d)
934 if entry is not None and not entry.removed:
924 if entry is not None and not entry.removed:
935 msg = _(b'file %r in dirstate clashes with %r')
925 msg = _(b'file %r in dirstate clashes with %r')
936 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
926 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
937 raise error.Abort(msg)
927 raise error.Abort(msg)
938 self._check_sparse(filename)
928 self._check_sparse(filename)
939
929
940 def _check_sparse(self, filename):
930 def _check_sparse(self, filename):
941 """Check that a filename is inside the sparse profile"""
931 """Check that a filename is inside the sparse profile"""
942 sparsematch = self._sparsematcher
932 sparsematch = self._sparsematcher
943 if sparsematch is not None and not sparsematch.always():
933 if sparsematch is not None and not sparsematch.always():
944 if not sparsematch(filename):
934 if not sparsematch(filename):
945 msg = _(b"cannot add '%s' - it is outside the sparse checkout")
935 msg = _(b"cannot add '%s' - it is outside the sparse checkout")
946 hint = _(
936 hint = _(
947 b'include file with `hg debugsparse --include <pattern>` or use '
937 b'include file with `hg debugsparse --include <pattern>` or use '
948 b'`hg add -s <file>` to include file directory while adding'
938 b'`hg add -s <file>` to include file directory while adding'
949 )
939 )
950 raise error.Abort(msg % filename, hint=hint)
940 raise error.Abort(msg % filename, hint=hint)
951
941
952 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
942 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
953 if exists is None:
943 if exists is None:
954 exists = os.path.lexists(os.path.join(self._root, path))
944 exists = os.path.lexists(os.path.join(self._root, path))
955 if not exists:
945 if not exists:
956 # Maybe a path component exists
946 # Maybe a path component exists
957 if not ignoremissing and b'/' in path:
947 if not ignoremissing and b'/' in path:
958 d, f = path.rsplit(b'/', 1)
948 d, f = path.rsplit(b'/', 1)
959 d = self._normalize(d, False, ignoremissing, None)
949 d = self._normalize(d, False, ignoremissing, None)
960 folded = d + b"/" + f
950 folded = d + b"/" + f
961 else:
951 else:
962 # No path components, preserve original case
952 # No path components, preserve original case
963 folded = path
953 folded = path
964 else:
954 else:
965 # recursively normalize leading directory components
955 # recursively normalize leading directory components
966 # against dirstate
956 # against dirstate
967 if b'/' in normed:
957 if b'/' in normed:
968 d, f = normed.rsplit(b'/', 1)
958 d, f = normed.rsplit(b'/', 1)
969 d = self._normalize(d, False, ignoremissing, True)
959 d = self._normalize(d, False, ignoremissing, True)
970 r = self._root + b"/" + d
960 r = self._root + b"/" + d
971 folded = d + b"/" + util.fspath(f, r)
961 folded = d + b"/" + util.fspath(f, r)
972 else:
962 else:
973 folded = util.fspath(normed, self._root)
963 folded = util.fspath(normed, self._root)
974 storemap[normed] = folded
964 storemap[normed] = folded
975
965
976 return folded
966 return folded
977
967
978 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
968 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
979 normed = util.normcase(path)
969 normed = util.normcase(path)
980 folded = self._map.filefoldmap.get(normed, None)
970 folded = self._map.filefoldmap.get(normed, None)
981 if folded is None:
971 if folded is None:
982 if isknown:
972 if isknown:
983 folded = path
973 folded = path
984 else:
974 else:
985 folded = self._discoverpath(
975 folded = self._discoverpath(
986 path, normed, ignoremissing, exists, self._map.filefoldmap
976 path, normed, ignoremissing, exists, self._map.filefoldmap
987 )
977 )
988 return folded
978 return folded
989
979
990 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
980 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
991 normed = util.normcase(path)
981 normed = util.normcase(path)
992 folded = self._map.filefoldmap.get(normed, None)
982 folded = self._map.filefoldmap.get(normed, None)
993 if folded is None:
983 if folded is None:
994 folded = self._map.dirfoldmap.get(normed, None)
984 folded = self._map.dirfoldmap.get(normed, None)
995 if folded is None:
985 if folded is None:
996 if isknown:
986 if isknown:
997 folded = path
987 folded = path
998 else:
988 else:
999 # store discovered result in dirfoldmap so that future
989 # store discovered result in dirfoldmap so that future
1000 # normalizefile calls don't start matching directories
990 # normalizefile calls don't start matching directories
1001 folded = self._discoverpath(
991 folded = self._discoverpath(
1002 path, normed, ignoremissing, exists, self._map.dirfoldmap
992 path, normed, ignoremissing, exists, self._map.dirfoldmap
1003 )
993 )
1004 return folded
994 return folded
1005
995
1006 def normalize(self, path, isknown=False, ignoremissing=False):
996 def normalize(self, path, isknown=False, ignoremissing=False):
1007 """
997 """
1008 normalize the case of a pathname when on a casefolding filesystem
998 normalize the case of a pathname when on a casefolding filesystem
1009
999
1010 isknown specifies whether the filename came from walking the
1000 isknown specifies whether the filename came from walking the
1011 disk, to avoid extra filesystem access.
1001 disk, to avoid extra filesystem access.
1012
1002
1013 If ignoremissing is True, missing path are returned
1003 If ignoremissing is True, missing path are returned
1014 unchanged. Otherwise, we try harder to normalize possibly
1004 unchanged. Otherwise, we try harder to normalize possibly
1015 existing path components.
1005 existing path components.
1016
1006
1017 The normalized case is determined based on the following precedence:
1007 The normalized case is determined based on the following precedence:
1018
1008
1019 - version of name already stored in the dirstate
1009 - version of name already stored in the dirstate
1020 - version of name stored on disk
1010 - version of name stored on disk
1021 - version provided via command arguments
1011 - version provided via command arguments
1022 """
1012 """
1023
1013
1024 if self._checkcase:
1014 if self._checkcase:
1025 return self._normalize(path, isknown, ignoremissing)
1015 return self._normalize(path, isknown, ignoremissing)
1026 return path
1016 return path
1027
1017
1028 # XXX this method is barely used, as a result:
1018 # XXX this method is barely used, as a result:
1029 # - its semantic is unclear
1019 # - its semantic is unclear
1030 # - do we really needs it ?
1020 # - do we really needs it ?
1031 @requires_changing_parents
1021 @requires_changing_parents
1032 def clear(self):
1022 def clear(self):
1033 self._map.clear()
1023 self._map.clear()
1034 self._dirty = True
1024 self._dirty = True
1035
1025
@requires_changing_parents
def rebuild(self, parent, allfiles, changedfiles=None):
    """rebuild the dirstate to track `allfiles` under the single `parent`

    `allfiles` is the full list of files the new dirstate should track.
    When `changedfiles` is None the entire dirstate is discarded and
    rebuilt; otherwise only the entries for `changedfiles` are refreshed
    (marked for lookup when still in `allfiles`, dropped when not).
    A sparse matcher (when active) filters both lists first.
    """
    matcher = self._sparsematcher
    if matcher is not None and not matcher.always():
        # should not add non-matching files
        allfiles = [f for f in allfiles if matcher(f)]
        if changedfiles:
            changedfiles = [f for f in changedfiles if matcher(f)]

        if changedfiles is not None:
            # these files will be deleted from the dirstate when they are
            # not found to be in allfiles
            dirstatefilestoremove = {f for f in self if not matcher(f)}
            changedfiles = dirstatefilestoremove.union(changedfiles)

    if changedfiles is None:
        # Rebuild entire dirstate
        to_lookup = allfiles
        to_drop = []
        self.clear()
    elif len(changedfiles) < 10:
        # Avoid turning allfiles into a set, which can be expensive if it's
        # large.
        to_lookup = []
        to_drop = []
        for f in changedfiles:
            if f in allfiles:
                to_lookup.append(f)
            else:
                to_drop.append(f)
    else:
        changedfilesset = set(changedfiles)
        to_lookup = changedfilesset & set(allfiles)
        to_drop = changedfilesset - to_lookup

    if self._origpl is None:
        # remember the pre-rebuild parents so parent-change callbacks fire
        self._origpl = self._pl
    self._map.setparents(parent, self._nodeconstants.nullid)

    for f in to_lookup:
        if self.in_merge:
            self.set_tracked(f)
        else:
            self._map.reset_state(
                f,
                wc_tracked=True,
                p1_tracked=True,
            )
    for f in to_drop:
        self._map.reset_state(f)

    self._dirty = True
1088
1078
1089 def _setup_tr_abort(self, tr):
1079 def _setup_tr_abort(self, tr):
1090 """make sure we invalidate the current change on abort"""
1080 """make sure we invalidate the current change on abort"""
1091 if tr is None:
1081 if tr is None:
1092 return
1082 return
1093
1083
1094 def on_abort(tr):
1084 def on_abort(tr):
1095 self._attached_to_a_transaction = False
1085 self._attached_to_a_transaction = False
1096 self.invalidate()
1086 self.invalidate()
1097
1087
1098 tr.addabort(
1088 tr.addabort(
1099 b'dirstate-invalidate%s' % self._tr_key_suffix,
1089 b'dirstate-invalidate%s' % self._tr_key_suffix,
1100 on_abort,
1090 on_abort,
1101 )
1091 )
1102
1092
def write(self, tr):
    """write in-memory dirstate changes to disk

    When a transaction `tr` is given, the writes are delayed: they are
    registered as file generators on the transaction (main dirstate
    file first, tracked-hint key second).  Without a transaction both
    files are written immediately, hint key last so readers always see
    a key matching the content.
    """
    if not self._dirty:
        return
    # make sure we don't request a write of invalidated content
    # XXX move before the dirty check once `unlock` stop calling `write`
    assert not self._invalidated_context

    write_key = self._use_tracked_hint and self._dirty_tracked_set
    if tr:

        self._setup_tr_abort(tr)
        self._attached_to_a_transaction = True

        def on_success(f):
            self._attached_to_a_transaction = False
            # NOTE(review): the trailing comma makes this a 1-tuple
            # expression; harmless but probably unintended
            self._writedirstate(tr, f),

        # delay writing in-memory changes out
        tr.addfilegenerator(
            b'dirstate-1-main%s' % self._tr_key_suffix,
            (self._filename,),
            on_success,
            location=b'plain',
            post_finalize=True,
        )
        if write_key:
            tr.addfilegenerator(
                b'dirstate-2-key-post%s' % self._tr_key_suffix,
                (self._filename_th,),
                lambda f: self._write_tracked_hint(tr, f),
                location=b'plain',
                post_finalize=True,
            )
        return

    file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
    with file(self._filename) as f:
        self._writedirstate(tr, f)
    if write_key:
        # we update the key-file after writing to make sure reader have a
        # key that match the newly written content
        with file(self._filename_th) as f:
            self._write_tracked_hint(tr, f)
1146
1136
def delete_tracked_hint(self):
    """drop the tracked_hint file and stop maintaining it

    Intended for format-downgrade operations."""
    hint_path = self._filename_th
    self._opener.unlink(hint_path)
    self._use_tracked_hint = False
1153
1143
def addparentchangecallback(self, category, callback):
    """register `callback` to run when working-directory parents move

    The callback is invoked as ``callback(dirstate, (oldp1, oldp2),
    (newp1, newp2))``.  Registering a second callback under the same
    `category` replaces the earlier one.
    """
    self._plchangecallbacks[category] = callback
1164
1154
def _writedirstate(self, tr, st):
    """serialize the dirstate map into the open file object `st`

    When the working directory parents actually changed, the registered
    parent-change callbacks are fired first (in sorted category order,
    so the sequence is deterministic); the dirty flags are cleared once
    the map has been written.
    """
    # make sure we don't write invalidated content
    assert not self._invalidated_context
    # notify callbacks about parents change
    if self._origpl is not None and self._origpl != self._pl:
        for c, callback in sorted(self._plchangecallbacks.items()):
            callback(self, self._origpl, self._pl)
        self._origpl = None
    self._map.write(tr, st)
    self._dirty = False
    self._dirty_tracked_set = False
1176
1166
def _write_tracked_hint(self, tr, f):
    """write a fresh random key into the tracked-hint file `f`

    The first line carries the format version (currently ``1``), the
    second the hex-encoded random key.
    """
    fresh_key = node.hex(uuid.uuid4().bytes)
    f.write(b"1\n" + fresh_key + b"\n")
1180
1170
1181 def _dirignore(self, f):
1171 def _dirignore(self, f):
1182 if self._ignore(f):
1172 if self._ignore(f):
1183 return True
1173 return True
1184 for p in pathutil.finddirs(f):
1174 for p in pathutil.finddirs(f):
1185 if self._ignore(p):
1175 if self._ignore(p):
1186 return True
1176 return True
1187 return False
1177 return False
1188
1178
1189 def _ignorefiles(self):
1179 def _ignorefiles(self):
1190 files = []
1180 files = []
1191 if os.path.exists(self._join(b'.hgignore')):
1181 if os.path.exists(self._join(b'.hgignore')):
1192 files.append(self._join(b'.hgignore'))
1182 files.append(self._join(b'.hgignore'))
1193 for name, path in self._ui.configitems(b"ui"):
1183 for name, path in self._ui.configitems(b"ui"):
1194 if name == b'ignore' or name.startswith(b'ignore.'):
1184 if name == b'ignore' or name.startswith(b'ignore.'):
1195 # we need to use os.path.join here rather than self._join
1185 # we need to use os.path.join here rather than self._join
1196 # because path is arbitrary and user-specified
1186 # because path is arbitrary and user-specified
1197 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1187 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1198 return files
1188 return files
1199
1189
def _ignorefileandline(self, f):
    """return (ignorefile, lineno, line) for the first rule matching `f`

    Walks every ignore file returned by `_ignorefiles()`, following
    ``subinclude`` directives breadth-first (each file visited at most
    once), and returns the file path, line number and raw line of the
    first pattern that matches `f`.  Returns ``(None, -1, b"")`` when
    nothing ignores `f`.
    """
    files = collections.deque(self._ignorefiles())
    visited = set()
    while files:
        i = files.popleft()
        patterns = matchmod.readpatternfile(
            i, self._ui.warn, sourceinfo=True
        )
        for pattern, lineno, line in patterns:
            kind, p = matchmod._patsplit(pattern, b'glob')
            if kind == b"subinclude":
                # queue the referenced file instead of matching on it
                if p not in visited:
                    files.append(p)
                continue
            m = matchmod.match(
                self._root, b'', [], [pattern], warn=self._ui.warn
            )
            if m(f):
                return (i, lineno, line)
        visited.add(i)
    return (None, -1, b"")
1221
1211
def _walkexplicit(self, match, subrepos):
    """Get stat data about the files explicitly specified by match.

    Return a triple (results, dirsfound, dirsnotfound).
    - results is a mapping from filename to stat result. It also contains
      listings mapping subrepos and .hg to None.
    - dirsfound is a list of files found to be directories.
    - dirsnotfound is a list of files that the dirstate thinks are
      directories and that were not found."""

    def badtype(mode):
        # human-readable description for file types we refuse to track
        kind = _(b'unknown')
        if stat.S_ISCHR(mode):
            kind = _(b'character device')
        elif stat.S_ISBLK(mode):
            kind = _(b'block device')
        elif stat.S_ISFIFO(mode):
            kind = _(b'fifo')
        elif stat.S_ISSOCK(mode):
            kind = _(b'socket')
        elif stat.S_ISDIR(mode):
            kind = _(b'directory')
        return _(b'unsupported file type (type is %s)') % kind

    badfn = match.bad
    dmap = self._map
    # hoist frequently-used lookups to locals for the loop below
    lstat = os.lstat
    getkind = stat.S_IFMT
    dirkind = stat.S_IFDIR
    regkind = stat.S_IFREG
    lnkkind = stat.S_IFLNK
    join = self._join
    dirsfound = []
    foundadd = dirsfound.append
    dirsnotfound = []
    notfoundadd = dirsnotfound.append

    if not match.isexact() and self._checkcase:
        normalize = self._normalize
    else:
        normalize = None

    # drop explicitly-listed files that live inside a subrepo: both
    # sequences are sorted, so a single merge-style pass suffices
    files = sorted(match.files())
    subrepos.sort()
    i, j = 0, 0
    while i < len(files) and j < len(subrepos):
        subpath = subrepos[j] + b"/"
        if files[i] < subpath:
            i += 1
            continue
        while i < len(files) and files[i].startswith(subpath):
            del files[i]
        j += 1

    if not files or b'' in files:
        files = [b'']
        # constructing the foldmap is expensive, so don't do it for the
        # common case where files is ['']
        normalize = None
    results = dict.fromkeys(subrepos)
    results[b'.hg'] = None

    for ff in files:
        if normalize:
            nf = normalize(ff, False, True)
        else:
            nf = ff
        if nf in results:
            continue

        try:
            st = lstat(join(nf))
            kind = getkind(st.st_mode)
            if kind == dirkind:
                if nf in dmap:
                    # file replaced by dir on disk but still in dirstate
                    results[nf] = None
                foundadd((nf, ff))
            elif kind == regkind or kind == lnkkind:
                results[nf] = st
            else:
                badfn(ff, badtype(kind))
                if nf in dmap:
                    results[nf] = None
        except (OSError) as inst:
            # nf not found on disk - it is dirstate only
            if nf in dmap:  # does it exactly match a missing file?
                results[nf] = None
            else:  # does it match a missing directory?
                if self._map.hasdir(nf):
                    notfoundadd(nf)
                else:
                    badfn(ff, encoding.strtolocal(inst.strerror))

    # match.files() may contain explicitly-specified paths that shouldn't
    # be taken; drop them from the list of files found. dirsfound/notfound
    # aren't filtered here because they will be tested later.
    if match.anypats():
        for f in list(results):
            if f == b'.hg' or f in subrepos:
                # keep sentinel to disable further out-of-repo walks
                continue
            if not match(f):
                del results[f]

    # Case insensitive filesystems cannot rely on lstat() failing to detect
    # a case-only rename. Prune the stat object for any file that does not
    # match the case in the filesystem, if there are multiple files that
    # normalize to the same path.
    if match.isexact() and self._checkcase:
        normed = {}

        for f, st in results.items():
            if st is None:
                continue

            nc = util.normcase(f)
            paths = normed.get(nc)

            if paths is None:
                paths = set()
                normed[nc] = paths

            paths.add(f)

        for norm, paths in normed.items():
            if len(paths) > 1:
                for path in paths:
                    folded = self._discoverpath(
                        path, norm, True, None, self._map.dirfoldmap
                    )
                    if path != folded:
                        results[path] = None

    return results, dirsfound, dirsnotfound
1357
1347
def walk(self, match, subrepos, unknown, ignored, full=True):
    """
    Walk recursively through the directory tree, finding all files
    matched by match.

    If full is False, maybe skip some known-clean files.

    `unknown` and `ignored` select whether unknown and/or ignored files
    are reported: they choose the ignore predicates used by the
    directory traversal below.

    Return a dict mapping filename to stat-like object (either
    mercurial.osutil.stat instance or return value of os.stat()).

    """
    # full is a flag that extensions that hook into walk can use -- this
    # implementation doesn't use it at all. This satisfies the contract
    # because we only guarantee a "maybe".

    if ignored:
        ignore = util.never
        dirignore = util.never
    elif unknown:
        ignore = self._ignore
        dirignore = self._dirignore
    else:
        # if not unknown and not ignored, drop dir recursion and step 2
        ignore = util.always
        dirignore = util.always

    if self._sparsematchfn is not None:
        # restrict the walk to the sparse checkout, but keep explicitly
        # requested files visible
        em = matchmod.exact(match.files())
        sm = matchmod.unionmatcher([self._sparsematcher, em])
        match = matchmod.intersectmatchers(match, sm)

    matchfn = match.matchfn
    matchalways = match.always()
    matchtdir = match.traversedir
    dmap = self._map
    # hoist frequently-used lookups to locals for the hot loops below
    listdir = util.listdir
    lstat = os.lstat
    dirkind = stat.S_IFDIR
    regkind = stat.S_IFREG
    lnkkind = stat.S_IFLNK
    join = self._join

    exact = skipstep3 = False
    if match.isexact():  # match.exact
        exact = True
        dirignore = util.always  # skip step 2
    elif match.prefix():  # match.match, no patterns
        skipstep3 = True

    if not exact and self._checkcase:
        normalize = self._normalize
        normalizefile = self._normalizefile
        skipstep3 = False
    else:
        normalize = self._normalize
        normalizefile = None

    # step 1: find all explicit files
    results, work, dirsnotfound = self._walkexplicit(match, subrepos)
    if matchtdir:
        for d in work:
            matchtdir(d[0])
        for d in dirsnotfound:
            matchtdir(d)

    skipstep3 = skipstep3 and not (work or dirsnotfound)
    work = [d for d in work if not dirignore(d[0])]

    # step 2: visit subdirectories
    def traverse(work, alreadynormed):
        wadd = work.append
        while work:
            tracing.counter('dirstate.walk work', len(work))
            nd = work.pop()
            visitentries = match.visitchildrenset(nd)
            if not visitentries:
                continue
            if visitentries == b'this' or visitentries == b'all':
                visitentries = None
            skip = None
            if nd != b'':
                skip = b'.hg'
            try:
                with tracing.log('dirstate.walk.traverse listdir %s', nd):
                    entries = listdir(join(nd), stat=True, skip=skip)
            except (PermissionError, FileNotFoundError) as inst:
                match.bad(
                    self.pathto(nd), encoding.strtolocal(inst.strerror)
                )
                continue
            for f, kind, st in entries:
                # Some matchers may return files in the visitentries set,
                # instead of 'this', if the matcher explicitly mentions them
                # and is not an exactmatcher. This is acceptable; we do not
                # make any hard assumptions about file-or-directory below
                # based on the presence of `f` in visitentries. If
                # visitchildrenset returned a set, we can always skip the
                # entries *not* in the set it provided regardless of whether
                # they're actually a file or a directory.
                if visitentries and f not in visitentries:
                    continue
                if normalizefile:
                    # even though f might be a directory, we're only
                    # interested in comparing it to files currently in the
                    # dmap -- therefore normalizefile is enough
                    nf = normalizefile(
                        nd and (nd + b"/" + f) or f, True, True
                    )
                else:
                    nf = nd and (nd + b"/" + f) or f
                if nf not in results:
                    if kind == dirkind:
                        if not ignore(nf):
                            if matchtdir:
                                matchtdir(nf)
                            wadd(nf)
                        if nf in dmap and (matchalways or matchfn(nf)):
                            results[nf] = None
                    elif kind == regkind or kind == lnkkind:
                        if nf in dmap:
                            if matchalways or matchfn(nf):
                                results[nf] = st
                        elif (matchalways or matchfn(nf)) and not ignore(
                            nf
                        ):
                            # unknown file -- normalize if necessary
                            if not alreadynormed:
                                nf = normalize(nf, False, True)
                            results[nf] = st
                    elif nf in dmap and (matchalways or matchfn(nf)):
                        results[nf] = None

    for nd, d in work:
        # alreadynormed means that processwork doesn't have to do any
        # expensive directory normalization
        alreadynormed = not normalize or nd == d
        traverse([d], alreadynormed)

    for s in subrepos:
        del results[s]
    del results[b'.hg']

    # step 3: visit remaining files from dmap
    if not skipstep3 and not exact:
        # If a dmap file is not in results yet, it was either
        # a) not matching matchfn b) ignored, c) missing, or d) under a
        # symlink directory.
        if not results and matchalways:
            visit = [f for f in dmap]
        else:
            visit = [f for f in dmap if f not in results and matchfn(f)]
        visit.sort()

        if unknown:
            # unknown == True means we walked all dirs under the roots
            # that wasn't ignored, and everything that matched was stat'ed
            # and is already in results.
            # The rest must thus be ignored or under a symlink.
            audit_path = pathutil.pathauditor(self._root, cached=True)

            for nf in iter(visit):
                # If a stat for the same file was already added with a
                # different case, don't add one for this, since that would
                # make it appear as if the file exists under both names
                # on disk.
                if (
                    normalizefile
                    and normalizefile(nf, True, True) in results
                ):
                    results[nf] = None
                # Report ignored items in the dmap as long as they are not
                # under a symlink directory.
                elif audit_path.check(nf):
                    try:
                        results[nf] = lstat(join(nf))
                        # file was just ignored, no links, and exists
                    except OSError:
                        # file doesn't exist
                        results[nf] = None
                else:
                    # It's either missing or under a symlink directory
                    # which we in this case report as missing
                    results[nf] = None
        else:
            # We may not have walked the full directory tree above,
            # so stat and check everything we missed.
            iv = iter(visit)
            for st in util.statfiles([join(i) for i in visit]):
                results[next(iv)] = st
    return results
1548
1538
def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
    """Run the Rust fast-path implementation of ``status``.

    Delegates the working-copy scan to ``rustmod.status`` and converts
    its raw result into ``(lookup, scmutil.status)``.  ``lookup`` is the
    list of "unsure" files whose content must still be read to decide.

    May propagate ``rustmod.FallbackError`` when the Rust side cannot
    handle the request; the caller (``status``) catches it and falls
    back to the pure-Python path.
    """
    if self._sparsematchfn is not None:
        # Restrict the matcher to the sparse profile, but keep files the
        # caller named explicitly (exact matches) visible.
        em = matchmod.exact(matcher.files())
        sm = matchmod.unionmatcher([self._sparsematcher, em])
        matcher = matchmod.intersectmatchers(matcher, sm)
    # Force Rayon (Rust parallelism library) to respect the number of
    # workers. This is a temporary workaround until Rust code knows
    # how to read the config file.
    numcpus = self._ui.configint(b"worker", b"numcpus")
    if numcpus is not None:
        encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)

    workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
    if not workers_enabled:
        encoding.environ[b"RAYON_NUM_THREADS"] = b"1"

    (
        lookup,
        modified,
        added,
        removed,
        deleted,
        clean,
        ignored,
        unknown,
        warnings,
        bad,
        traversed,
        dirty,
    ) = rustmod.status(
        self._map._map,
        matcher,
        self._rootdir,
        self._ignorefiles(),
        self._checkexec,
        bool(list_clean),
        bool(list_ignored),
        bool(list_unknown),
        bool(matcher.traversedir),
    )

    # The Rust scan may have fixed up dirstate entries as a side effect;
    # remember that we need to write the dirstate back out.
    self._dirty |= dirty

    if matcher.traversedir:
        for dir in traversed:
            matcher.traversedir(dir)

    if self._ui.warn:
        for item in warnings:
            if isinstance(item, tuple):
                # (file_path, syntax) pair: an ignore file with a bad
                # syntax directive.
                file_path, syntax = item
                msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
                    file_path,
                    syntax,
                )
                self._ui.warn(msg)
            else:
                # bare path: an ignore/pattern file that could not be read
                msg = _(b"skipping unreadable pattern file '%s': %s\n")
                self._ui.warn(
                    msg
                    % (
                        pathutil.canonpath(
                            self._rootdir, self._rootdir, item
                        ),
                        b"No such file or directory",
                    )
                )

    for fn, message in bad:
        matcher.bad(fn, encoding.strtolocal(message))

    status = scmutil.status(
        modified=modified,
        added=added,
        removed=removed,
        deleted=deleted,
        unknown=unknown,
        ignored=ignored,
        clean=clean,
    )
    return (lookup, status)
1630
1620
def status(self, match, subrepos, ignored, clean, unknown):
    """Determine the status of the working copy relative to the
    dirstate and return a pair of (unsure, status), where status is of type
    scmutil.status and:

    unsure:
      files that might have been modified since the dirstate was
      written, but need to be read to be sure (size is the same
      but mtime differs)
    status.modified:
      files that have definitely been modified since the dirstate
      was written (different size or mode)
    status.clean:
      files that have definitely not been modified since the
      dirstate was written

    Returns a 3-tuple ``(lookup, status, mtime_boundary)`` where
    ``mtime_boundary`` is the filesystem "now" used to disambiguate
    files modified in the present/future, or None when unavailable.
    """
    if not self._running_status:
        msg = "Calling `status` outside a `running_status` context"
        raise error.ProgrammingError(msg)
    listignored, listclean, listunknown = ignored, clean, unknown
    lookup, modified, added, unknown, ignored = [], [], [], [], []
    removed, deleted, clean = [], [], []

    dmap = self._map
    dmap.preload()

    use_rust = True

    # Matcher classes the Rust status implementation understands.
    allowed_matchers = (
        matchmod.alwaysmatcher,
        matchmod.differencematcher,
        matchmod.exactmatcher,
        matchmod.includematcher,
        matchmod.intersectionmatcher,
        matchmod.nevermatcher,
        matchmod.unionmatcher,
    )

    if rustmod is None:
        use_rust = False
    elif self._checkcase:
        # Case-insensitive filesystems are not handled yet
        use_rust = False
    elif subrepos:
        use_rust = False
    elif not isinstance(match, allowed_matchers):
        # Some matchers have yet to be implemented
        use_rust = False

    # Get the time from the filesystem so we can disambiguate files that
    # appear modified in the present or future.
    try:
        mtime_boundary = timestamp.get_fs_now(self._opener)
    except OSError:
        # In largefiles or readonly context
        mtime_boundary = None

    if use_rust:
        try:
            res = self._rust_status(
                match, listclean, listignored, listunknown
            )
            return res + (mtime_boundary,)
        except rustmod.FallbackError:
            # Rust could not handle this request; fall through to the
            # pure-Python implementation below.
            pass

    def noop(f):
        pass

    # Bind hot-loop lookups to locals for speed.
    dcontains = dmap.__contains__
    dget = dmap.__getitem__
    ladd = lookup.append  # aka "unsure"
    madd = modified.append
    aadd = added.append
    uadd = unknown.append if listunknown else noop
    iadd = ignored.append if listignored else noop
    radd = removed.append
    dadd = deleted.append
    cadd = clean.append if listclean else noop
    mexact = match.exact
    dirignore = self._dirignore
    checkexec = self._checkexec
    checklink = self._checklink
    copymap = self._map.copymap

    # We need to do full walks when either
    # - we're listing all clean files, or
    # - match.traversedir does something, because match.traversedir should
    #   be called for every dir in the working dir
    full = listclean or match.traversedir is not None
    for fn, st in self.walk(
        match, subrepos, listunknown, listignored, full=full
    ).items():
        if not dcontains(fn):
            # File is on disk but not in the dirstate: ignored or unknown.
            if (listignored or mexact(fn)) and dirignore(fn):
                if listignored:
                    iadd(fn)
            else:
                uadd(fn)
            continue

        t = dget(fn)
        mode = t.mode
        size = t.size

        if not st and t.tracked:
            # tracked but gone from disk
            dadd(fn)
        elif t.p2_info:
            madd(fn)
        elif t.added:
            aadd(fn)
        elif t.removed:
            radd(fn)
        elif t.tracked:
            if not checklink and t.has_fallback_symlink:
                # If the file system does not support symlink, the mode
                # might not be correctly stored in the dirstate, so do not
                # trust it.
                ladd(fn)
            elif not checkexec and t.has_fallback_exec:
                # If the file system does not support exec bits, the mode
                # might not be correctly stored in the dirstate, so do not
                # trust it.
                ladd(fn)
            elif (
                size >= 0
                and (
                    (size != st.st_size and size != st.st_size & _rangemask)
                    or ((mode ^ st.st_mode) & 0o100 and checkexec)
                )
                or fn in copymap
            ):
                if stat.S_ISLNK(st.st_mode) and size != st.st_size:
                    # issue6456: Size returned may be longer due to
                    # encryption on EXT-4 fscrypt, undecided.
                    ladd(fn)
                else:
                    madd(fn)
            elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
                # There might be a change in the future if for example the
                # internal clock is off, but this is a case where the issues
                # the user would face would be a lot worse and there is
                # nothing we can really do.
                ladd(fn)
            elif listclean:
                cadd(fn)
    status = scmutil.status(
        modified, added, removed, deleted, unknown, ignored, clean
    )
    return (lookup, status, mtime_boundary)
1781
1771
def matches(self, match):
    """
    return files in the dirstate (in whatever state) filtered by match
    """
    dmap = self._map
    if rustmod is not None:
        # The Rust-backed map exposes the real mapping one level down.
        dmap = self._map._map

    if match.always():
        return dmap.keys()
    files = match.files()
    if match.isexact():
        # fast path -- filter the other way around, since typically files is
        # much smaller than dmap
        return [f for f in files if f in dmap]
    if match.prefix() and all(fn in dmap for fn in files):
        # fast path -- all the values are known to be files, so just return
        # that
        return list(files)
    return [f for f in dmap if match(f)]
1802
1792
def all_file_names(self):
    """list all filename currently used by this dirstate

    This is only used to do `hg rollback` related backup in the transaction

    Returns a tuple of byte-string names; always includes b'branch', plus
    the dirstate file when it exists on disk, plus the dirstate-v2 data
    file when the v2 format is in use.
    """
    files = [b'branch']
    if self._opener.exists(self._filename):
        files.append(self._filename)
        if self._use_dirstate_v2:
            # v2 stores entries in a separate data file named by the docket
            files.append(self._map.docket.data_filename())
    return tuple(files)
1814
1804
def verify(self, m1, m2, p1, narrow_matcher=None):
    """
    check the dirstate contents against the parent manifest and yield errors

    ``m1``/``m2`` are the first/second parent manifests, ``p1`` the first
    parent node (used only for error messages).  ``narrow_matcher``, when
    given, limits which manifest files are checked.  Yields one formatted
    byte-string message per inconsistency found.
    """
    missing_from_p1 = _(
        b"%s marked as tracked in p1 (%s) but not in manifest1\n"
    )
    unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
    missing_from_ps = _(
        b"%s marked as modified, but not in either manifest\n"
    )
    missing_from_ds = _(
        b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
    )
    # Pass 1: every dirstate entry must agree with the manifests.
    for f, entry in self.items():
        if entry.p1_tracked:
            if entry.modified and f not in m1 and f not in m2:
                yield missing_from_ps % f
            elif f not in m1:
                yield missing_from_p1 % (f, node.short(p1))
        if entry.added and f in m1:
            yield unexpected_in_p1 % f
    # Pass 2: every manifest1 file must be tracked-in-p1 in the dirstate.
    for f in m1:
        if narrow_matcher is not None and not narrow_matcher(f):
            continue
        entry = self.get_entry(f)
        if not entry.p1_tracked:
            yield missing_from_ds % (f, node.short(p1))
General Comments 0
You need to be logged in to leave comments. Login now