##// END OF EJS Templates
dirstate: document the `changing_*` context manager...
marmoute -
r51962:a63e1f79 default
parent child Browse files
Show More
@@ -1,1801 +1,1842 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
import collections
import contextlib
import functools
import os
import stat
import uuid
14
14
15 from .i18n import _
15 from .i18n import _
16
16
17 from hgdemandimport import tracing
17 from hgdemandimport import tracing
18
18
19 from . import (
19 from . import (
20 dirstatemap,
20 dirstatemap,
21 encoding,
21 encoding,
22 error,
22 error,
23 match as matchmod,
23 match as matchmod,
24 node,
24 node,
25 pathutil,
25 pathutil,
26 policy,
26 policy,
27 pycompat,
27 pycompat,
28 scmutil,
28 scmutil,
29 txnutil,
29 txnutil,
30 util,
30 util,
31 )
31 )
32
32
33 from .dirstateutils import (
33 from .dirstateutils import (
34 timestamp,
34 timestamp,
35 )
35 )
36
36
37 from .interfaces import (
37 from .interfaces import (
38 dirstate as intdirstate,
38 dirstate as intdirstate,
39 util as interfaceutil,
39 util as interfaceutil,
40 )
40 )
41
41
42 parsers = policy.importmod('parsers')
42 parsers = policy.importmod('parsers')
43 rustmod = policy.importrust('dirstate')
43 rustmod = policy.importrust('dirstate')
44
44
45 # use to detect lack of a parameter
45 # use to detect lack of a parameter
46 SENTINEL = object()
46 SENTINEL = object()
47
47
48 HAS_FAST_DIRSTATE_V2 = rustmod is not None
48 HAS_FAST_DIRSTATE_V2 = rustmod is not None
49
49
50 propertycache = util.propertycache
50 propertycache = util.propertycache
51 filecache = scmutil.filecache
51 filecache = scmutil.filecache
52 _rangemask = dirstatemap.rangemask
52 _rangemask = dirstatemap.rangemask
53
53
54 DirstateItem = dirstatemap.DirstateItem
54 DirstateItem = dirstatemap.DirstateItem
55
55
56
56
class repocache(filecache):
    """A ``filecache`` variant for files that live under ``.hg/``."""

    def join(self, obj, fname):
        # Resolve ``fname`` through the dirstate's opener (rooted at .hg/).
        return obj._opener.join(fname)
62
62
63
63
class rootcache(filecache):
    """A ``filecache`` variant for files that live in the repository root."""

    def join(self, obj, fname):
        # Resolve ``fname`` relative to the working-directory root.
        return obj._join(fname)
69
69
70
70
def check_invalidated(func):
    """Decorator checking that `func` is called on a non-invalidated dirstate.

    The dirstate is in an "invalidated state" after an error occurred during
    its modification and remains so until we exit the top level scope that
    framed such change.
    """

    # `functools.wraps` preserves the wrapped function's metadata
    # (`__name__`, `__doc__`, ...) so introspection and error messages
    # produced by outer decorators report the real function, not `wrap`.
    @functools.wraps(func)
    def wrap(self, *args, **kwargs):
        if self._invalidated_context:
            msg = 'calling `%s` after the dirstate was invalidated'
            msg %= func.__name__
            raise error.ProgrammingError(msg)
        return func(self, *args, **kwargs)

    return wrap
87
87
88
88
def requires_changing_parents(func):
    """Decorator restricting `func` to an open `changing_parents` context."""

    # `functools.wraps` makes the nested `check_invalidated` error message
    # report `func`'s name instead of the anonymous `wrap`.
    @functools.wraps(func)
    def wrap(self, *args, **kwargs):
        if not self.is_changing_parents:
            msg = 'calling `%s` outside of a changing_parents context'
            msg %= func.__name__
            raise error.ProgrammingError(msg)
        return func(self, *args, **kwargs)

    return check_invalidated(wrap)
98
98
99
99
def requires_changing_files(func):
    """Decorator restricting `func` to an open `changing_files` context."""

    # `functools.wraps` makes the nested `check_invalidated` error message
    # report `func`'s name instead of the anonymous `wrap`.
    @functools.wraps(func)
    def wrap(self, *args, **kwargs):
        if not self.is_changing_files:
            msg = 'calling `%s` outside of a `changing_files`'
            msg %= func.__name__
            raise error.ProgrammingError(msg)
        return func(self, *args, **kwargs)

    return check_invalidated(wrap)
109
109
110
110
def requires_changing_any(func):
    """Decorator restricting `func` to any open `changing_*` context."""

    # `functools.wraps` makes the nested `check_invalidated` error message
    # report `func`'s name instead of the anonymous `wrap`.
    @functools.wraps(func)
    def wrap(self, *args, **kwargs):
        if not self.is_changing_any:
            msg = 'calling `%s` outside of a changing context'
            msg %= func.__name__
            raise error.ProgrammingError(msg)
        return func(self, *args, **kwargs)

    return check_invalidated(wrap)
120
120
121
121
def requires_changing_files_or_status(func):
    """Decorator restricting `func` to a `changing_files` or `running_status`
    context."""

    # `functools.wraps` makes the nested `check_invalidated` error message
    # report `func`'s name instead of the anonymous `wrap`.
    @functools.wraps(func)
    def wrap(self, *args, **kwargs):
        if not (self.is_changing_files or self._running_status > 0):
            msg = (
                'calling `%s` outside of a changing_files '
                'or running_status context'
            )
            msg %= func.__name__
            raise error.ProgrammingError(msg)
        return func(self, *args, **kwargs)

    return check_invalidated(wrap)
134
134
135
135
# Mutually exclusive kinds of dirstate-changing contexts (see `_changing`).
CHANGE_TYPE_PARENTS = "parents"
CHANGE_TYPE_FILES = "files"
138
138
139
139
140 @interfaceutil.implementer(intdirstate.idirstate)
140 @interfaceutil.implementer(intdirstate.idirstate)
141 class dirstate:
141 class dirstate:
142
142
143 # used by largefile to avoid overwritting transaction callback
143 # used by largefile to avoid overwritting transaction callback
144 _tr_key_suffix = b''
144 _tr_key_suffix = b''
145
145
146 def __init__(
146 def __init__(
147 self,
147 self,
148 opener,
148 opener,
149 ui,
149 ui,
150 root,
150 root,
151 validate,
151 validate,
152 sparsematchfn,
152 sparsematchfn,
153 nodeconstants,
153 nodeconstants,
154 use_dirstate_v2,
154 use_dirstate_v2,
155 use_tracked_hint=False,
155 use_tracked_hint=False,
156 ):
156 ):
157 """Create a new dirstate object.
157 """Create a new dirstate object.
158
158
159 opener is an open()-like callable that can be used to open the
159 opener is an open()-like callable that can be used to open the
160 dirstate file; root is the root of the directory tracked by
160 dirstate file; root is the root of the directory tracked by
161 the dirstate.
161 the dirstate.
162 """
162 """
163 self._use_dirstate_v2 = use_dirstate_v2
163 self._use_dirstate_v2 = use_dirstate_v2
164 self._use_tracked_hint = use_tracked_hint
164 self._use_tracked_hint = use_tracked_hint
165 self._nodeconstants = nodeconstants
165 self._nodeconstants = nodeconstants
166 self._opener = opener
166 self._opener = opener
167 self._validate = validate
167 self._validate = validate
168 self._root = root
168 self._root = root
169 # Either build a sparse-matcher or None if sparse is disabled
169 # Either build a sparse-matcher or None if sparse is disabled
170 self._sparsematchfn = sparsematchfn
170 self._sparsematchfn = sparsematchfn
171 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
171 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
172 # UNC path pointing to root share (issue4557)
172 # UNC path pointing to root share (issue4557)
173 self._rootdir = pathutil.normasprefix(root)
173 self._rootdir = pathutil.normasprefix(root)
174 # True is any internal state may be different
174 # True is any internal state may be different
175 self._dirty = False
175 self._dirty = False
176 # True if the set of tracked file may be different
176 # True if the set of tracked file may be different
177 self._dirty_tracked_set = False
177 self._dirty_tracked_set = False
178 self._ui = ui
178 self._ui = ui
179 self._filecache = {}
179 self._filecache = {}
180 # nesting level of `changing_parents` context
180 # nesting level of `changing_parents` context
181 self._changing_level = 0
181 self._changing_level = 0
182 # the change currently underway
182 # the change currently underway
183 self._change_type = None
183 self._change_type = None
184 # number of open _running_status context
184 # number of open _running_status context
185 self._running_status = 0
185 self._running_status = 0
186 # True if the current dirstate changing operations have been
186 # True if the current dirstate changing operations have been
187 # invalidated (used to make sure all nested contexts have been exited)
187 # invalidated (used to make sure all nested contexts have been exited)
188 self._invalidated_context = False
188 self._invalidated_context = False
189 self._attached_to_a_transaction = False
189 self._attached_to_a_transaction = False
190 self._filename = b'dirstate'
190 self._filename = b'dirstate'
191 self._filename_th = b'dirstate-tracked-hint'
191 self._filename_th = b'dirstate-tracked-hint'
192 self._pendingfilename = b'%s.pending' % self._filename
192 self._pendingfilename = b'%s.pending' % self._filename
193 self._plchangecallbacks = {}
193 self._plchangecallbacks = {}
194 self._origpl = None
194 self._origpl = None
195 self._mapcls = dirstatemap.dirstatemap
195 self._mapcls = dirstatemap.dirstatemap
196 # Access and cache cwd early, so we don't access it for the first time
196 # Access and cache cwd early, so we don't access it for the first time
197 # after a working-copy update caused it to not exist (accessing it then
197 # after a working-copy update caused it to not exist (accessing it then
198 # raises an exception).
198 # raises an exception).
199 self._cwd
199 self._cwd
200
200
201 def refresh(self):
201 def refresh(self):
202 # XXX if this happens, you likely did not enter the `changing_xxx`
202 # XXX if this happens, you likely did not enter the `changing_xxx`
203 # using `repo.dirstate`, so a later `repo.dirstate` accesss might call
203 # using `repo.dirstate`, so a later `repo.dirstate` accesss might call
204 # `refresh`.
204 # `refresh`.
205 if self.is_changing_any:
205 if self.is_changing_any:
206 msg = "refreshing the dirstate in the middle of a change"
206 msg = "refreshing the dirstate in the middle of a change"
207 raise error.ProgrammingError(msg)
207 raise error.ProgrammingError(msg)
208 if '_branch' in vars(self):
208 if '_branch' in vars(self):
209 del self._branch
209 del self._branch
210 if '_map' in vars(self) and self._map.may_need_refresh():
210 if '_map' in vars(self) and self._map.may_need_refresh():
211 self.invalidate()
211 self.invalidate()
212
212
213 def prefetch_parents(self):
213 def prefetch_parents(self):
214 """make sure the parents are loaded
214 """make sure the parents are loaded
215
215
216 Used to avoid a race condition.
216 Used to avoid a race condition.
217 """
217 """
218 self._pl
218 self._pl
219
219
220 @contextlib.contextmanager
220 @contextlib.contextmanager
221 @check_invalidated
221 @check_invalidated
222 def running_status(self, repo):
222 def running_status(self, repo):
223 """Wrap a status operation
223 """Wrap a status operation
224
224
225 This context is not mutally exclusive with the `changing_*` context. It
225 This context is not mutally exclusive with the `changing_*` context. It
226 also do not warrant for the `wlock` to be taken.
226 also do not warrant for the `wlock` to be taken.
227
227
228 If the wlock is taken, this context will behave in a simple way, and
228 If the wlock is taken, this context will behave in a simple way, and
229 ensure the data are scheduled for write when leaving the top level
229 ensure the data are scheduled for write when leaving the top level
230 context.
230 context.
231
231
232 If the lock is not taken, it will only warrant that the data are either
232 If the lock is not taken, it will only warrant that the data are either
233 committed (written) and rolled back (invalidated) when exiting the top
233 committed (written) and rolled back (invalidated) when exiting the top
234 level context. The write/invalidate action must be performed by the
234 level context. The write/invalidate action must be performed by the
235 wrapped code.
235 wrapped code.
236
236
237
237
238 The expected logic is:
238 The expected logic is:
239
239
240 A: read the dirstate
240 A: read the dirstate
241 B: run status
241 B: run status
242 This might make the dirstate dirty by updating cache,
242 This might make the dirstate dirty by updating cache,
243 especially in Rust.
243 especially in Rust.
244 C: do more "post status fixup if relevant
244 C: do more "post status fixup if relevant
245 D: try to take the w-lock (this will invalidate the changes if they were raced)
245 D: try to take the w-lock (this will invalidate the changes if they were raced)
246 E0: if dirstate changed on disk β†’ discard change (done by dirstate internal)
246 E0: if dirstate changed on disk β†’ discard change (done by dirstate internal)
247 E1: elif lock was acquired β†’ write the changes
247 E1: elif lock was acquired β†’ write the changes
248 E2: else β†’ discard the changes
248 E2: else β†’ discard the changes
249 """
249 """
250 has_lock = repo.currentwlock() is not None
250 has_lock = repo.currentwlock() is not None
251 is_changing = self.is_changing_any
251 is_changing = self.is_changing_any
252 tr = repo.currenttransaction()
252 tr = repo.currenttransaction()
253 has_tr = tr is not None
253 has_tr = tr is not None
254 nested = bool(self._running_status)
254 nested = bool(self._running_status)
255
255
256 first_and_alone = not (is_changing or has_tr or nested)
256 first_and_alone = not (is_changing or has_tr or nested)
257
257
258 # enforce no change happened outside of a proper context.
258 # enforce no change happened outside of a proper context.
259 if first_and_alone and self._dirty:
259 if first_and_alone and self._dirty:
260 has_tr = repo.currenttransaction() is not None
260 has_tr = repo.currenttransaction() is not None
261 if not has_tr and self._changing_level == 0 and self._dirty:
261 if not has_tr and self._changing_level == 0 and self._dirty:
262 msg = "entering a status context, but dirstate is already dirty"
262 msg = "entering a status context, but dirstate is already dirty"
263 raise error.ProgrammingError(msg)
263 raise error.ProgrammingError(msg)
264
264
265 should_write = has_lock and not (nested or is_changing)
265 should_write = has_lock and not (nested or is_changing)
266
266
267 self._running_status += 1
267 self._running_status += 1
268 try:
268 try:
269 yield
269 yield
270 except Exception:
270 except Exception:
271 self.invalidate()
271 self.invalidate()
272 raise
272 raise
273 finally:
273 finally:
274 self._running_status -= 1
274 self._running_status -= 1
275 if self._invalidated_context:
275 if self._invalidated_context:
276 should_write = False
276 should_write = False
277 self.invalidate()
277 self.invalidate()
278
278
279 if should_write:
279 if should_write:
280 assert repo.currenttransaction() is tr
280 assert repo.currenttransaction() is tr
281 self.write(tr)
281 self.write(tr)
282 elif not has_lock:
282 elif not has_lock:
283 if self._dirty:
283 if self._dirty:
284 msg = b'dirstate dirty while exiting an isolated status context'
284 msg = b'dirstate dirty while exiting an isolated status context'
285 repo.ui.develwarn(msg)
285 repo.ui.develwarn(msg)
286 self.invalidate()
286 self.invalidate()
287
287
288 @contextlib.contextmanager
288 @contextlib.contextmanager
289 @check_invalidated
289 @check_invalidated
290 def _changing(self, repo, change_type):
290 def _changing(self, repo, change_type):
291 if repo.currentwlock() is None:
291 if repo.currentwlock() is None:
292 msg = b"trying to change the dirstate without holding the wlock"
292 msg = b"trying to change the dirstate without holding the wlock"
293 raise error.ProgrammingError(msg)
293 raise error.ProgrammingError(msg)
294
294
295 has_tr = repo.currenttransaction() is not None
295 has_tr = repo.currenttransaction() is not None
296 if not has_tr and self._changing_level == 0 and self._dirty:
296 if not has_tr and self._changing_level == 0 and self._dirty:
297 msg = b"entering a changing context, but dirstate is already dirty"
297 msg = b"entering a changing context, but dirstate is already dirty"
298 repo.ui.develwarn(msg)
298 repo.ui.develwarn(msg)
299
299
300 assert self._changing_level >= 0
300 assert self._changing_level >= 0
301 # different type of change are mutually exclusive
301 # different type of change are mutually exclusive
302 if self._change_type is None:
302 if self._change_type is None:
303 assert self._changing_level == 0
303 assert self._changing_level == 0
304 self._change_type = change_type
304 self._change_type = change_type
305 elif self._change_type != change_type:
305 elif self._change_type != change_type:
306 msg = (
306 msg = (
307 'trying to open "%s" dirstate-changing context while a "%s" is'
307 'trying to open "%s" dirstate-changing context while a "%s" is'
308 ' already open'
308 ' already open'
309 )
309 )
310 msg %= (change_type, self._change_type)
310 msg %= (change_type, self._change_type)
311 raise error.ProgrammingError(msg)
311 raise error.ProgrammingError(msg)
312 should_write = False
312 should_write = False
313 self._changing_level += 1
313 self._changing_level += 1
314 try:
314 try:
315 yield
315 yield
316 except: # re-raises
316 except: # re-raises
317 self.invalidate() # this will set `_invalidated_context`
317 self.invalidate() # this will set `_invalidated_context`
318 raise
318 raise
319 finally:
319 finally:
320 assert self._changing_level > 0
320 assert self._changing_level > 0
321 self._changing_level -= 1
321 self._changing_level -= 1
322 # If the dirstate is being invalidated, call invalidate again.
322 # If the dirstate is being invalidated, call invalidate again.
323 # This will throw away anything added by a upper context and
323 # This will throw away anything added by a upper context and
324 # reset the `_invalidated_context` flag when relevant
324 # reset the `_invalidated_context` flag when relevant
325 if self._changing_level <= 0:
325 if self._changing_level <= 0:
326 self._change_type = None
326 self._change_type = None
327 assert self._changing_level == 0
327 assert self._changing_level == 0
328 if self._invalidated_context:
328 if self._invalidated_context:
329 # make sure we invalidate anything an upper context might
329 # make sure we invalidate anything an upper context might
330 # have changed.
330 # have changed.
331 self.invalidate()
331 self.invalidate()
332 else:
332 else:
333 should_write = self._changing_level <= 0
333 should_write = self._changing_level <= 0
334 tr = repo.currenttransaction()
334 tr = repo.currenttransaction()
335 if has_tr != (tr is not None):
335 if has_tr != (tr is not None):
336 if has_tr:
336 if has_tr:
337 m = "transaction vanished while changing dirstate"
337 m = "transaction vanished while changing dirstate"
338 else:
338 else:
339 m = "transaction appeared while changing dirstate"
339 m = "transaction appeared while changing dirstate"
340 raise error.ProgrammingError(m)
340 raise error.ProgrammingError(m)
341 if should_write:
341 if should_write:
342 self.write(tr)
342 self.write(tr)
343
343
344 @contextlib.contextmanager
344 @contextlib.contextmanager
345 def changing_parents(self, repo):
345 def changing_parents(self, repo):
346 """Wrap a dirstate change related to a change of working copy parents
347
348 This context scopes a series of dirstate modifications that match an
349 update of the working copy parents (typically `hg update`, `hg merge`
350 etc).
351
352 The dirstate's methods that perform this kind of modifications require
353 this context to be present before being called.
354 Such methods are decorated with `@requires_changing_parents`.
355
356 The new dirstate contents will be written to disk when the top-most
357 `changing_parents` context exits successfully. If an exception is
358 raised during a `changing_parents` context of any level, all changes
359 are invalidated. If this context is open within an open transaction,
360 the dirstate writing is delayed until that transaction is successfully
361 committed (and the dirstate is invalidated on transaction abort).
362
363 The `changing_parents` operation is mutually exclusive with the
364 `changing_files` one.
365 """
346 with self._changing(repo, CHANGE_TYPE_PARENTS) as c:
366 with self._changing(repo, CHANGE_TYPE_PARENTS) as c:
347 yield c
367 yield c
348
368
349 @contextlib.contextmanager
369 @contextlib.contextmanager
350 def changing_files(self, repo):
370 def changing_files(self, repo):
371 """Wrap a dirstate change related to the set of tracked files
372
373 This context scopes a series of dirstate modifications that change the
374 set of tracked files. (typically `hg add`, `hg remove` etc) or some
375 dirstate stored information (like `hg rename --after`) but preserve
376 the working copy parents.
377
378 The dirstate's methods that perform this kind of modifications require
379 this context to be present before being called.
380 Such methods are decorated with `@requires_changing_files`.
381
382 The new dirstate contents will be written to disk when the top-most
383 `changing_files` context exits successfully. If an exception is raised
384 during a `changing_files` context of any level, all changes are
385 invalidated. If this context is open within an open transaction, the
386 dirstate writing is delayed until that transaction is successfully
387 committed (and the dirstate is invalidated on transaction abort).
388
389 The `changing_files` operation is mutually exclusive with the
390 `changing_parents` one.
391 """
351 with self._changing(repo, CHANGE_TYPE_FILES) as c:
392 with self._changing(repo, CHANGE_TYPE_FILES) as c:
352 yield c
393 yield c
353
394
354 # here to help migration to the new code
395 # here to help migration to the new code
355 def parentchange(self):
396 def parentchange(self):
356 msg = (
397 msg = (
357 "Mercurial 6.4 and later requires call to "
398 "Mercurial 6.4 and later requires call to "
358 "`dirstate.changing_parents(repo)`"
399 "`dirstate.changing_parents(repo)`"
359 )
400 )
360 raise error.ProgrammingError(msg)
401 raise error.ProgrammingError(msg)
361
402
362 @property
403 @property
363 def is_changing_any(self):
404 def is_changing_any(self):
364 """Returns true if the dirstate is in the middle of a set of changes.
405 """Returns true if the dirstate is in the middle of a set of changes.
365
406
366 This returns True for any kind of change.
407 This returns True for any kind of change.
367 """
408 """
368 return self._changing_level > 0
409 return self._changing_level > 0
369
410
370 def pendingparentchange(self):
411 def pendingparentchange(self):
371 return self.is_changing_parent()
412 return self.is_changing_parent()
372
413
373 def is_changing_parent(self):
414 def is_changing_parent(self):
374 """Returns true if the dirstate is in the middle of a set of changes
415 """Returns true if the dirstate is in the middle of a set of changes
375 that modify the dirstate parent.
416 that modify the dirstate parent.
376 """
417 """
377 self._ui.deprecwarn(b"dirstate.is_changing_parents", b"6.5")
418 self._ui.deprecwarn(b"dirstate.is_changing_parents", b"6.5")
378 return self.is_changing_parents
419 return self.is_changing_parents
379
420
380 @property
421 @property
381 def is_changing_parents(self):
422 def is_changing_parents(self):
382 """Returns true if the dirstate is in the middle of a set of changes
423 """Returns true if the dirstate is in the middle of a set of changes
383 that modify the dirstate parent.
424 that modify the dirstate parent.
384 """
425 """
385 if self._changing_level <= 0:
426 if self._changing_level <= 0:
386 return False
427 return False
387 return self._change_type == CHANGE_TYPE_PARENTS
428 return self._change_type == CHANGE_TYPE_PARENTS
388
429
389 @property
430 @property
390 def is_changing_files(self):
431 def is_changing_files(self):
391 """Returns true if the dirstate is in the middle of a set of changes
432 """Returns true if the dirstate is in the middle of a set of changes
392 that modify the files tracked or their sources.
433 that modify the files tracked or their sources.
393 """
434 """
394 if self._changing_level <= 0:
435 if self._changing_level <= 0:
395 return False
436 return False
396 return self._change_type == CHANGE_TYPE_FILES
437 return self._change_type == CHANGE_TYPE_FILES
397
438
398 @propertycache
439 @propertycache
399 def _map(self):
440 def _map(self):
400 """Return the dirstate contents (see documentation for dirstatemap)."""
441 """Return the dirstate contents (see documentation for dirstatemap)."""
401 return self._mapcls(
442 return self._mapcls(
402 self._ui,
443 self._ui,
403 self._opener,
444 self._opener,
404 self._root,
445 self._root,
405 self._nodeconstants,
446 self._nodeconstants,
406 self._use_dirstate_v2,
447 self._use_dirstate_v2,
407 )
448 )
408
449
409 @property
450 @property
410 def _sparsematcher(self):
451 def _sparsematcher(self):
411 """The matcher for the sparse checkout.
452 """The matcher for the sparse checkout.
412
453
413 The working directory may not include every file from a manifest. The
454 The working directory may not include every file from a manifest. The
414 matcher obtained by this property will match a path if it is to be
455 matcher obtained by this property will match a path if it is to be
415 included in the working directory.
456 included in the working directory.
416
457
417 When sparse if disabled, return None.
458 When sparse if disabled, return None.
418 """
459 """
419 if self._sparsematchfn is None:
460 if self._sparsematchfn is None:
420 return None
461 return None
421 # TODO there is potential to cache this property. For now, the matcher
462 # TODO there is potential to cache this property. For now, the matcher
422 # is resolved on every access. (But the called function does use a
463 # is resolved on every access. (But the called function does use a
423 # cache to keep the lookup fast.)
464 # cache to keep the lookup fast.)
424 return self._sparsematchfn()
465 return self._sparsematchfn()
425
466
426 @repocache(b'branch')
467 @repocache(b'branch')
427 def _branch(self):
468 def _branch(self):
428 f = None
469 f = None
429 data = b''
470 data = b''
430 try:
471 try:
431 f, mode = txnutil.trypending(self._root, self._opener, b'branch')
472 f, mode = txnutil.trypending(self._root, self._opener, b'branch')
432 data = f.read().strip()
473 data = f.read().strip()
433 except FileNotFoundError:
474 except FileNotFoundError:
434 pass
475 pass
435 finally:
476 finally:
436 if f is not None:
477 if f is not None:
437 f.close()
478 f.close()
438 if not data:
479 if not data:
439 return b"default"
480 return b"default"
440 return data
481 return data
441
482
442 @property
483 @property
443 def _pl(self):
484 def _pl(self):
444 return self._map.parents()
485 return self._map.parents()
445
486
446 def hasdir(self, d):
487 def hasdir(self, d):
447 return self._map.hastrackeddir(d)
488 return self._map.hastrackeddir(d)
448
489
449 @rootcache(b'.hgignore')
490 @rootcache(b'.hgignore')
450 def _ignore(self):
491 def _ignore(self):
451 files = self._ignorefiles()
492 files = self._ignorefiles()
452 if not files:
493 if not files:
453 return matchmod.never()
494 return matchmod.never()
454
495
455 pats = [b'include:%s' % f for f in files]
496 pats = [b'include:%s' % f for f in files]
456 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
497 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
457
498
458 @propertycache
499 @propertycache
459 def _slash(self):
500 def _slash(self):
460 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
501 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
461
502
462 @propertycache
503 @propertycache
463 def _checklink(self):
504 def _checklink(self):
464 return util.checklink(self._root)
505 return util.checklink(self._root)
465
506
466 @propertycache
507 @propertycache
467 def _checkexec(self):
508 def _checkexec(self):
468 return bool(util.checkexec(self._root))
509 return bool(util.checkexec(self._root))
469
510
470 @propertycache
511 @propertycache
471 def _checkcase(self):
512 def _checkcase(self):
472 return not util.fscasesensitive(self._join(b'.hg'))
513 return not util.fscasesensitive(self._join(b'.hg'))
473
514
474 def _join(self, f):
515 def _join(self, f):
475 # much faster than os.path.join()
516 # much faster than os.path.join()
476 # it's safe because f is always a relative path
517 # it's safe because f is always a relative path
477 return self._rootdir + f
518 return self._rootdir + f
478
519
def flagfunc(self, buildfallback):
    """build a callable that returns flags associated with a filename

    The information is extracted from three possible layers:
    1. the file system if it supports the information
    2. the "fallback" information stored in the dirstate if any
    3. a more expensive mechanism inferring the flags from the parents.
    """

    # small hack to cache the result of buildfallback()
    fallback_func = []

    def get_flags(x):
        entry = None
        fallback_value = None
        try:
            st = os.lstat(self._join(x))
        except OSError:
            # file is gone from disk: it carries no flags
            return b''

        if self._checklink:
            # the filesystem can tell us directly
            if util.statislink(st):
                return b'l'
        else:
            # filesystem cannot represent symlinks: consult the dirstate
            # fallback bit, then the (expensive) parent-based fallback
            entry = self.get_entry(x)
            if entry.has_fallback_symlink:
                if entry.fallback_symlink:
                    return b'l'
            else:
                if not fallback_func:
                    fallback_func.append(buildfallback())
                fallback_value = fallback_func[0](x)
                if b'l' in fallback_value:
                    return b'l'

        if self._checkexec:
            if util.statisexec(st):
                return b'x'
        else:
            # same three-layer dance for the executable bit
            if entry is None:
                entry = self.get_entry(x)
            if entry.has_fallback_exec:
                if entry.fallback_exec:
                    return b'x'
            else:
                if fallback_value is None:
                    if not fallback_func:
                        fallback_func.append(buildfallback())
                    fallback_value = fallback_func[0](x)
                if b'x' in fallback_value:
                    return b'x'
        return b''

    return get_flags
533
574
@propertycache
def _cwd(self):
    """The current working directory, honoring the ui.forcecwd override."""
    # internal config: ui.forcecwd
    forcecwd = self._ui.config(b'ui', b'forcecwd')
    if forcecwd:
        return forcecwd
    return encoding.getcwd()
541
582
def getcwd(self):
    """Return the path from which a canonical path is calculated.

    This path should be used to resolve file patterns or to convert
    canonical paths back to file paths for display. It shouldn't be
    used to get real file paths. Use vfs functions instead.
    """
    cwd = self._cwd
    if cwd == self._root:
        # at the repository root: relative prefix is empty
        return b''
    # self._root ends with a path separator if self._root is '/' or 'C:\'
    rootsep = self._root
    if not util.endswithsep(rootsep):
        rootsep += pycompat.ossep
    if cwd.startswith(rootsep):
        # inside the repo: strip the root prefix
        return cwd[len(rootsep):]
    # we're outside the repo: hand back an absolute path
    return cwd
561
602
def pathto(self, f, cwd=None):
    """Return *f* expressed relative to *cwd* (default: `getcwd()`)."""
    if cwd is None:
        cwd = self.getcwd()
    result = util.pathto(self._root, cwd, f)
    # honor the ui.slash setting by forcing forward slashes
    return util.pconvert(result) if self._slash else result
569
610
def get_entry(self, path):
    """return a DirstateItem for the associated path"""
    # a missing path yields an empty (untracked) item rather than None
    item = self._map.get(path)
    return DirstateItem() if item is None else item
576
617
def __contains__(self, key):
    """True if *key* has an entry in the dirstate map."""
    return key in self._map
579
620
def __iter__(self):
    """Iterate over entries in stable (sorted) filename order."""
    return iter(sorted(self._map))
582
623
def items(self):
    """Return the (filename, entry) items view of the dirstate map."""
    return self._map.items()

# historical alias kept for callers using the Python 2 era name
iteritems = items
587
628
def parents(self):
    """Return both working-directory parents, each run through _validate."""
    return [self._validate(n) for n in self._pl]
590
631
def p1(self):
    """Return the validated first working-directory parent."""
    pl = self._pl
    return self._validate(pl[0])
593
634
def p2(self):
    """Return the validated second working-directory parent."""
    pl = self._pl
    return self._validate(pl[1])
596
637
@property
def in_merge(self):
    """True if a merge is in progress"""
    # a second parent different from nullid means a merge is underway
    return self._pl[1] != self._nodeconstants.nullid
601
642
def branch(self):
    """Return the current branch name, converted to local encoding."""
    return encoding.tolocal(self._branch)
604
645
@requires_changing_parents
def setparents(self, p1, p2=None):
    """Set dirstate parents to p1 and p2.

    When moving from two parents to one, "merged" entries are
    adjusted to normal and previous copy records discarded and
    returned by the call.

    See localrepo.setparents()
    """
    if p2 is None:
        p2 = self._nodeconstants.nullid
    # belt-and-braces check on top of the decorator: refuse to run
    # outside of a changing_parents context
    if self._changing_level == 0:
        raise ValueError(
            b"cannot set dirstate parent outside of "
            b"dirstate.changing_parents context manager"
        )

    self._dirty = True
    oldp2 = self._pl[1]
    if self._origpl is None:
        # remember the pre-change parents for later restoration/reporting
        self._origpl = self._pl
    nullid = self._nodeconstants.nullid
    # True if we need to fold p2 related state back to a linear case
    fold_p2 = oldp2 != nullid and p2 == nullid
    return self._map.setparents(p1, p2, fold_p2=fold_p2)
631
672
def setbranch(self, branch, transaction=SENTINEL):
    """Set the working-copy branch name.

    With a transaction, the write of the b'branch' file is delegated to
    a transaction file generator (and undone on abort); without one the
    file is written immediately. Calling without passing *transaction*
    at all is deprecated.
    """
    self.__class__._branch.set(self, encoding.fromlocal(branch))
    if transaction is SENTINEL:
        msg = b"setbranch needs a `transaction` argument"
        self._ui.deprecwarn(msg, b'6.5')
        transaction = None
    if transaction is not None:
        self._setup_tr_abort(transaction)
        transaction.addfilegenerator(
            b'dirstate-3-branch%s' % self._tr_key_suffix,
            (b'branch',),
            self._write_branch,
            location=b'plain',
            post_finalize=True,
        )
        return

    vfs = self._opener
    with vfs(b'branch', b'w', atomictemp=True, checkambig=True) as f:
        self._write_branch(f)
    # make sure filecache has the correct stat info for _branch after
    # replacing the underlying file
    #
    # XXX do we actually need this,
    # refreshing the attribute is quite cheap
    ce = self._filecache[b'_branch']
    if ce:
        ce.refresh()
660
701
def _write_branch(self, file_obj):
    """Serialize the branch name (newline-terminated) to *file_obj*."""
    file_obj.write(self._branch + b'\n')
663
704
def invalidate(self):
    """Causes the next access to reread the dirstate.

    This is different from localrepo.invalidatedirstate() because it always
    rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
    check whether the dirstate has changed before rereading it."""

    # drop the cached properties so they get recomputed on next access
    for a in ("_map", "_branch", "_ignore"):
        if a in self.__dict__:
            delattr(self, a)
    self._dirty = False
    self._dirty_tracked_set = False
    # record that we were invalidated while a change/transaction/status
    # run was active, so later writes can be refused (see write())
    self._invalidated_context = bool(
        self._changing_level > 0
        or self._attached_to_a_transaction
        or self._running_status
    )
    self._origpl = None
682
723
@requires_changing_any
def copy(self, source, dest):
    """Mark dest as a copy of source. Unmark dest if source is None."""
    if source == dest:
        # self-copy is a no-op
        return
    self._dirty = True
    if source is None:
        self._map.copymap.pop(dest, None)
    else:
        # refuse sources outside the sparse profile
        self._check_sparse(source)
        self._map.copymap[dest] = source
694
735
def copied(self, file):
    """Return the recorded copy source for *file*, or None."""
    return self._map.copymap.get(file)
697
738
def copies(self):
    """Return the copy map recording {destination: source} entries."""
    return self._map.copymap
700
741
@requires_changing_files
def set_tracked(self, filename, reset_copy=False):
    """a "public" method for generic code to mark a file as tracked

    This function is to be called outside of "update/merge" case. For
    example by a command like `hg add X`.

    if reset_copy is set, any existing copy information will be dropped.

    return True if the file was previously untracked, False otherwise.
    """
    self._dirty = True
    entry = self._map.get(filename)
    if entry is None or not entry.tracked:
        # becoming tracked: validate the name (clashes, sparse profile, ...)
        self._check_new_tracked_filename(filename)
    pre_tracked = self._map.set_tracked(filename)
    if reset_copy:
        self._map.copymap.pop(filename, None)
    if pre_tracked:
        # the set of tracked files changed; a tracked-hint write is needed
        self._dirty_tracked_set = True
    return pre_tracked
722
763
@requires_changing_files
def set_untracked(self, filename):
    """a "public" method for generic code to mark a file as untracked

    This function is to be called outside of "update/merge" case. For
    example by a command like `hg remove X`.

    return True if the file was previously tracked, False otherwise.
    """
    ret = self._map.set_untracked(filename)
    if ret:
        # only dirty the dirstate when something actually changed
        self._dirty = True
        self._dirty_tracked_set = True
    return ret
737
778
@requires_changing_files_or_status
def set_clean(self, filename, parentfiledata):
    """record that the current state of the file on disk is known to be clean"""
    self._dirty = True
    if not self._map[filename].tracked:
        self._check_new_tracked_filename(filename)
    # parentfiledata carries the cached stat information for the file
    (mode, size, mtime) = parentfiledata
    self._map.set_clean(filename, mode, size, mtime)
746
787
@requires_changing_files_or_status
def set_possibly_dirty(self, filename):
    """record that the current state of the file on disk is unknown"""
    self._dirty = True
    self._map.set_possibly_dirty(filename)
752
793
@requires_changing_parents
def update_file_p1(
    self,
    filename,
    p1_tracked,
):
    """Set a file as tracked in the parent (or not)

    This is to be called when adjusting the dirstate to a new parent
    after a history rewriting operation.

    It should not be called during a merge (p2 != nullid) and only within
    a `with dirstate.changing_parents(repo):` context.
    """
    if self.in_merge:
        # fix: report the actual method name (the message previously
        # referred to a stale name, 'update_file_reference')
        msg = b'update_file_p1 should not be called when merging'
        raise error.ProgrammingError(msg)
    entry = self._map.get(filename)
    if entry is None:
        wc_tracked = False
    else:
        wc_tracked = entry.tracked
    if not (p1_tracked or wc_tracked):
        # the file is no longer relevant to anyone
        if self._map.get(filename) is not None:
            self._map.reset_state(filename)
            self._dirty = True
    elif (not p1_tracked) and wc_tracked:
        if entry is not None and entry.added:
            return  # avoid dropping copy information (maybe?)

    self._map.reset_state(
        filename,
        wc_tracked,
        p1_tracked,
        # the underlying reference might have changed, we will have to
        # check it.
        has_meaningful_mtime=False,
    )
792
833
@requires_changing_parents
def update_file(
    self,
    filename,
    wc_tracked,
    p1_tracked,
    p2_info=False,
    possibly_dirty=False,
    parentfiledata=None,
):
    """update the information about a file in the dirstate

    This is to be called when the dirstate's parents change, to keep track
    of what is the file situation in regards to the working copy and its parent.

    This function must be called within a `dirstate.changing_parents` context.

    note: the API is at an early stage and we might need to adjust it
    depending of what information ends up being relevant and useful to
    other processing.
    """
    self._update_file(
        filename=filename,
        wc_tracked=wc_tracked,
        p1_tracked=p1_tracked,
        p2_info=p2_info,
        possibly_dirty=possibly_dirty,
        parentfiledata=parentfiledata,
    )
822
863
def hacky_extension_update_file(self, *args, **kwargs):
    """NEVER USE THIS, YOU DO NOT NEED IT

    This function is a variant of "update_file" to be called by a small set
    of extensions, it also adjusts the internal state of file, but can be
    called outside an `changing_parents` context.

    A very small number of extensions meddle with the working copy content
    in a way that requires to adjust the dirstate accordingly. At the time
    this command is written they are :
    - keyword,
    - largefile,
    PLEASE DO NOT GROW THIS LIST ANY FURTHER.

    This function could probably be replaced by more semantic one (like
    "adjust expected size" or "always revalidate file content", etc)
    however at the time where this is written, this is too much of a detour
    to be considered.
    """
    # still require *some* change context (files change or status run)
    if not (self._changing_level > 0 or self._running_status > 0):
        msg = "requires a changes context"
        raise error.ProgrammingError(msg)
    self._update_file(
        *args,
        **kwargs,
    )
849
890
def _update_file(
    self,
    filename,
    wc_tracked,
    p1_tracked,
    p2_info=False,
    possibly_dirty=False,
    parentfiledata=None,
):
    """Shared implementation behind update_file and
    hacky_extension_update_file; context validation is the caller's job."""

    # note: I do not think we need to double check name clash here since we
    # are in a update/merge case that should already have taken care of
    # this. The test agrees

    self._dirty = True
    old_entry = self._map.get(filename)
    if old_entry is None:
        prev_tracked = False
    else:
        prev_tracked = old_entry.tracked
    if prev_tracked != wc_tracked:
        # the tracked set changed; a tracked-hint write is needed
        self._dirty_tracked_set = True

    self._map.reset_state(
        filename,
        wc_tracked,
        p1_tracked,
        p2_info=p2_info,
        has_meaningful_mtime=not possibly_dirty,
        parentfiledata=parentfiledata,
    )
881
922
def _check_new_tracked_filename(self, filename):
    """Abort if *filename* cannot become a tracked file.

    Rejects invalid filenames, names already tracked as directories,
    names whose ancestor directories clash with tracked files, and
    names outside the sparse profile.
    """
    scmutil.checkfilename(filename)
    if self._map.hastrackeddir(filename):
        msg = _(b'directory %r already in dirstate')
        msg %= pycompat.bytestr(filename)
        raise error.Abort(msg)
    # shadows
    for d in pathutil.finddirs(filename):
        if self._map.hastrackeddir(d):
            break
        entry = self._map.get(d)
        if entry is not None and not entry.removed:
            msg = _(b'file %r in dirstate clashes with %r')
            msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
            raise error.Abort(msg)
    self._check_sparse(filename)
898
939
def _check_sparse(self, filename):
    """Check that a filename is inside the sparse profile"""
    sparsematch = self._sparsematcher
    # a None or always-matching matcher means sparse is not restricting us
    if sparsematch is not None and not sparsematch.always():
        if not sparsematch(filename):
            msg = _(b"cannot add '%s' - it is outside the sparse checkout")
            hint = _(
                b'include file with `hg debugsparse --include <pattern>` or use '
                b'`hg add -s <file>` to include file directory while adding'
            )
            raise error.Abort(msg % filename, hint=hint)
910
951
def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
    """Return the case-folded on-disk spelling of *path*.

    The result is cached in *storemap* under the case-normalized key
    *normed*. Leading directory components are normalized recursively
    before the final component is resolved against the filesystem.
    """
    if exists is None:
        exists = os.path.lexists(os.path.join(self._root, path))
    if not exists:
        # Maybe a path component exists
        if not ignoremissing and b'/' in path:
            d, f = path.rsplit(b'/', 1)
            d = self._normalize(d, False, ignoremissing, None)
            folded = d + b"/" + f
        else:
            # No path components, preserve original case
            folded = path
    else:
        # recursively normalize leading directory components
        # against dirstate
        if b'/' in normed:
            d, f = normed.rsplit(b'/', 1)
            d = self._normalize(d, False, ignoremissing, True)
            r = self._root + b"/" + d
            folded = d + b"/" + util.fspath(f, r)
        else:
            folded = util.fspath(normed, self._root)
    storemap[normed] = folded

    return folded
936
977
def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
    """Case-normalize *path*, consulting the file fold map only.

    Unlike _normalize this never matches against tracked directories.
    """
    normed = util.normcase(path)
    folded = self._map.filefoldmap.get(normed, None)
    if folded is None:
        if isknown:
            # the caller got the name from disk; trust its spelling
            folded = path
        else:
            folded = self._discoverpath(
                path, normed, ignoremissing, exists, self._map.filefoldmap
            )
    return folded
948
989
def _normalize(self, path, isknown, ignoremissing=False, exists=None):
    """Case-normalize *path*, consulting both file and directory fold maps."""
    normed = util.normcase(path)
    folded = self._map.filefoldmap.get(normed, None)
    if folded is None:
        folded = self._map.dirfoldmap.get(normed, None)
    if folded is None:
        if isknown:
            # the caller got the name from disk; trust its spelling
            folded = path
        else:
            # store discovered result in dirfoldmap so that future
            # normalizefile calls don't start matching directories
            folded = self._discoverpath(
                path, normed, ignoremissing, exists, self._map.dirfoldmap
            )
    return folded
964
1005
def normalize(self, path, isknown=False, ignoremissing=False):
    """
    normalize the case of a pathname when on a casefolding filesystem

    isknown specifies whether the filename came from walking the
    disk, to avoid extra filesystem access.

    If ignoremissing is True, missing path are returned
    unchanged. Otherwise, we try harder to normalize possibly
    existing path components.

    The normalized case is determined based on the following precedence:

    - version of name already stored in the dirstate
    - version of name stored on disk
    - version provided via command arguments
    """
    # on case-sensitive filesystems no folding is necessary
    if not self._checkcase:
        return path
    return self._normalize(path, isknown, ignoremissing)
986
1027
# XXX this method is barely used, as a result:
# - its semantic is unclear
# - do we really needs it ?
@requires_changing_parents
def clear(self):
    """Drop every entry from the dirstate map and mark it dirty."""
    self._map.clear()
    self._dirty = True
994
1035
@requires_changing_parents
def rebuild(self, parent, allfiles, changedfiles=None):
    """Reset the dirstate to describe *parent* for *allfiles*.

    When *changedfiles* is given only those entries are refreshed (and
    entries absent from *allfiles* are dropped); otherwise the whole
    dirstate is rebuilt from scratch.
    """
    matcher = self._sparsematcher
    if matcher is not None and not matcher.always():
        # should not add non-matching files
        allfiles = [f for f in allfiles if matcher(f)]
        if changedfiles:
            changedfiles = [f for f in changedfiles if matcher(f)]

        if changedfiles is not None:
            # these files will be deleted from the dirstate when they are
            # not found to be in allfiles
            dirstatefilestoremove = {f for f in self if not matcher(f)}
            changedfiles = dirstatefilestoremove.union(changedfiles)

    if changedfiles is None:
        # Rebuild entire dirstate
        to_lookup = allfiles
        to_drop = []
        self.clear()
    elif len(changedfiles) < 10:
        # Avoid turning allfiles into a set, which can be expensive if it's
        # large.
        to_lookup = []
        to_drop = []
        for f in changedfiles:
            if f in allfiles:
                to_lookup.append(f)
            else:
                to_drop.append(f)
    else:
        changedfilesset = set(changedfiles)
        to_lookup = changedfilesset & set(allfiles)
        to_drop = changedfilesset - to_lookup

    if self._origpl is None:
        # remember the pre-change parents
        self._origpl = self._pl
    self._map.setparents(parent, self._nodeconstants.nullid)

    for f in to_lookup:
        if self.in_merge:
            self.set_tracked(f)
        else:
            self._map.reset_state(
                f,
                wc_tracked=True,
                p1_tracked=True,
            )
    for f in to_drop:
        self._map.reset_state(f)

    self._dirty = True
1047
1088
1048 def _setup_tr_abort(self, tr):
1089 def _setup_tr_abort(self, tr):
1049 """make sure we invalidate the current change on abort"""
1090 """make sure we invalidate the current change on abort"""
1050 if tr is None:
1091 if tr is None:
1051 return
1092 return
1052
1093
1053 def on_abort(tr):
1094 def on_abort(tr):
1054 self._attached_to_a_transaction = False
1095 self._attached_to_a_transaction = False
1055 self.invalidate()
1096 self.invalidate()
1056
1097
1057 tr.addabort(
1098 tr.addabort(
1058 b'dirstate-invalidate%s' % self._tr_key_suffix,
1099 b'dirstate-invalidate%s' % self._tr_key_suffix,
1059 on_abort,
1100 on_abort,
1060 )
1101 )
1061
1102
1062 def write(self, tr):
1103 def write(self, tr):
1063 if not self._dirty:
1104 if not self._dirty:
1064 return
1105 return
1065 # make sure we don't request a write of invalidated content
1106 # make sure we don't request a write of invalidated content
1066 # XXX move before the dirty check once `unlock` stop calling `write`
1107 # XXX move before the dirty check once `unlock` stop calling `write`
1067 assert not self._invalidated_context
1108 assert not self._invalidated_context
1068
1109
1069 write_key = self._use_tracked_hint and self._dirty_tracked_set
1110 write_key = self._use_tracked_hint and self._dirty_tracked_set
1070 if tr:
1111 if tr:
1071
1112
1072 self._setup_tr_abort(tr)
1113 self._setup_tr_abort(tr)
1073 self._attached_to_a_transaction = True
1114 self._attached_to_a_transaction = True
1074
1115
1075 def on_success(f):
1116 def on_success(f):
1076 self._attached_to_a_transaction = False
1117 self._attached_to_a_transaction = False
1077 self._writedirstate(tr, f),
1118 self._writedirstate(tr, f),
1078
1119
1079 # delay writing in-memory changes out
1120 # delay writing in-memory changes out
1080 tr.addfilegenerator(
1121 tr.addfilegenerator(
1081 b'dirstate-1-main%s' % self._tr_key_suffix,
1122 b'dirstate-1-main%s' % self._tr_key_suffix,
1082 (self._filename,),
1123 (self._filename,),
1083 on_success,
1124 on_success,
1084 location=b'plain',
1125 location=b'plain',
1085 post_finalize=True,
1126 post_finalize=True,
1086 )
1127 )
1087 if write_key:
1128 if write_key:
1088 tr.addfilegenerator(
1129 tr.addfilegenerator(
1089 b'dirstate-2-key-post%s' % self._tr_key_suffix,
1130 b'dirstate-2-key-post%s' % self._tr_key_suffix,
1090 (self._filename_th,),
1131 (self._filename_th,),
1091 lambda f: self._write_tracked_hint(tr, f),
1132 lambda f: self._write_tracked_hint(tr, f),
1092 location=b'plain',
1133 location=b'plain',
1093 post_finalize=True,
1134 post_finalize=True,
1094 )
1135 )
1095 return
1136 return
1096
1137
1097 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
1138 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
1098 with file(self._filename) as f:
1139 with file(self._filename) as f:
1099 self._writedirstate(tr, f)
1140 self._writedirstate(tr, f)
1100 if write_key:
1141 if write_key:
1101 # we update the key-file after writing to make sure reader have a
1142 # we update the key-file after writing to make sure reader have a
1102 # key that match the newly written content
1143 # key that match the newly written content
1103 with file(self._filename_th) as f:
1144 with file(self._filename_th) as f:
1104 self._write_tracked_hint(tr, f)
1145 self._write_tracked_hint(tr, f)
1105
1146
1106 def delete_tracked_hint(self):
1147 def delete_tracked_hint(self):
1107 """remove the tracked_hint file
1148 """remove the tracked_hint file
1108
1149
1109 To be used by format downgrades operation"""
1150 To be used by format downgrades operation"""
1110 self._opener.unlink(self._filename_th)
1151 self._opener.unlink(self._filename_th)
1111 self._use_tracked_hint = False
1152 self._use_tracked_hint = False
1112
1153
1113 def addparentchangecallback(self, category, callback):
1154 def addparentchangecallback(self, category, callback):
1114 """add a callback to be called when the wd parents are changed
1155 """add a callback to be called when the wd parents are changed
1115
1156
1116 Callback will be called with the following arguments:
1157 Callback will be called with the following arguments:
1117 dirstate, (oldp1, oldp2), (newp1, newp2)
1158 dirstate, (oldp1, oldp2), (newp1, newp2)
1118
1159
1119 Category is a unique identifier to allow overwriting an old callback
1160 Category is a unique identifier to allow overwriting an old callback
1120 with a newer callback.
1161 with a newer callback.
1121 """
1162 """
1122 self._plchangecallbacks[category] = callback
1163 self._plchangecallbacks[category] = callback
1123
1164
1124 def _writedirstate(self, tr, st):
1165 def _writedirstate(self, tr, st):
1125 # make sure we don't write invalidated content
1166 # make sure we don't write invalidated content
1126 assert not self._invalidated_context
1167 assert not self._invalidated_context
1127 # notify callbacks about parents change
1168 # notify callbacks about parents change
1128 if self._origpl is not None and self._origpl != self._pl:
1169 if self._origpl is not None and self._origpl != self._pl:
1129 for c, callback in sorted(self._plchangecallbacks.items()):
1170 for c, callback in sorted(self._plchangecallbacks.items()):
1130 callback(self, self._origpl, self._pl)
1171 callback(self, self._origpl, self._pl)
1131 self._origpl = None
1172 self._origpl = None
1132 self._map.write(tr, st)
1173 self._map.write(tr, st)
1133 self._dirty = False
1174 self._dirty = False
1134 self._dirty_tracked_set = False
1175 self._dirty_tracked_set = False
1135
1176
1136 def _write_tracked_hint(self, tr, f):
1177 def _write_tracked_hint(self, tr, f):
1137 key = node.hex(uuid.uuid4().bytes)
1178 key = node.hex(uuid.uuid4().bytes)
1138 f.write(b"1\n%s\n" % key) # 1 is the format version
1179 f.write(b"1\n%s\n" % key) # 1 is the format version
1139
1180
1140 def _dirignore(self, f):
1181 def _dirignore(self, f):
1141 if self._ignore(f):
1182 if self._ignore(f):
1142 return True
1183 return True
1143 for p in pathutil.finddirs(f):
1184 for p in pathutil.finddirs(f):
1144 if self._ignore(p):
1185 if self._ignore(p):
1145 return True
1186 return True
1146 return False
1187 return False
1147
1188
1148 def _ignorefiles(self):
1189 def _ignorefiles(self):
1149 files = []
1190 files = []
1150 if os.path.exists(self._join(b'.hgignore')):
1191 if os.path.exists(self._join(b'.hgignore')):
1151 files.append(self._join(b'.hgignore'))
1192 files.append(self._join(b'.hgignore'))
1152 for name, path in self._ui.configitems(b"ui"):
1193 for name, path in self._ui.configitems(b"ui"):
1153 if name == b'ignore' or name.startswith(b'ignore.'):
1194 if name == b'ignore' or name.startswith(b'ignore.'):
1154 # we need to use os.path.join here rather than self._join
1195 # we need to use os.path.join here rather than self._join
1155 # because path is arbitrary and user-specified
1196 # because path is arbitrary and user-specified
1156 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1197 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1157 return files
1198 return files
1158
1199
1159 def _ignorefileandline(self, f):
1200 def _ignorefileandline(self, f):
1160 files = collections.deque(self._ignorefiles())
1201 files = collections.deque(self._ignorefiles())
1161 visited = set()
1202 visited = set()
1162 while files:
1203 while files:
1163 i = files.popleft()
1204 i = files.popleft()
1164 patterns = matchmod.readpatternfile(
1205 patterns = matchmod.readpatternfile(
1165 i, self._ui.warn, sourceinfo=True
1206 i, self._ui.warn, sourceinfo=True
1166 )
1207 )
1167 for pattern, lineno, line in patterns:
1208 for pattern, lineno, line in patterns:
1168 kind, p = matchmod._patsplit(pattern, b'glob')
1209 kind, p = matchmod._patsplit(pattern, b'glob')
1169 if kind == b"subinclude":
1210 if kind == b"subinclude":
1170 if p not in visited:
1211 if p not in visited:
1171 files.append(p)
1212 files.append(p)
1172 continue
1213 continue
1173 m = matchmod.match(
1214 m = matchmod.match(
1174 self._root, b'', [], [pattern], warn=self._ui.warn
1215 self._root, b'', [], [pattern], warn=self._ui.warn
1175 )
1216 )
1176 if m(f):
1217 if m(f):
1177 return (i, lineno, line)
1218 return (i, lineno, line)
1178 visited.add(i)
1219 visited.add(i)
1179 return (None, -1, b"")
1220 return (None, -1, b"")
1180
1221
1181 def _walkexplicit(self, match, subrepos):
1222 def _walkexplicit(self, match, subrepos):
1182 """Get stat data about the files explicitly specified by match.
1223 """Get stat data about the files explicitly specified by match.
1183
1224
1184 Return a triple (results, dirsfound, dirsnotfound).
1225 Return a triple (results, dirsfound, dirsnotfound).
1185 - results is a mapping from filename to stat result. It also contains
1226 - results is a mapping from filename to stat result. It also contains
1186 listings mapping subrepos and .hg to None.
1227 listings mapping subrepos and .hg to None.
1187 - dirsfound is a list of files found to be directories.
1228 - dirsfound is a list of files found to be directories.
1188 - dirsnotfound is a list of files that the dirstate thinks are
1229 - dirsnotfound is a list of files that the dirstate thinks are
1189 directories and that were not found."""
1230 directories and that were not found."""
1190
1231
1191 def badtype(mode):
1232 def badtype(mode):
1192 kind = _(b'unknown')
1233 kind = _(b'unknown')
1193 if stat.S_ISCHR(mode):
1234 if stat.S_ISCHR(mode):
1194 kind = _(b'character device')
1235 kind = _(b'character device')
1195 elif stat.S_ISBLK(mode):
1236 elif stat.S_ISBLK(mode):
1196 kind = _(b'block device')
1237 kind = _(b'block device')
1197 elif stat.S_ISFIFO(mode):
1238 elif stat.S_ISFIFO(mode):
1198 kind = _(b'fifo')
1239 kind = _(b'fifo')
1199 elif stat.S_ISSOCK(mode):
1240 elif stat.S_ISSOCK(mode):
1200 kind = _(b'socket')
1241 kind = _(b'socket')
1201 elif stat.S_ISDIR(mode):
1242 elif stat.S_ISDIR(mode):
1202 kind = _(b'directory')
1243 kind = _(b'directory')
1203 return _(b'unsupported file type (type is %s)') % kind
1244 return _(b'unsupported file type (type is %s)') % kind
1204
1245
1205 badfn = match.bad
1246 badfn = match.bad
1206 dmap = self._map
1247 dmap = self._map
1207 lstat = os.lstat
1248 lstat = os.lstat
1208 getkind = stat.S_IFMT
1249 getkind = stat.S_IFMT
1209 dirkind = stat.S_IFDIR
1250 dirkind = stat.S_IFDIR
1210 regkind = stat.S_IFREG
1251 regkind = stat.S_IFREG
1211 lnkkind = stat.S_IFLNK
1252 lnkkind = stat.S_IFLNK
1212 join = self._join
1253 join = self._join
1213 dirsfound = []
1254 dirsfound = []
1214 foundadd = dirsfound.append
1255 foundadd = dirsfound.append
1215 dirsnotfound = []
1256 dirsnotfound = []
1216 notfoundadd = dirsnotfound.append
1257 notfoundadd = dirsnotfound.append
1217
1258
1218 if not match.isexact() and self._checkcase:
1259 if not match.isexact() and self._checkcase:
1219 normalize = self._normalize
1260 normalize = self._normalize
1220 else:
1261 else:
1221 normalize = None
1262 normalize = None
1222
1263
1223 files = sorted(match.files())
1264 files = sorted(match.files())
1224 subrepos.sort()
1265 subrepos.sort()
1225 i, j = 0, 0
1266 i, j = 0, 0
1226 while i < len(files) and j < len(subrepos):
1267 while i < len(files) and j < len(subrepos):
1227 subpath = subrepos[j] + b"/"
1268 subpath = subrepos[j] + b"/"
1228 if files[i] < subpath:
1269 if files[i] < subpath:
1229 i += 1
1270 i += 1
1230 continue
1271 continue
1231 while i < len(files) and files[i].startswith(subpath):
1272 while i < len(files) and files[i].startswith(subpath):
1232 del files[i]
1273 del files[i]
1233 j += 1
1274 j += 1
1234
1275
1235 if not files or b'' in files:
1276 if not files or b'' in files:
1236 files = [b'']
1277 files = [b'']
1237 # constructing the foldmap is expensive, so don't do it for the
1278 # constructing the foldmap is expensive, so don't do it for the
1238 # common case where files is ['']
1279 # common case where files is ['']
1239 normalize = None
1280 normalize = None
1240 results = dict.fromkeys(subrepos)
1281 results = dict.fromkeys(subrepos)
1241 results[b'.hg'] = None
1282 results[b'.hg'] = None
1242
1283
1243 for ff in files:
1284 for ff in files:
1244 if normalize:
1285 if normalize:
1245 nf = normalize(ff, False, True)
1286 nf = normalize(ff, False, True)
1246 else:
1287 else:
1247 nf = ff
1288 nf = ff
1248 if nf in results:
1289 if nf in results:
1249 continue
1290 continue
1250
1291
1251 try:
1292 try:
1252 st = lstat(join(nf))
1293 st = lstat(join(nf))
1253 kind = getkind(st.st_mode)
1294 kind = getkind(st.st_mode)
1254 if kind == dirkind:
1295 if kind == dirkind:
1255 if nf in dmap:
1296 if nf in dmap:
1256 # file replaced by dir on disk but still in dirstate
1297 # file replaced by dir on disk but still in dirstate
1257 results[nf] = None
1298 results[nf] = None
1258 foundadd((nf, ff))
1299 foundadd((nf, ff))
1259 elif kind == regkind or kind == lnkkind:
1300 elif kind == regkind or kind == lnkkind:
1260 results[nf] = st
1301 results[nf] = st
1261 else:
1302 else:
1262 badfn(ff, badtype(kind))
1303 badfn(ff, badtype(kind))
1263 if nf in dmap:
1304 if nf in dmap:
1264 results[nf] = None
1305 results[nf] = None
1265 except (OSError) as inst:
1306 except (OSError) as inst:
1266 # nf not found on disk - it is dirstate only
1307 # nf not found on disk - it is dirstate only
1267 if nf in dmap: # does it exactly match a missing file?
1308 if nf in dmap: # does it exactly match a missing file?
1268 results[nf] = None
1309 results[nf] = None
1269 else: # does it match a missing directory?
1310 else: # does it match a missing directory?
1270 if self._map.hasdir(nf):
1311 if self._map.hasdir(nf):
1271 notfoundadd(nf)
1312 notfoundadd(nf)
1272 else:
1313 else:
1273 badfn(ff, encoding.strtolocal(inst.strerror))
1314 badfn(ff, encoding.strtolocal(inst.strerror))
1274
1315
1275 # match.files() may contain explicitly-specified paths that shouldn't
1316 # match.files() may contain explicitly-specified paths that shouldn't
1276 # be taken; drop them from the list of files found. dirsfound/notfound
1317 # be taken; drop them from the list of files found. dirsfound/notfound
1277 # aren't filtered here because they will be tested later.
1318 # aren't filtered here because they will be tested later.
1278 if match.anypats():
1319 if match.anypats():
1279 for f in list(results):
1320 for f in list(results):
1280 if f == b'.hg' or f in subrepos:
1321 if f == b'.hg' or f in subrepos:
1281 # keep sentinel to disable further out-of-repo walks
1322 # keep sentinel to disable further out-of-repo walks
1282 continue
1323 continue
1283 if not match(f):
1324 if not match(f):
1284 del results[f]
1325 del results[f]
1285
1326
1286 # Case insensitive filesystems cannot rely on lstat() failing to detect
1327 # Case insensitive filesystems cannot rely on lstat() failing to detect
1287 # a case-only rename. Prune the stat object for any file that does not
1328 # a case-only rename. Prune the stat object for any file that does not
1288 # match the case in the filesystem, if there are multiple files that
1329 # match the case in the filesystem, if there are multiple files that
1289 # normalize to the same path.
1330 # normalize to the same path.
1290 if match.isexact() and self._checkcase:
1331 if match.isexact() and self._checkcase:
1291 normed = {}
1332 normed = {}
1292
1333
1293 for f, st in results.items():
1334 for f, st in results.items():
1294 if st is None:
1335 if st is None:
1295 continue
1336 continue
1296
1337
1297 nc = util.normcase(f)
1338 nc = util.normcase(f)
1298 paths = normed.get(nc)
1339 paths = normed.get(nc)
1299
1340
1300 if paths is None:
1341 if paths is None:
1301 paths = set()
1342 paths = set()
1302 normed[nc] = paths
1343 normed[nc] = paths
1303
1344
1304 paths.add(f)
1345 paths.add(f)
1305
1346
1306 for norm, paths in normed.items():
1347 for norm, paths in normed.items():
1307 if len(paths) > 1:
1348 if len(paths) > 1:
1308 for path in paths:
1349 for path in paths:
1309 folded = self._discoverpath(
1350 folded = self._discoverpath(
1310 path, norm, True, None, self._map.dirfoldmap
1351 path, norm, True, None, self._map.dirfoldmap
1311 )
1352 )
1312 if path != folded:
1353 if path != folded:
1313 results[path] = None
1354 results[path] = None
1314
1355
1315 return results, dirsfound, dirsnotfound
1356 return results, dirsfound, dirsnotfound
1316
1357
1317 def walk(self, match, subrepos, unknown, ignored, full=True):
1358 def walk(self, match, subrepos, unknown, ignored, full=True):
1318 """
1359 """
1319 Walk recursively through the directory tree, finding all files
1360 Walk recursively through the directory tree, finding all files
1320 matched by match.
1361 matched by match.
1321
1362
1322 If full is False, maybe skip some known-clean files.
1363 If full is False, maybe skip some known-clean files.
1323
1364
1324 Return a dict mapping filename to stat-like object (either
1365 Return a dict mapping filename to stat-like object (either
1325 mercurial.osutil.stat instance or return value of os.stat()).
1366 mercurial.osutil.stat instance or return value of os.stat()).
1326
1367
1327 """
1368 """
1328 # full is a flag that extensions that hook into walk can use -- this
1369 # full is a flag that extensions that hook into walk can use -- this
1329 # implementation doesn't use it at all. This satisfies the contract
1370 # implementation doesn't use it at all. This satisfies the contract
1330 # because we only guarantee a "maybe".
1371 # because we only guarantee a "maybe".
1331
1372
1332 if ignored:
1373 if ignored:
1333 ignore = util.never
1374 ignore = util.never
1334 dirignore = util.never
1375 dirignore = util.never
1335 elif unknown:
1376 elif unknown:
1336 ignore = self._ignore
1377 ignore = self._ignore
1337 dirignore = self._dirignore
1378 dirignore = self._dirignore
1338 else:
1379 else:
1339 # if not unknown and not ignored, drop dir recursion and step 2
1380 # if not unknown and not ignored, drop dir recursion and step 2
1340 ignore = util.always
1381 ignore = util.always
1341 dirignore = util.always
1382 dirignore = util.always
1342
1383
1343 if self._sparsematchfn is not None:
1384 if self._sparsematchfn is not None:
1344 em = matchmod.exact(match.files())
1385 em = matchmod.exact(match.files())
1345 sm = matchmod.unionmatcher([self._sparsematcher, em])
1386 sm = matchmod.unionmatcher([self._sparsematcher, em])
1346 match = matchmod.intersectmatchers(match, sm)
1387 match = matchmod.intersectmatchers(match, sm)
1347
1388
1348 matchfn = match.matchfn
1389 matchfn = match.matchfn
1349 matchalways = match.always()
1390 matchalways = match.always()
1350 matchtdir = match.traversedir
1391 matchtdir = match.traversedir
1351 dmap = self._map
1392 dmap = self._map
1352 listdir = util.listdir
1393 listdir = util.listdir
1353 lstat = os.lstat
1394 lstat = os.lstat
1354 dirkind = stat.S_IFDIR
1395 dirkind = stat.S_IFDIR
1355 regkind = stat.S_IFREG
1396 regkind = stat.S_IFREG
1356 lnkkind = stat.S_IFLNK
1397 lnkkind = stat.S_IFLNK
1357 join = self._join
1398 join = self._join
1358
1399
1359 exact = skipstep3 = False
1400 exact = skipstep3 = False
1360 if match.isexact(): # match.exact
1401 if match.isexact(): # match.exact
1361 exact = True
1402 exact = True
1362 dirignore = util.always # skip step 2
1403 dirignore = util.always # skip step 2
1363 elif match.prefix(): # match.match, no patterns
1404 elif match.prefix(): # match.match, no patterns
1364 skipstep3 = True
1405 skipstep3 = True
1365
1406
1366 if not exact and self._checkcase:
1407 if not exact and self._checkcase:
1367 normalize = self._normalize
1408 normalize = self._normalize
1368 normalizefile = self._normalizefile
1409 normalizefile = self._normalizefile
1369 skipstep3 = False
1410 skipstep3 = False
1370 else:
1411 else:
1371 normalize = self._normalize
1412 normalize = self._normalize
1372 normalizefile = None
1413 normalizefile = None
1373
1414
1374 # step 1: find all explicit files
1415 # step 1: find all explicit files
1375 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1416 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1376 if matchtdir:
1417 if matchtdir:
1377 for d in work:
1418 for d in work:
1378 matchtdir(d[0])
1419 matchtdir(d[0])
1379 for d in dirsnotfound:
1420 for d in dirsnotfound:
1380 matchtdir(d)
1421 matchtdir(d)
1381
1422
1382 skipstep3 = skipstep3 and not (work or dirsnotfound)
1423 skipstep3 = skipstep3 and not (work or dirsnotfound)
1383 work = [d for d in work if not dirignore(d[0])]
1424 work = [d for d in work if not dirignore(d[0])]
1384
1425
1385 # step 2: visit subdirectories
1426 # step 2: visit subdirectories
1386 def traverse(work, alreadynormed):
1427 def traverse(work, alreadynormed):
1387 wadd = work.append
1428 wadd = work.append
1388 while work:
1429 while work:
1389 tracing.counter('dirstate.walk work', len(work))
1430 tracing.counter('dirstate.walk work', len(work))
1390 nd = work.pop()
1431 nd = work.pop()
1391 visitentries = match.visitchildrenset(nd)
1432 visitentries = match.visitchildrenset(nd)
1392 if not visitentries:
1433 if not visitentries:
1393 continue
1434 continue
1394 if visitentries == b'this' or visitentries == b'all':
1435 if visitentries == b'this' or visitentries == b'all':
1395 visitentries = None
1436 visitentries = None
1396 skip = None
1437 skip = None
1397 if nd != b'':
1438 if nd != b'':
1398 skip = b'.hg'
1439 skip = b'.hg'
1399 try:
1440 try:
1400 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1441 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1401 entries = listdir(join(nd), stat=True, skip=skip)
1442 entries = listdir(join(nd), stat=True, skip=skip)
1402 except (PermissionError, FileNotFoundError) as inst:
1443 except (PermissionError, FileNotFoundError) as inst:
1403 match.bad(
1444 match.bad(
1404 self.pathto(nd), encoding.strtolocal(inst.strerror)
1445 self.pathto(nd), encoding.strtolocal(inst.strerror)
1405 )
1446 )
1406 continue
1447 continue
1407 for f, kind, st in entries:
1448 for f, kind, st in entries:
1408 # Some matchers may return files in the visitentries set,
1449 # Some matchers may return files in the visitentries set,
1409 # instead of 'this', if the matcher explicitly mentions them
1450 # instead of 'this', if the matcher explicitly mentions them
1410 # and is not an exactmatcher. This is acceptable; we do not
1451 # and is not an exactmatcher. This is acceptable; we do not
1411 # make any hard assumptions about file-or-directory below
1452 # make any hard assumptions about file-or-directory below
1412 # based on the presence of `f` in visitentries. If
1453 # based on the presence of `f` in visitentries. If
1413 # visitchildrenset returned a set, we can always skip the
1454 # visitchildrenset returned a set, we can always skip the
1414 # entries *not* in the set it provided regardless of whether
1455 # entries *not* in the set it provided regardless of whether
1415 # they're actually a file or a directory.
1456 # they're actually a file or a directory.
1416 if visitentries and f not in visitentries:
1457 if visitentries and f not in visitentries:
1417 continue
1458 continue
1418 if normalizefile:
1459 if normalizefile:
1419 # even though f might be a directory, we're only
1460 # even though f might be a directory, we're only
1420 # interested in comparing it to files currently in the
1461 # interested in comparing it to files currently in the
1421 # dmap -- therefore normalizefile is enough
1462 # dmap -- therefore normalizefile is enough
1422 nf = normalizefile(
1463 nf = normalizefile(
1423 nd and (nd + b"/" + f) or f, True, True
1464 nd and (nd + b"/" + f) or f, True, True
1424 )
1465 )
1425 else:
1466 else:
1426 nf = nd and (nd + b"/" + f) or f
1467 nf = nd and (nd + b"/" + f) or f
1427 if nf not in results:
1468 if nf not in results:
1428 if kind == dirkind:
1469 if kind == dirkind:
1429 if not ignore(nf):
1470 if not ignore(nf):
1430 if matchtdir:
1471 if matchtdir:
1431 matchtdir(nf)
1472 matchtdir(nf)
1432 wadd(nf)
1473 wadd(nf)
1433 if nf in dmap and (matchalways or matchfn(nf)):
1474 if nf in dmap and (matchalways or matchfn(nf)):
1434 results[nf] = None
1475 results[nf] = None
1435 elif kind == regkind or kind == lnkkind:
1476 elif kind == regkind or kind == lnkkind:
1436 if nf in dmap:
1477 if nf in dmap:
1437 if matchalways or matchfn(nf):
1478 if matchalways or matchfn(nf):
1438 results[nf] = st
1479 results[nf] = st
1439 elif (matchalways or matchfn(nf)) and not ignore(
1480 elif (matchalways or matchfn(nf)) and not ignore(
1440 nf
1481 nf
1441 ):
1482 ):
1442 # unknown file -- normalize if necessary
1483 # unknown file -- normalize if necessary
1443 if not alreadynormed:
1484 if not alreadynormed:
1444 nf = normalize(nf, False, True)
1485 nf = normalize(nf, False, True)
1445 results[nf] = st
1486 results[nf] = st
1446 elif nf in dmap and (matchalways or matchfn(nf)):
1487 elif nf in dmap and (matchalways or matchfn(nf)):
1447 results[nf] = None
1488 results[nf] = None
1448
1489
1449 for nd, d in work:
1490 for nd, d in work:
1450 # alreadynormed means that processwork doesn't have to do any
1491 # alreadynormed means that processwork doesn't have to do any
1451 # expensive directory normalization
1492 # expensive directory normalization
1452 alreadynormed = not normalize or nd == d
1493 alreadynormed = not normalize or nd == d
1453 traverse([d], alreadynormed)
1494 traverse([d], alreadynormed)
1454
1495
1455 for s in subrepos:
1496 for s in subrepos:
1456 del results[s]
1497 del results[s]
1457 del results[b'.hg']
1498 del results[b'.hg']
1458
1499
1459 # step 3: visit remaining files from dmap
1500 # step 3: visit remaining files from dmap
1460 if not skipstep3 and not exact:
1501 if not skipstep3 and not exact:
1461 # If a dmap file is not in results yet, it was either
1502 # If a dmap file is not in results yet, it was either
1462 # a) not matching matchfn b) ignored, c) missing, or d) under a
1503 # a) not matching matchfn b) ignored, c) missing, or d) under a
1463 # symlink directory.
1504 # symlink directory.
1464 if not results and matchalways:
1505 if not results and matchalways:
1465 visit = [f for f in dmap]
1506 visit = [f for f in dmap]
1466 else:
1507 else:
1467 visit = [f for f in dmap if f not in results and matchfn(f)]
1508 visit = [f for f in dmap if f not in results and matchfn(f)]
1468 visit.sort()
1509 visit.sort()
1469
1510
1470 if unknown:
1511 if unknown:
1471 # unknown == True means we walked all dirs under the roots
1512 # unknown == True means we walked all dirs under the roots
1472 # that wasn't ignored, and everything that matched was stat'ed
1513 # that wasn't ignored, and everything that matched was stat'ed
1473 # and is already in results.
1514 # and is already in results.
1474 # The rest must thus be ignored or under a symlink.
1515 # The rest must thus be ignored or under a symlink.
1475 audit_path = pathutil.pathauditor(self._root, cached=True)
1516 audit_path = pathutil.pathauditor(self._root, cached=True)
1476
1517
1477 for nf in iter(visit):
1518 for nf in iter(visit):
1478 # If a stat for the same file was already added with a
1519 # If a stat for the same file was already added with a
1479 # different case, don't add one for this, since that would
1520 # different case, don't add one for this, since that would
1480 # make it appear as if the file exists under both names
1521 # make it appear as if the file exists under both names
1481 # on disk.
1522 # on disk.
1482 if (
1523 if (
1483 normalizefile
1524 normalizefile
1484 and normalizefile(nf, True, True) in results
1525 and normalizefile(nf, True, True) in results
1485 ):
1526 ):
1486 results[nf] = None
1527 results[nf] = None
1487 # Report ignored items in the dmap as long as they are not
1528 # Report ignored items in the dmap as long as they are not
1488 # under a symlink directory.
1529 # under a symlink directory.
1489 elif audit_path.check(nf):
1530 elif audit_path.check(nf):
1490 try:
1531 try:
1491 results[nf] = lstat(join(nf))
1532 results[nf] = lstat(join(nf))
1492 # file was just ignored, no links, and exists
1533 # file was just ignored, no links, and exists
1493 except OSError:
1534 except OSError:
1494 # file doesn't exist
1535 # file doesn't exist
1495 results[nf] = None
1536 results[nf] = None
1496 else:
1537 else:
1497 # It's either missing or under a symlink directory
1538 # It's either missing or under a symlink directory
1498 # which we in this case report as missing
1539 # which we in this case report as missing
1499 results[nf] = None
1540 results[nf] = None
1500 else:
1541 else:
1501 # We may not have walked the full directory tree above,
1542 # We may not have walked the full directory tree above,
1502 # so stat and check everything we missed.
1543 # so stat and check everything we missed.
1503 iv = iter(visit)
1544 iv = iter(visit)
1504 for st in util.statfiles([join(i) for i in visit]):
1545 for st in util.statfiles([join(i) for i in visit]):
1505 results[next(iv)] = st
1546 results[next(iv)] = st
1506 return results
1547 return results
1507
1548
1508 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1549 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1509 if self._sparsematchfn is not None:
1550 if self._sparsematchfn is not None:
1510 em = matchmod.exact(matcher.files())
1551 em = matchmod.exact(matcher.files())
1511 sm = matchmod.unionmatcher([self._sparsematcher, em])
1552 sm = matchmod.unionmatcher([self._sparsematcher, em])
1512 matcher = matchmod.intersectmatchers(matcher, sm)
1553 matcher = matchmod.intersectmatchers(matcher, sm)
1513 # Force Rayon (Rust parallelism library) to respect the number of
1554 # Force Rayon (Rust parallelism library) to respect the number of
1514 # workers. This is a temporary workaround until Rust code knows
1555 # workers. This is a temporary workaround until Rust code knows
1515 # how to read the config file.
1556 # how to read the config file.
1516 numcpus = self._ui.configint(b"worker", b"numcpus")
1557 numcpus = self._ui.configint(b"worker", b"numcpus")
1517 if numcpus is not None:
1558 if numcpus is not None:
1518 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1559 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1519
1560
1520 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1561 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1521 if not workers_enabled:
1562 if not workers_enabled:
1522 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1563 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1523
1564
1524 (
1565 (
1525 lookup,
1566 lookup,
1526 modified,
1567 modified,
1527 added,
1568 added,
1528 removed,
1569 removed,
1529 deleted,
1570 deleted,
1530 clean,
1571 clean,
1531 ignored,
1572 ignored,
1532 unknown,
1573 unknown,
1533 warnings,
1574 warnings,
1534 bad,
1575 bad,
1535 traversed,
1576 traversed,
1536 dirty,
1577 dirty,
1537 ) = rustmod.status(
1578 ) = rustmod.status(
1538 self._map._map,
1579 self._map._map,
1539 matcher,
1580 matcher,
1540 self._rootdir,
1581 self._rootdir,
1541 self._ignorefiles(),
1582 self._ignorefiles(),
1542 self._checkexec,
1583 self._checkexec,
1543 bool(list_clean),
1584 bool(list_clean),
1544 bool(list_ignored),
1585 bool(list_ignored),
1545 bool(list_unknown),
1586 bool(list_unknown),
1546 bool(matcher.traversedir),
1587 bool(matcher.traversedir),
1547 )
1588 )
1548
1589
1549 self._dirty |= dirty
1590 self._dirty |= dirty
1550
1591
1551 if matcher.traversedir:
1592 if matcher.traversedir:
1552 for dir in traversed:
1593 for dir in traversed:
1553 matcher.traversedir(dir)
1594 matcher.traversedir(dir)
1554
1595
1555 if self._ui.warn:
1596 if self._ui.warn:
1556 for item in warnings:
1597 for item in warnings:
1557 if isinstance(item, tuple):
1598 if isinstance(item, tuple):
1558 file_path, syntax = item
1599 file_path, syntax = item
1559 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1600 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1560 file_path,
1601 file_path,
1561 syntax,
1602 syntax,
1562 )
1603 )
1563 self._ui.warn(msg)
1604 self._ui.warn(msg)
1564 else:
1605 else:
1565 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1606 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1566 self._ui.warn(
1607 self._ui.warn(
1567 msg
1608 msg
1568 % (
1609 % (
1569 pathutil.canonpath(
1610 pathutil.canonpath(
1570 self._rootdir, self._rootdir, item
1611 self._rootdir, self._rootdir, item
1571 ),
1612 ),
1572 b"No such file or directory",
1613 b"No such file or directory",
1573 )
1614 )
1574 )
1615 )
1575
1616
1576 for fn, message in bad:
1617 for fn, message in bad:
1577 matcher.bad(fn, encoding.strtolocal(message))
1618 matcher.bad(fn, encoding.strtolocal(message))
1578
1619
1579 status = scmutil.status(
1620 status = scmutil.status(
1580 modified=modified,
1621 modified=modified,
1581 added=added,
1622 added=added,
1582 removed=removed,
1623 removed=removed,
1583 deleted=deleted,
1624 deleted=deleted,
1584 unknown=unknown,
1625 unknown=unknown,
1585 ignored=ignored,
1626 ignored=ignored,
1586 clean=clean,
1627 clean=clean,
1587 )
1628 )
1588 return (lookup, status)
1629 return (lookup, status)
1589
1630
    def status(self, match, subrepos, ignored, clean, unknown):
        """Determine the status of the working copy relative to the
        dirstate and return a pair of (unsure, status), where status is of type
        scmutil.status and:

          unsure:
            files that might have been modified since the dirstate was
            written, but need to be read to be sure (size is the same
            but mtime differs)
          status.modified:
            files that have definitely been modified since the dirstate
            was written (different size or mode)
          status.clean:
            files that have definitely not been modified since the
            dirstate was written
        """
        # status computation may update dirstate mtime records, so it must
        # happen inside the `running_status` context that manages writing
        # those updates back.
        if not self._running_status:
            msg = "Calling `status` outside a `running_status` context"
            raise error.ProgrammingError(msg)
        # Save the boolean request flags under `list*` names; the original
        # parameter names are then reused below as the result accumulators.
        listignored, listclean, listunknown = ignored, clean, unknown
        lookup, modified, added, unknown, ignored = [], [], [], [], []
        removed, deleted, clean = [], [], []

        dmap = self._map
        dmap.preload()

        # Prefer the Rust fast path unless one of the unsupported
        # configurations below forces a fallback to the Python walk.
        use_rust = True

        # Matcher types the Rust status implementation knows how to handle.
        allowed_matchers = (
            matchmod.alwaysmatcher,
            matchmod.differencematcher,
            matchmod.exactmatcher,
            matchmod.includematcher,
            matchmod.intersectionmatcher,
            matchmod.nevermatcher,
            matchmod.unionmatcher,
        )

        if rustmod is None:
            use_rust = False
        elif self._checkcase:
            # Case-insensitive filesystems are not handled yet
            use_rust = False
        elif subrepos:
            use_rust = False
        elif not isinstance(match, allowed_matchers):
            # Some matchers have yet to be implemented
            use_rust = False

        # Get the time from the filesystem so we can disambiguate files that
        # appear modified in the present or future.
        try:
            mtime_boundary = timestamp.get_fs_now(self._opener)
        except OSError:
            # In largefiles or readonly context
            mtime_boundary = None

        if use_rust:
            try:
                res = self._rust_status(
                    match, listclean, listignored, listunknown
                )
                return res + (mtime_boundary,)
            except rustmod.FallbackError:
                # Rust could not handle this request after all; fall through
                # to the pure-Python implementation below.
                pass

        def noop(f):
            # Sink used in place of list.append when a category of result
            # (clean/ignored/unknown) was not requested.
            pass

        # Bind hot methods to locals: the walk loop below runs once per file
        # in the working copy, so attribute lookups are hoisted out of it.
        dcontains = dmap.__contains__
        dget = dmap.__getitem__
        ladd = lookup.append  # aka "unsure"
        madd = modified.append
        aadd = added.append
        uadd = unknown.append if listunknown else noop
        iadd = ignored.append if listignored else noop
        radd = removed.append
        dadd = deleted.append
        cadd = clean.append if listclean else noop
        mexact = match.exact
        dirignore = self._dirignore
        checkexec = self._checkexec
        checklink = self._checklink
        copymap = self._map.copymap

        # We need to do full walks when either
        # - we're listing all clean files, or
        # - match.traversedir does something, because match.traversedir should
        #   be called for every dir in the working dir
        full = listclean or match.traversedir is not None
        for fn, st in self.walk(
            match, subrepos, listunknown, listignored, full=full
        ).items():
            if not dcontains(fn):
                # File exists on disk but is not tracked by the dirstate:
                # it is either ignored or unknown.
                if (listignored or mexact(fn)) and dirignore(fn):
                    if listignored:
                        iadd(fn)
                else:
                    uadd(fn)
                continue

            t = dget(fn)
            mode = t.mode
            size = t.size

            if not st and t.tracked:
                # tracked in the dirstate but no stat result from the walk
                # (presumably missing on disk) -> deleted
                dadd(fn)
            elif t.p2_info:
                madd(fn)
            elif t.added:
                aadd(fn)
            elif t.removed:
                radd(fn)
            elif t.tracked:
                if not checklink and t.has_fallback_symlink:
                    # If the file system does not support symlink, the mode
                    # might not be correctly stored in the dirstate, so do not
                    # trust it.
                    ladd(fn)
                elif not checkexec and t.has_fallback_exec:
                    # If the file system does not support exec bits, the mode
                    # might not be correctly stored in the dirstate, so do not
                    # trust it.
                    ladd(fn)
                elif (
                    size >= 0
                    and (
                        # dirstate may store the size truncated to
                        # _rangemask bits, so compare both forms
                        (size != st.st_size and size != st.st_size & _rangemask)
                        or ((mode ^ st.st_mode) & 0o100 and checkexec)
                    )
                    or fn in copymap
                ):
                    if stat.S_ISLNK(st.st_mode) and size != st.st_size:
                        # issue6456: Size returned may be longer due to
                        # encryption on EXT-4 fscrypt, undecided.
                        ladd(fn)
                    else:
                        madd(fn)
                elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
                    # There might be a change in the future if for example the
                    # internal clock is off, but this is a case where the issues
                    # the user would face would be a lot worse and there is
                    # nothing we can really do.
                    ladd(fn)
                elif listclean:
                    cadd(fn)
        status = scmutil.status(
            modified, added, removed, deleted, unknown, ignored, clean
        )
        return (lookup, status, mtime_boundary)
1740
1781
1741 def matches(self, match):
1782 def matches(self, match):
1742 """
1783 """
1743 return files in the dirstate (in whatever state) filtered by match
1784 return files in the dirstate (in whatever state) filtered by match
1744 """
1785 """
1745 dmap = self._map
1786 dmap = self._map
1746 if rustmod is not None:
1787 if rustmod is not None:
1747 dmap = self._map._map
1788 dmap = self._map._map
1748
1789
1749 if match.always():
1790 if match.always():
1750 return dmap.keys()
1791 return dmap.keys()
1751 files = match.files()
1792 files = match.files()
1752 if match.isexact():
1793 if match.isexact():
1753 # fast path -- filter the other way around, since typically files is
1794 # fast path -- filter the other way around, since typically files is
1754 # much smaller than dmap
1795 # much smaller than dmap
1755 return [f for f in files if f in dmap]
1796 return [f for f in files if f in dmap]
1756 if match.prefix() and all(fn in dmap for fn in files):
1797 if match.prefix() and all(fn in dmap for fn in files):
1757 # fast path -- all the values are known to be files, so just return
1798 # fast path -- all the values are known to be files, so just return
1758 # that
1799 # that
1759 return list(files)
1800 return list(files)
1760 return [f for f in dmap if match(f)]
1801 return [f for f in dmap if match(f)]
1761
1802
1762 def all_file_names(self):
1803 def all_file_names(self):
1763 """list all filename currently used by this dirstate
1804 """list all filename currently used by this dirstate
1764
1805
1765 This is only used to do `hg rollback` related backup in the transaction
1806 This is only used to do `hg rollback` related backup in the transaction
1766 """
1807 """
1767 files = [b'branch']
1808 files = [b'branch']
1768 if self._opener.exists(self._filename):
1809 if self._opener.exists(self._filename):
1769 files.append(self._filename)
1810 files.append(self._filename)
1770 if self._use_dirstate_v2:
1811 if self._use_dirstate_v2:
1771 files.append(self._map.docket.data_filename())
1812 files.append(self._map.docket.data_filename())
1772 return tuple(files)
1813 return tuple(files)
1773
1814
1774 def verify(self, m1, m2, p1, narrow_matcher=None):
1815 def verify(self, m1, m2, p1, narrow_matcher=None):
1775 """
1816 """
1776 check the dirstate contents against the parent manifest and yield errors
1817 check the dirstate contents against the parent manifest and yield errors
1777 """
1818 """
1778 missing_from_p1 = _(
1819 missing_from_p1 = _(
1779 b"%s marked as tracked in p1 (%s) but not in manifest1\n"
1820 b"%s marked as tracked in p1 (%s) but not in manifest1\n"
1780 )
1821 )
1781 unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
1822 unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
1782 missing_from_ps = _(
1823 missing_from_ps = _(
1783 b"%s marked as modified, but not in either manifest\n"
1824 b"%s marked as modified, but not in either manifest\n"
1784 )
1825 )
1785 missing_from_ds = _(
1826 missing_from_ds = _(
1786 b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
1827 b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
1787 )
1828 )
1788 for f, entry in self.items():
1829 for f, entry in self.items():
1789 if entry.p1_tracked:
1830 if entry.p1_tracked:
1790 if entry.modified and f not in m1 and f not in m2:
1831 if entry.modified and f not in m1 and f not in m2:
1791 yield missing_from_ps % f
1832 yield missing_from_ps % f
1792 elif f not in m1:
1833 elif f not in m1:
1793 yield missing_from_p1 % (f, node.short(p1))
1834 yield missing_from_p1 % (f, node.short(p1))
1794 if entry.added and f in m1:
1835 if entry.added and f in m1:
1795 yield unexpected_in_p1 % f
1836 yield unexpected_in_p1 % f
1796 for f in m1:
1837 for f in m1:
1797 if narrow_matcher is not None and not narrow_matcher(f):
1838 if narrow_matcher is not None and not narrow_matcher(f):
1798 continue
1839 continue
1799 entry = self.get_entry(f)
1840 entry = self.get_entry(f)
1800 if not entry.p1_tracked:
1841 if not entry.p1_tracked:
1801 yield missing_from_ds % (f, node.short(p1))
1842 yield missing_from_ds % (f, node.short(p1))
General Comments 0
You need to be logged in to leave comments. Login now