##// END OF EJS Templates
dirstate: deprecate calling `setbranch` without a transaction parameter...
marmoute -
r51161:307c155e default
parent child Browse files
Show More
@@ -1,1799 +1,1806 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
import collections
import contextlib
import functools
import os
import stat
import uuid
14
14
15 from .i18n import _
15 from .i18n import _
16 from .pycompat import delattr
16 from .pycompat import delattr
17
17
18 from hgdemandimport import tracing
18 from hgdemandimport import tracing
19
19
20 from . import (
20 from . import (
21 dirstatemap,
21 dirstatemap,
22 encoding,
22 encoding,
23 error,
23 error,
24 match as matchmod,
24 match as matchmod,
25 node,
25 node,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 txnutil,
30 txnutil,
31 util,
31 util,
32 )
32 )
33
33
34 from .dirstateutils import (
34 from .dirstateutils import (
35 timestamp,
35 timestamp,
36 )
36 )
37
37
38 from .interfaces import (
38 from .interfaces import (
39 dirstate as intdirstate,
39 dirstate as intdirstate,
40 util as interfaceutil,
40 util as interfaceutil,
41 )
41 )
42
42
43 parsers = policy.importmod('parsers')
43 parsers = policy.importmod('parsers')
44 rustmod = policy.importrust('dirstate')
44 rustmod = policy.importrust('dirstate')
45
45
# unique marker used to detect that a keyword argument was not supplied
# (distinct from any real value, including None)
SENTINEL = object()

# True when the Rust implementation of the dirstate is available
HAS_FAST_DIRSTATE_V2 = rustmod is not None

# short local aliases for frequently used helpers
propertycache = util.propertycache
filecache = scmutil.filecache
_rangemask = dirstatemap.rangemask

DirstateItem = dirstatemap.DirstateItem
53
56
54
57
class repocache(filecache):
    """filecache for files in .hg/"""

    def join(self, obj, fname):
        # resolve fname relative to the repository's .hg/ directory
        return obj._opener.join(fname)
60
63
61
64
class rootcache(filecache):
    """filecache for files in the repository root"""

    def join(self, obj, fname):
        # resolve fname relative to the working-directory root
        return obj._join(fname)
67
70
68
71
def check_invalidated(func):
    """Decorator enforcing that ``func`` runs on a non-invalidated dirstate.

    The dirstate is in an "invalidated state" after an error occurred
    during its modification and remains so until we exited the top level
    scope that framed such change.
    """

    def wrap(self, *args, **kwargs):
        # guard clause: refuse to operate on an invalidated dirstate
        if self._invalidated_context:
            raise error.ProgrammingError(
                'calling `%s` after the dirstate was invalidated'
                % func.__name__
            )
        return func(self, *args, **kwargs)

    return wrap
85
88
86
89
def requires_changing_parents(func):
    """Decorator: restrict ``func`` to a ``changing_parents`` context.

    Raises error.ProgrammingError when called outside such a context or
    (via check_invalidated) after the dirstate was invalidated.
    """

    # functools.wraps preserves func.__name__ so that the error raised by
    # check_invalidated names the real method instead of `wrap`
    @functools.wraps(func)
    def wrap(self, *args, **kwargs):
        if not self.is_changing_parents:
            msg = 'calling `%s` outside of a changing_parents context'
            msg %= func.__name__
            raise error.ProgrammingError(msg)
        return func(self, *args, **kwargs)

    return check_invalidated(wrap)
96
99
97
100
def requires_changing_files(func):
    """Decorator: restrict ``func`` to a ``changing_files`` context.

    Raises error.ProgrammingError when called outside such a context or
    (via check_invalidated) after the dirstate was invalidated.
    """

    # functools.wraps preserves func.__name__ so that the error raised by
    # check_invalidated names the real method instead of `wrap`
    @functools.wraps(func)
    def wrap(self, *args, **kwargs):
        if not self.is_changing_files:
            msg = 'calling `%s` outside of a `changing_files`'
            msg %= func.__name__
            raise error.ProgrammingError(msg)
        return func(self, *args, **kwargs)

    return check_invalidated(wrap)
107
110
108
111
def requires_changing_any(func):
    """Decorator: restrict ``func`` to any kind of ``changing_*`` context.

    Raises error.ProgrammingError when called outside such a context or
    (via check_invalidated) after the dirstate was invalidated.
    """

    # functools.wraps preserves func.__name__ so that the error raised by
    # check_invalidated names the real method instead of `wrap`
    @functools.wraps(func)
    def wrap(self, *args, **kwargs):
        if not self.is_changing_any:
            msg = 'calling `%s` outside of a changing context'
            msg %= func.__name__
            raise error.ProgrammingError(msg)
        return func(self, *args, **kwargs)

    return check_invalidated(wrap)
118
121
119
122
def requires_changing_files_or_status(func):
    """Decorator: restrict ``func`` to a ``changing_files`` or
    ``running_status`` context.

    Raises error.ProgrammingError when called outside such a context or
    (via check_invalidated) after the dirstate was invalidated.
    """

    # functools.wraps preserves func.__name__ so that the error raised by
    # check_invalidated names the real method instead of `wrap`
    @functools.wraps(func)
    def wrap(self, *args, **kwargs):
        if not (self.is_changing_files or self._running_status > 0):
            msg = (
                'calling `%s` outside of a changing_files '
                'or running_status context'
            )
            msg %= func.__name__
            raise error.ProgrammingError(msg)
        return func(self, *args, **kwargs)

    return check_invalidated(wrap)
132
135
133
136
# tokens identifying which kind of dirstate-changing context is open;
# the two kinds are mutually exclusive (see dirstate._changing)
CHANGE_TYPE_PARENTS = "parents"
CHANGE_TYPE_FILES = "files"
136
139
137
140
@interfaceutil.implementer(intdirstate.idirstate)
class dirstate:

    # used by largefile to avoid overwriting the transaction callback
    _tr_key_suffix = b''
143
146
    def __init__(
        self,
        opener,
        ui,
        root,
        validate,
        sparsematchfn,
        nodeconstants,
        use_dirstate_v2,
        use_tracked_hint=False,
    ):
        """Create a new dirstate object.

        opener is an open()-like callable that can be used to open the
        dirstate file; root is the root of the directory tracked by
        the dirstate.
        """
        self._use_dirstate_v2 = use_dirstate_v2
        self._use_tracked_hint = use_tracked_hint
        self._nodeconstants = nodeconstants
        self._opener = opener
        self._validate = validate
        self._root = root
        # Either build a sparse-matcher or None if sparse is disabled
        self._sparsematchfn = sparsematchfn
        # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
        # UNC path pointing to root share (issue4557)
        self._rootdir = pathutil.normasprefix(root)
        # True if any internal state may be different
        self._dirty = False
        # True if the set of tracked files may be different
        self._dirty_tracked_set = False
        self._ui = ui
        self._filecache = {}
        # nesting level of `changing_parents` context
        self._changing_level = 0
        # the change currently underway
        self._change_type = None
        # number of open _running_status context
        self._running_status = 0
        # True if the current dirstate changing operations have been
        # invalidated (used to make sure all nested contexts have been exited)
        self._invalidated_context = False
        self._attached_to_a_transaction = False
        self._filename = b'dirstate'
        self._filename_th = b'dirstate-tracked-hint'
        self._pendingfilename = b'%s.pending' % self._filename
        self._plchangecallbacks = {}
        self._origpl = None
        self._mapcls = dirstatemap.dirstatemap
        # Access and cache cwd early, so we don't access it for the first time
        # after a working-copy update caused it to not exist (accessing it then
        # raises an exception).
        self._cwd
198
201
    def refresh(self):
        """Refresh cached values after a possible out-of-band change.

        Drops the cached branch value and, when the on-disk dirstate map may
        have changed, invalidates the whole in-memory state.
        """
        if '_branch' in vars(self):
            del self._branch
        if '_map' in vars(self) and self._map.may_need_refresh():
            self.invalidate()
204
207
    def prefetch_parents(self):
        """make sure the parents are loaded

        Used to avoid a race condition.
        """
        # touching the property is enough to populate the cache
        self._pl
211
214
    @contextlib.contextmanager
    @check_invalidated
    def running_status(self, repo):
        """Wrap a status operation

        This context is not mutually exclusive with the `changing_*` context.
        It also does not warrant for the `wlock` to be taken.

        If the wlock is taken, this context will behave in a simple way, and
        ensure the data are scheduled for write when leaving the top level
        context.

        If the lock is not taken, it will only warrant that the data are either
        committed (written) or rolled back (invalidated) when exiting the top
        level context. The write/invalidate action must be performed by the
        wrapped code.


        The expected logic is:

        A: read the dirstate
        B: run status
           This might make the dirstate dirty by updating cache,
           especially in Rust.
        C: do more "post status" fixup if relevant
        D: try to take the w-lock (this will invalidate the changes if they were raced)
        E0: if dirstate changed on disk → discard change (done by dirstate internal)
        E1: elif lock was acquired → write the changes
        E2: else → discard the changes
        """
        has_lock = repo.currentwlock() is not None
        is_changing = self.is_changing_any
        tr = repo.currenttransaction()
        has_tr = tr is not None
        nested = bool(self._running_status)

        # "first": no other status context is already open;
        # "alone": no change context and no transaction is in flight
        first_and_alone = not (is_changing or has_tr or nested)

        # enforce no change happened outside of a proper context.
        if first_and_alone and self._dirty:
            has_tr = repo.currenttransaction() is not None
            if not has_tr and self._changing_level == 0 and self._dirty:
                msg = "entering a status context, but dirstate is already dirty"
                raise error.ProgrammingError(msg)

        # only the outermost, lock-holding context writes on exit
        should_write = has_lock and not (nested or is_changing)

        self._running_status += 1
        try:
            yield
        except Exception:
            self.invalidate()
            raise
        finally:
            self._running_status -= 1
            # an invalidation while running cancels any pending write
            if self._invalidated_context:
                should_write = False
                self.invalidate()

        if should_write:
            assert repo.currenttransaction() is tr
            self.write(tr)
        elif not has_lock:
            if self._dirty:
                msg = b'dirstate dirty while exiting an isolated status context'
                repo.ui.develwarn(msg)
                self.invalidate()
279
282
    @contextlib.contextmanager
    @check_invalidated
    def _changing(self, repo, change_type):
        """Frame a dirstate-changing operation of kind ``change_type``.

        Requires the wlock to be held. Nested contexts of the same type are
        allowed; mixing types is a programming error. The dirstate is written
        when the outermost context exits cleanly without a transaction.
        """
        if repo.currentwlock() is None:
            msg = b"trying to change the dirstate without holding the wlock"
            raise error.ProgrammingError(msg)

        has_tr = repo.currenttransaction() is not None
        if not has_tr and self._changing_level == 0 and self._dirty:
            msg = b"entering a changing context, but dirstate is already dirty"
            repo.ui.develwarn(msg)

        assert self._changing_level >= 0
        # different types of change are mutually exclusive
        if self._change_type is None:
            assert self._changing_level == 0
            self._change_type = change_type
        elif self._change_type != change_type:
            msg = (
                'trying to open "%s" dirstate-changing context while a "%s" is'
                ' already open'
            )
            msg %= (change_type, self._change_type)
            raise error.ProgrammingError(msg)
        should_write = False
        self._changing_level += 1
        try:
            yield
        except:  # re-raises
            self.invalidate()  # this will set `_invalidated_context`
            raise
        finally:
            assert self._changing_level > 0
            self._changing_level -= 1
            # If the dirstate is being invalidated, call invalidate again.
            # This will throw away anything added by an upper context and
            # reset the `_invalidated_context` flag when relevant
            if self._changing_level <= 0:
                self._change_type = None
                assert self._changing_level == 0
                if self._invalidated_context:
                    # make sure we invalidate anything an upper context might
                    # have changed.
                    self.invalidate()
                else:
                    should_write = self._changing_level <= 0
                    tr = repo.currenttransaction()
                    if has_tr != (tr is not None):
                        if has_tr:
                            m = "transaction vanished while changing dirstate"
                        else:
                            m = "transaction appeared while changing dirstate"
                        raise error.ProgrammingError(m)
        # `tr` is only unbound when should_write stayed False, so this is safe
        if should_write:
            self.write(tr)
335
338
    @contextlib.contextmanager
    def changing_parents(self, repo):
        """Context manager framing a change of the dirstate parents."""
        with self._changing(repo, CHANGE_TYPE_PARENTS) as c:
            yield c
340
343
    @contextlib.contextmanager
    def changing_files(self, repo):
        """Context manager framing a change of the set of tracked files."""
        with self._changing(repo, CHANGE_TYPE_FILES) as c:
            yield c
345
348
    # here to help migration to the new code
    def parentchange(self):
        """Removed API, kept only to give a helpful error to stale callers."""
        msg = (
            "Mercurial 6.4 and later requires call to "
            "`dirstate.changing_parents(repo)`"
        )
        raise error.ProgrammingError(msg)
353
356
    @property
    def is_changing_any(self):
        """Returns true if the dirstate is in the middle of a set of changes.

        This returns True for any kind of change.
        """
        return self._changing_level > 0
361
364
    def pendingparentchange(self):
        # legacy spelling; is_changing_parent itself emits the
        # deprecation warning
        return self.is_changing_parent()
364
367
    def is_changing_parent(self):
        """Returns true if the dirstate is in the middle of a set of changes
        that modify the dirstate parent.

        Deprecated; use the ``is_changing_parents`` property instead.
        """
        self._ui.deprecwarn(b"dirstate.is_changing_parents", b"6.5")
        return self.is_changing_parents
371
374
    @property
    def is_changing_parents(self):
        """Returns true if the dirstate is in the middle of a set of changes
        that modify the dirstate parent.
        """
        if self._changing_level <= 0:
            return False
        return self._change_type == CHANGE_TYPE_PARENTS
380
383
    @property
    def is_changing_files(self):
        """Returns true if the dirstate is in the middle of a set of changes
        that modify the files tracked or their sources.
        """
        if self._changing_level <= 0:
            return False
        return self._change_type == CHANGE_TYPE_FILES
389
392
    @propertycache
    def _map(self):
        """Return the dirstate contents (see documentation for dirstatemap)."""
        return self._mapcls(
            self._ui,
            self._opener,
            self._root,
            self._nodeconstants,
            self._use_dirstate_v2,
        )
400
403
    @property
    def _sparsematcher(self):
        """The matcher for the sparse checkout.

        The working directory may not include every file from a manifest. The
        matcher obtained by this property will match a path if it is to be
        included in the working directory.

        When sparse is disabled, return None.
        """
        if self._sparsematchfn is None:
            return None
        # TODO there is potential to cache this property. For now, the matcher
        # is resolved on every access. (But the called function does use a
        # cache to keep the lookup fast.)
        return self._sparsematchfn()
417
420
    @repocache(b'branch')
    def _branch(self):
        """Branch name read from ``.hg/branch`` (pending-transaction aware).

        Falls back to ``b"default"`` when the file is missing or empty.
        """
        f = None
        data = b''
        try:
            f, mode = txnutil.trypending(self._root, self._opener, b'branch')
            data = f.read().strip()
        except FileNotFoundError:
            pass
        finally:
            if f is not None:
                f.close()
        if not data:
            return b"default"
        return data
433
436
    @property
    def _pl(self):
        # (p1, p2) pair of parent nodes as recorded in the dirstate map
        return self._map.parents()
437
440
    def hasdir(self, d):
        """True if ``d`` is a directory containing tracked files."""
        return self._map.hastrackeddir(d)
440
443
    @rootcache(b'.hgignore')
    def _ignore(self):
        """Matcher combining every configured ignore file."""
        files = self._ignorefiles()
        if not files:
            # no ignore files: nothing is ever ignored
            return matchmod.never()

        pats = [b'include:%s' % f for f in files]
        return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
449
452
    @propertycache
    def _slash(self):
        # True when paths should be displayed with '/' even though the OS
        # separator differs (controlled by the `ui.slash` config knob)
        return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
453
456
    @propertycache
    def _checklink(self):
        # whether the filesystem at the repository root supports symlinks
        return util.checklink(self._root)
457
460
    @propertycache
    def _checkexec(self):
        # whether the filesystem at the repository root honors the exec bit
        return bool(util.checkexec(self._root))
461
464
    @propertycache
    def _checkcase(self):
        # True when the filesystem holding .hg is case-insensitive
        return not util.fscasesensitive(self._join(b'.hg'))
465
468
    def _join(self, f):
        """Return the absolute path of tracked file ``f``."""
        # much faster than os.path.join()
        # it's safe because f is always a relative path
        return self._rootdir + f
470
473
    def flagfunc(self, buildfallback):
        """build a callable that returns flags associated with a filename

        The information is extracted from three possible layers:
        1. the file system if it supports the information
        2. the "fallback" information stored in the dirstate if any
        3. a more expensive mechanism inferring the flags from the parents.
        """

        # small hack to cache the result of buildfallback()
        fallback_func = []

        def get_flags(x):
            entry = None
            fallback_value = None
            try:
                st = os.lstat(self._join(x))
            except OSError:
                # file is gone or unreadable: it carries no flags
                return b''

            if self._checklink:
                if util.statislink(st):
                    return b'l'
            else:
                # filesystem cannot represent symlinks: use the dirstate
                # fallback bit, then the (expensive) parent-based fallback
                entry = self.get_entry(x)
                if entry.has_fallback_symlink:
                    if entry.fallback_symlink:
                        return b'l'
                else:
                    if not fallback_func:
                        fallback_func.append(buildfallback())
                    fallback_value = fallback_func[0](x)
                    if b'l' in fallback_value:
                        return b'l'

            if self._checkexec:
                if util.statisexec(st):
                    return b'x'
            else:
                # filesystem cannot represent the exec bit: same fallback
                # chain as for symlinks above
                if entry is None:
                    entry = self.get_entry(x)
                if entry.has_fallback_exec:
                    if entry.fallback_exec:
                        return b'x'
                else:
                    if fallback_value is None:
                        if not fallback_func:
                            fallback_func.append(buildfallback())
                        fallback_value = fallback_func[0](x)
                    if b'x' in fallback_value:
                        return b'x'
            return b''

        return get_flags
525
528
    @propertycache
    def _cwd(self):
        """Cached current working directory, honoring ``ui.forcecwd``."""
        # internal config: ui.forcecwd
        forcecwd = self._ui.config(b'ui', b'forcecwd')
        if forcecwd:
            return forcecwd
        return encoding.getcwd()
533
536
    def getcwd(self):
        """Return the path from which a canonical path is calculated.

        This path should be used to resolve file patterns or to convert
        canonical paths back to file paths for display. It shouldn't be
        used to get real file paths. Use vfs functions instead.
        """
        cwd = self._cwd
        if cwd == self._root:
            return b''
        # self._root ends with a path separator if self._root is '/' or 'C:\'
        rootsep = self._root
        if not util.endswithsep(rootsep):
            rootsep += pycompat.ossep
        if cwd.startswith(rootsep):
            # inside the repo: return the path relative to the root
            return cwd[len(rootsep) :]
        else:
            # we're outside the repo. return an absolute path.
            return cwd
553
556
554 def pathto(self, f, cwd=None):
557 def pathto(self, f, cwd=None):
555 if cwd is None:
558 if cwd is None:
556 cwd = self.getcwd()
559 cwd = self.getcwd()
557 path = util.pathto(self._root, cwd, f)
560 path = util.pathto(self._root, cwd, f)
558 if self._slash:
561 if self._slash:
559 return util.pconvert(path)
562 return util.pconvert(path)
560 return path
563 return path
561
564
562 def get_entry(self, path):
565 def get_entry(self, path):
563 """return a DirstateItem for the associated path"""
566 """return a DirstateItem for the associated path"""
564 entry = self._map.get(path)
567 entry = self._map.get(path)
565 if entry is None:
568 if entry is None:
566 return DirstateItem()
569 return DirstateItem()
567 return entry
570 return entry
568
571
569 def __contains__(self, key):
572 def __contains__(self, key):
570 return key in self._map
573 return key in self._map
571
574
572 def __iter__(self):
575 def __iter__(self):
573 return iter(sorted(self._map))
576 return iter(sorted(self._map))
574
577
575 def items(self):
578 def items(self):
576 return self._map.items()
579 return self._map.items()
577
580
578 iteritems = items
581 iteritems = items
579
582
580 def parents(self):
583 def parents(self):
581 return [self._validate(p) for p in self._pl]
584 return [self._validate(p) for p in self._pl]
582
585
583 def p1(self):
586 def p1(self):
584 return self._validate(self._pl[0])
587 return self._validate(self._pl[0])
585
588
586 def p2(self):
589 def p2(self):
587 return self._validate(self._pl[1])
590 return self._validate(self._pl[1])
588
591
589 @property
592 @property
590 def in_merge(self):
593 def in_merge(self):
591 """True if a merge is in progress"""
594 """True if a merge is in progress"""
592 return self._pl[1] != self._nodeconstants.nullid
595 return self._pl[1] != self._nodeconstants.nullid
593
596
594 def branch(self):
597 def branch(self):
595 return encoding.tolocal(self._branch)
598 return encoding.tolocal(self._branch)
596
599
597 @requires_changing_parents
600 @requires_changing_parents
598 def setparents(self, p1, p2=None):
601 def setparents(self, p1, p2=None):
599 """Set dirstate parents to p1 and p2.
602 """Set dirstate parents to p1 and p2.
600
603
601 When moving from two parents to one, "merged" entries a
604 When moving from two parents to one, "merged" entries a
602 adjusted to normal and previous copy records discarded and
605 adjusted to normal and previous copy records discarded and
603 returned by the call.
606 returned by the call.
604
607
605 See localrepo.setparents()
608 See localrepo.setparents()
606 """
609 """
607 if p2 is None:
610 if p2 is None:
608 p2 = self._nodeconstants.nullid
611 p2 = self._nodeconstants.nullid
609 if self._changing_level == 0:
612 if self._changing_level == 0:
610 raise ValueError(
613 raise ValueError(
611 b"cannot set dirstate parent outside of "
614 b"cannot set dirstate parent outside of "
612 b"dirstate.changing_parents context manager"
615 b"dirstate.changing_parents context manager"
613 )
616 )
614
617
615 self._dirty = True
618 self._dirty = True
616 oldp2 = self._pl[1]
619 oldp2 = self._pl[1]
617 if self._origpl is None:
620 if self._origpl is None:
618 self._origpl = self._pl
621 self._origpl = self._pl
619 nullid = self._nodeconstants.nullid
622 nullid = self._nodeconstants.nullid
620 # True if we need to fold p2 related state back to a linear case
623 # True if we need to fold p2 related state back to a linear case
621 fold_p2 = oldp2 != nullid and p2 == nullid
624 fold_p2 = oldp2 != nullid and p2 == nullid
622 return self._map.setparents(p1, p2, fold_p2=fold_p2)
625 return self._map.setparents(p1, p2, fold_p2=fold_p2)
623
626
624 def setbranch(self, branch, transaction=None):
627 def setbranch(self, branch, transaction=SENTINEL):
625 self.__class__._branch.set(self, encoding.fromlocal(branch))
628 self.__class__._branch.set(self, encoding.fromlocal(branch))
629 if transaction is SENTINEL:
630 msg = b"setbranch needs a `transaction` argument"
631 self._ui.deprecwarn(msg, b'6.5')
632 transaction = None
626 if transaction is not None:
633 if transaction is not None:
627 self._setup_tr_abort(transaction)
634 self._setup_tr_abort(transaction)
628 transaction.addfilegenerator(
635 transaction.addfilegenerator(
629 b'dirstate-3-branch%s' % self._tr_key_suffix,
636 b'dirstate-3-branch%s' % self._tr_key_suffix,
630 (b'branch',),
637 (b'branch',),
631 self._write_branch,
638 self._write_branch,
632 location=b'plain',
639 location=b'plain',
633 post_finalize=True,
640 post_finalize=True,
634 )
641 )
635 return
642 return
636
643
637 vfs = self._opener
644 vfs = self._opener
638 with vfs(b'branch', b'w', atomictemp=True, checkambig=True) as f:
645 with vfs(b'branch', b'w', atomictemp=True, checkambig=True) as f:
639 self._write_branch(f)
646 self._write_branch(f)
640 # make sure filecache has the correct stat info for _branch after
647 # make sure filecache has the correct stat info for _branch after
641 # replacing the underlying file
648 # replacing the underlying file
642 #
649 #
643 # XXX do we actually need this,
650 # XXX do we actually need this,
644 # refreshing the attribute is quite cheap
651 # refreshing the attribute is quite cheap
645 ce = self._filecache[b'_branch']
652 ce = self._filecache[b'_branch']
646 if ce:
653 if ce:
647 ce.refresh()
654 ce.refresh()
648
655
649 def _write_branch(self, file_obj):
656 def _write_branch(self, file_obj):
650 file_obj.write(self._branch + b'\n')
657 file_obj.write(self._branch + b'\n')
651
658
652 def invalidate(self):
659 def invalidate(self):
653 """Causes the next access to reread the dirstate.
660 """Causes the next access to reread the dirstate.
654
661
655 This is different from localrepo.invalidatedirstate() because it always
662 This is different from localrepo.invalidatedirstate() because it always
656 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
663 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
657 check whether the dirstate has changed before rereading it."""
664 check whether the dirstate has changed before rereading it."""
658
665
659 for a in ("_map", "_branch", "_ignore"):
666 for a in ("_map", "_branch", "_ignore"):
660 if a in self.__dict__:
667 if a in self.__dict__:
661 delattr(self, a)
668 delattr(self, a)
662 self._dirty = False
669 self._dirty = False
663 self._dirty_tracked_set = False
670 self._dirty_tracked_set = False
664 self._invalidated_context = bool(
671 self._invalidated_context = bool(
665 self._changing_level > 0
672 self._changing_level > 0
666 or self._attached_to_a_transaction
673 or self._attached_to_a_transaction
667 or self._running_status
674 or self._running_status
668 )
675 )
669 self._origpl = None
676 self._origpl = None
670
677
671 @requires_changing_any
678 @requires_changing_any
672 def copy(self, source, dest):
679 def copy(self, source, dest):
673 """Mark dest as a copy of source. Unmark dest if source is None."""
680 """Mark dest as a copy of source. Unmark dest if source is None."""
674 if source == dest:
681 if source == dest:
675 return
682 return
676 self._dirty = True
683 self._dirty = True
677 if source is not None:
684 if source is not None:
678 self._check_sparse(source)
685 self._check_sparse(source)
679 self._map.copymap[dest] = source
686 self._map.copymap[dest] = source
680 else:
687 else:
681 self._map.copymap.pop(dest, None)
688 self._map.copymap.pop(dest, None)
682
689
683 def copied(self, file):
690 def copied(self, file):
684 return self._map.copymap.get(file, None)
691 return self._map.copymap.get(file, None)
685
692
686 def copies(self):
693 def copies(self):
687 return self._map.copymap
694 return self._map.copymap
688
695
689 @requires_changing_files
696 @requires_changing_files
690 def set_tracked(self, filename, reset_copy=False):
697 def set_tracked(self, filename, reset_copy=False):
691 """a "public" method for generic code to mark a file as tracked
698 """a "public" method for generic code to mark a file as tracked
692
699
693 This function is to be called outside of "update/merge" case. For
700 This function is to be called outside of "update/merge" case. For
694 example by a command like `hg add X`.
701 example by a command like `hg add X`.
695
702
696 if reset_copy is set, any existing copy information will be dropped.
703 if reset_copy is set, any existing copy information will be dropped.
697
704
698 return True the file was previously untracked, False otherwise.
705 return True the file was previously untracked, False otherwise.
699 """
706 """
700 self._dirty = True
707 self._dirty = True
701 entry = self._map.get(filename)
708 entry = self._map.get(filename)
702 if entry is None or not entry.tracked:
709 if entry is None or not entry.tracked:
703 self._check_new_tracked_filename(filename)
710 self._check_new_tracked_filename(filename)
704 pre_tracked = self._map.set_tracked(filename)
711 pre_tracked = self._map.set_tracked(filename)
705 if reset_copy:
712 if reset_copy:
706 self._map.copymap.pop(filename, None)
713 self._map.copymap.pop(filename, None)
707 if pre_tracked:
714 if pre_tracked:
708 self._dirty_tracked_set = True
715 self._dirty_tracked_set = True
709 return pre_tracked
716 return pre_tracked
710
717
711 @requires_changing_files
718 @requires_changing_files
712 def set_untracked(self, filename):
719 def set_untracked(self, filename):
713 """a "public" method for generic code to mark a file as untracked
720 """a "public" method for generic code to mark a file as untracked
714
721
715 This function is to be called outside of "update/merge" case. For
722 This function is to be called outside of "update/merge" case. For
716 example by a command like `hg remove X`.
723 example by a command like `hg remove X`.
717
724
718 return True the file was previously tracked, False otherwise.
725 return True the file was previously tracked, False otherwise.
719 """
726 """
720 ret = self._map.set_untracked(filename)
727 ret = self._map.set_untracked(filename)
721 if ret:
728 if ret:
722 self._dirty = True
729 self._dirty = True
723 self._dirty_tracked_set = True
730 self._dirty_tracked_set = True
724 return ret
731 return ret
725
732
726 @requires_changing_files_or_status
733 @requires_changing_files_or_status
727 def set_clean(self, filename, parentfiledata):
734 def set_clean(self, filename, parentfiledata):
728 """record that the current state of the file on disk is known to be clean"""
735 """record that the current state of the file on disk is known to be clean"""
729 self._dirty = True
736 self._dirty = True
730 if not self._map[filename].tracked:
737 if not self._map[filename].tracked:
731 self._check_new_tracked_filename(filename)
738 self._check_new_tracked_filename(filename)
732 (mode, size, mtime) = parentfiledata
739 (mode, size, mtime) = parentfiledata
733 self._map.set_clean(filename, mode, size, mtime)
740 self._map.set_clean(filename, mode, size, mtime)
734
741
735 @requires_changing_files_or_status
742 @requires_changing_files_or_status
736 def set_possibly_dirty(self, filename):
743 def set_possibly_dirty(self, filename):
737 """record that the current state of the file on disk is unknown"""
744 """record that the current state of the file on disk is unknown"""
738 self._dirty = True
745 self._dirty = True
739 self._map.set_possibly_dirty(filename)
746 self._map.set_possibly_dirty(filename)
740
747
741 @requires_changing_parents
748 @requires_changing_parents
742 def update_file_p1(
749 def update_file_p1(
743 self,
750 self,
744 filename,
751 filename,
745 p1_tracked,
752 p1_tracked,
746 ):
753 ):
747 """Set a file as tracked in the parent (or not)
754 """Set a file as tracked in the parent (or not)
748
755
749 This is to be called when adjust the dirstate to a new parent after an history
756 This is to be called when adjust the dirstate to a new parent after an history
750 rewriting operation.
757 rewriting operation.
751
758
752 It should not be called during a merge (p2 != nullid) and only within
759 It should not be called during a merge (p2 != nullid) and only within
753 a `with dirstate.changing_parents(repo):` context.
760 a `with dirstate.changing_parents(repo):` context.
754 """
761 """
755 if self.in_merge:
762 if self.in_merge:
756 msg = b'update_file_reference should not be called when merging'
763 msg = b'update_file_reference should not be called when merging'
757 raise error.ProgrammingError(msg)
764 raise error.ProgrammingError(msg)
758 entry = self._map.get(filename)
765 entry = self._map.get(filename)
759 if entry is None:
766 if entry is None:
760 wc_tracked = False
767 wc_tracked = False
761 else:
768 else:
762 wc_tracked = entry.tracked
769 wc_tracked = entry.tracked
763 if not (p1_tracked or wc_tracked):
770 if not (p1_tracked or wc_tracked):
764 # the file is no longer relevant to anyone
771 # the file is no longer relevant to anyone
765 if self._map.get(filename) is not None:
772 if self._map.get(filename) is not None:
766 self._map.reset_state(filename)
773 self._map.reset_state(filename)
767 self._dirty = True
774 self._dirty = True
768 elif (not p1_tracked) and wc_tracked:
775 elif (not p1_tracked) and wc_tracked:
769 if entry is not None and entry.added:
776 if entry is not None and entry.added:
770 return # avoid dropping copy information (maybe?)
777 return # avoid dropping copy information (maybe?)
771
778
772 self._map.reset_state(
779 self._map.reset_state(
773 filename,
780 filename,
774 wc_tracked,
781 wc_tracked,
775 p1_tracked,
782 p1_tracked,
776 # the underlying reference might have changed, we will have to
783 # the underlying reference might have changed, we will have to
777 # check it.
784 # check it.
778 has_meaningful_mtime=False,
785 has_meaningful_mtime=False,
779 )
786 )
780
787
781 @requires_changing_parents
788 @requires_changing_parents
782 def update_file(
789 def update_file(
783 self,
790 self,
784 filename,
791 filename,
785 wc_tracked,
792 wc_tracked,
786 p1_tracked,
793 p1_tracked,
787 p2_info=False,
794 p2_info=False,
788 possibly_dirty=False,
795 possibly_dirty=False,
789 parentfiledata=None,
796 parentfiledata=None,
790 ):
797 ):
791 """update the information about a file in the dirstate
798 """update the information about a file in the dirstate
792
799
793 This is to be called when the direstates parent changes to keep track
800 This is to be called when the direstates parent changes to keep track
794 of what is the file situation in regards to the working copy and its parent.
801 of what is the file situation in regards to the working copy and its parent.
795
802
796 This function must be called within a `dirstate.changing_parents` context.
803 This function must be called within a `dirstate.changing_parents` context.
797
804
798 note: the API is at an early stage and we might need to adjust it
805 note: the API is at an early stage and we might need to adjust it
799 depending of what information ends up being relevant and useful to
806 depending of what information ends up being relevant and useful to
800 other processing.
807 other processing.
801 """
808 """
802 self._update_file(
809 self._update_file(
803 filename=filename,
810 filename=filename,
804 wc_tracked=wc_tracked,
811 wc_tracked=wc_tracked,
805 p1_tracked=p1_tracked,
812 p1_tracked=p1_tracked,
806 p2_info=p2_info,
813 p2_info=p2_info,
807 possibly_dirty=possibly_dirty,
814 possibly_dirty=possibly_dirty,
808 parentfiledata=parentfiledata,
815 parentfiledata=parentfiledata,
809 )
816 )
810
817
811 def hacky_extension_update_file(self, *args, **kwargs):
818 def hacky_extension_update_file(self, *args, **kwargs):
812 """NEVER USE THIS, YOU DO NOT NEED IT
819 """NEVER USE THIS, YOU DO NOT NEED IT
813
820
814 This function is a variant of "update_file" to be called by a small set
821 This function is a variant of "update_file" to be called by a small set
815 of extensions, it also adjust the internal state of file, but can be
822 of extensions, it also adjust the internal state of file, but can be
816 called outside an `changing_parents` context.
823 called outside an `changing_parents` context.
817
824
818 A very small number of extension meddle with the working copy content
825 A very small number of extension meddle with the working copy content
819 in a way that requires to adjust the dirstate accordingly. At the time
826 in a way that requires to adjust the dirstate accordingly. At the time
820 this command is written they are :
827 this command is written they are :
821 - keyword,
828 - keyword,
822 - largefile,
829 - largefile,
823 PLEASE DO NOT GROW THIS LIST ANY FURTHER.
830 PLEASE DO NOT GROW THIS LIST ANY FURTHER.
824
831
825 This function could probably be replaced by more semantic one (like
832 This function could probably be replaced by more semantic one (like
826 "adjust expected size" or "always revalidate file content", etc)
833 "adjust expected size" or "always revalidate file content", etc)
827 however at the time where this is writen, this is too much of a detour
834 however at the time where this is writen, this is too much of a detour
828 to be considered.
835 to be considered.
829 """
836 """
830 if not (self._changing_level > 0 or self._running_status > 0):
837 if not (self._changing_level > 0 or self._running_status > 0):
831 msg = "requires a changes context"
838 msg = "requires a changes context"
832 raise error.ProgrammingError(msg)
839 raise error.ProgrammingError(msg)
833 self._update_file(
840 self._update_file(
834 *args,
841 *args,
835 **kwargs,
842 **kwargs,
836 )
843 )
837
844
838 def _update_file(
845 def _update_file(
839 self,
846 self,
840 filename,
847 filename,
841 wc_tracked,
848 wc_tracked,
842 p1_tracked,
849 p1_tracked,
843 p2_info=False,
850 p2_info=False,
844 possibly_dirty=False,
851 possibly_dirty=False,
845 parentfiledata=None,
852 parentfiledata=None,
846 ):
853 ):
847
854
848 # note: I do not think we need to double check name clash here since we
855 # note: I do not think we need to double check name clash here since we
849 # are in a update/merge case that should already have taken care of
856 # are in a update/merge case that should already have taken care of
850 # this. The test agrees
857 # this. The test agrees
851
858
852 self._dirty = True
859 self._dirty = True
853 old_entry = self._map.get(filename)
860 old_entry = self._map.get(filename)
854 if old_entry is None:
861 if old_entry is None:
855 prev_tracked = False
862 prev_tracked = False
856 else:
863 else:
857 prev_tracked = old_entry.tracked
864 prev_tracked = old_entry.tracked
858 if prev_tracked != wc_tracked:
865 if prev_tracked != wc_tracked:
859 self._dirty_tracked_set = True
866 self._dirty_tracked_set = True
860
867
861 self._map.reset_state(
868 self._map.reset_state(
862 filename,
869 filename,
863 wc_tracked,
870 wc_tracked,
864 p1_tracked,
871 p1_tracked,
865 p2_info=p2_info,
872 p2_info=p2_info,
866 has_meaningful_mtime=not possibly_dirty,
873 has_meaningful_mtime=not possibly_dirty,
867 parentfiledata=parentfiledata,
874 parentfiledata=parentfiledata,
868 )
875 )
869
876
870 def _check_new_tracked_filename(self, filename):
877 def _check_new_tracked_filename(self, filename):
871 scmutil.checkfilename(filename)
878 scmutil.checkfilename(filename)
872 if self._map.hastrackeddir(filename):
879 if self._map.hastrackeddir(filename):
873 msg = _(b'directory %r already in dirstate')
880 msg = _(b'directory %r already in dirstate')
874 msg %= pycompat.bytestr(filename)
881 msg %= pycompat.bytestr(filename)
875 raise error.Abort(msg)
882 raise error.Abort(msg)
876 # shadows
883 # shadows
877 for d in pathutil.finddirs(filename):
884 for d in pathutil.finddirs(filename):
878 if self._map.hastrackeddir(d):
885 if self._map.hastrackeddir(d):
879 break
886 break
880 entry = self._map.get(d)
887 entry = self._map.get(d)
881 if entry is not None and not entry.removed:
888 if entry is not None and not entry.removed:
882 msg = _(b'file %r in dirstate clashes with %r')
889 msg = _(b'file %r in dirstate clashes with %r')
883 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
890 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
884 raise error.Abort(msg)
891 raise error.Abort(msg)
885 self._check_sparse(filename)
892 self._check_sparse(filename)
886
893
887 def _check_sparse(self, filename):
894 def _check_sparse(self, filename):
888 """Check that a filename is inside the sparse profile"""
895 """Check that a filename is inside the sparse profile"""
889 sparsematch = self._sparsematcher
896 sparsematch = self._sparsematcher
890 if sparsematch is not None and not sparsematch.always():
897 if sparsematch is not None and not sparsematch.always():
891 if not sparsematch(filename):
898 if not sparsematch(filename):
892 msg = _(b"cannot add '%s' - it is outside the sparse checkout")
899 msg = _(b"cannot add '%s' - it is outside the sparse checkout")
893 hint = _(
900 hint = _(
894 b'include file with `hg debugsparse --include <pattern>` or use '
901 b'include file with `hg debugsparse --include <pattern>` or use '
895 b'`hg add -s <file>` to include file directory while adding'
902 b'`hg add -s <file>` to include file directory while adding'
896 )
903 )
897 raise error.Abort(msg % filename, hint=hint)
904 raise error.Abort(msg % filename, hint=hint)
898
905
899 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
906 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
900 if exists is None:
907 if exists is None:
901 exists = os.path.lexists(os.path.join(self._root, path))
908 exists = os.path.lexists(os.path.join(self._root, path))
902 if not exists:
909 if not exists:
903 # Maybe a path component exists
910 # Maybe a path component exists
904 if not ignoremissing and b'/' in path:
911 if not ignoremissing and b'/' in path:
905 d, f = path.rsplit(b'/', 1)
912 d, f = path.rsplit(b'/', 1)
906 d = self._normalize(d, False, ignoremissing, None)
913 d = self._normalize(d, False, ignoremissing, None)
907 folded = d + b"/" + f
914 folded = d + b"/" + f
908 else:
915 else:
909 # No path components, preserve original case
916 # No path components, preserve original case
910 folded = path
917 folded = path
911 else:
918 else:
912 # recursively normalize leading directory components
919 # recursively normalize leading directory components
913 # against dirstate
920 # against dirstate
914 if b'/' in normed:
921 if b'/' in normed:
915 d, f = normed.rsplit(b'/', 1)
922 d, f = normed.rsplit(b'/', 1)
916 d = self._normalize(d, False, ignoremissing, True)
923 d = self._normalize(d, False, ignoremissing, True)
917 r = self._root + b"/" + d
924 r = self._root + b"/" + d
918 folded = d + b"/" + util.fspath(f, r)
925 folded = d + b"/" + util.fspath(f, r)
919 else:
926 else:
920 folded = util.fspath(normed, self._root)
927 folded = util.fspath(normed, self._root)
921 storemap[normed] = folded
928 storemap[normed] = folded
922
929
923 return folded
930 return folded
924
931
925 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
932 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
926 normed = util.normcase(path)
933 normed = util.normcase(path)
927 folded = self._map.filefoldmap.get(normed, None)
934 folded = self._map.filefoldmap.get(normed, None)
928 if folded is None:
935 if folded is None:
929 if isknown:
936 if isknown:
930 folded = path
937 folded = path
931 else:
938 else:
932 folded = self._discoverpath(
939 folded = self._discoverpath(
933 path, normed, ignoremissing, exists, self._map.filefoldmap
940 path, normed, ignoremissing, exists, self._map.filefoldmap
934 )
941 )
935 return folded
942 return folded
936
943
937 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
944 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
938 normed = util.normcase(path)
945 normed = util.normcase(path)
939 folded = self._map.filefoldmap.get(normed, None)
946 folded = self._map.filefoldmap.get(normed, None)
940 if folded is None:
947 if folded is None:
941 folded = self._map.dirfoldmap.get(normed, None)
948 folded = self._map.dirfoldmap.get(normed, None)
942 if folded is None:
949 if folded is None:
943 if isknown:
950 if isknown:
944 folded = path
951 folded = path
945 else:
952 else:
946 # store discovered result in dirfoldmap so that future
953 # store discovered result in dirfoldmap so that future
947 # normalizefile calls don't start matching directories
954 # normalizefile calls don't start matching directories
948 folded = self._discoverpath(
955 folded = self._discoverpath(
949 path, normed, ignoremissing, exists, self._map.dirfoldmap
956 path, normed, ignoremissing, exists, self._map.dirfoldmap
950 )
957 )
951 return folded
958 return folded
952
959
953 def normalize(self, path, isknown=False, ignoremissing=False):
960 def normalize(self, path, isknown=False, ignoremissing=False):
954 """
961 """
955 normalize the case of a pathname when on a casefolding filesystem
962 normalize the case of a pathname when on a casefolding filesystem
956
963
957 isknown specifies whether the filename came from walking the
964 isknown specifies whether the filename came from walking the
958 disk, to avoid extra filesystem access.
965 disk, to avoid extra filesystem access.
959
966
960 If ignoremissing is True, missing path are returned
967 If ignoremissing is True, missing path are returned
961 unchanged. Otherwise, we try harder to normalize possibly
968 unchanged. Otherwise, we try harder to normalize possibly
962 existing path components.
969 existing path components.
963
970
964 The normalized case is determined based on the following precedence:
971 The normalized case is determined based on the following precedence:
965
972
966 - version of name already stored in the dirstate
973 - version of name already stored in the dirstate
967 - version of name stored on disk
974 - version of name stored on disk
968 - version provided via command arguments
975 - version provided via command arguments
969 """
976 """
970
977
971 if self._checkcase:
978 if self._checkcase:
972 return self._normalize(path, isknown, ignoremissing)
979 return self._normalize(path, isknown, ignoremissing)
973 return path
980 return path
974
981
975 # XXX this method is barely used, as a result:
982 # XXX this method is barely used, as a result:
976 # - its semantic is unclear
983 # - its semantic is unclear
977 # - do we really needs it ?
984 # - do we really needs it ?
978 @requires_changing_parents
985 @requires_changing_parents
979 def clear(self):
986 def clear(self):
980 self._map.clear()
987 self._map.clear()
981 self._dirty = True
988 self._dirty = True
982
989
983 @requires_changing_parents
990 @requires_changing_parents
984 def rebuild(self, parent, allfiles, changedfiles=None):
991 def rebuild(self, parent, allfiles, changedfiles=None):
985 matcher = self._sparsematcher
992 matcher = self._sparsematcher
986 if matcher is not None and not matcher.always():
993 if matcher is not None and not matcher.always():
987 # should not add non-matching files
994 # should not add non-matching files
988 allfiles = [f for f in allfiles if matcher(f)]
995 allfiles = [f for f in allfiles if matcher(f)]
989 if changedfiles:
996 if changedfiles:
990 changedfiles = [f for f in changedfiles if matcher(f)]
997 changedfiles = [f for f in changedfiles if matcher(f)]
991
998
992 if changedfiles is not None:
999 if changedfiles is not None:
993 # these files will be deleted from the dirstate when they are
1000 # these files will be deleted from the dirstate when they are
994 # not found to be in allfiles
1001 # not found to be in allfiles
995 dirstatefilestoremove = {f for f in self if not matcher(f)}
1002 dirstatefilestoremove = {f for f in self if not matcher(f)}
996 changedfiles = dirstatefilestoremove.union(changedfiles)
1003 changedfiles = dirstatefilestoremove.union(changedfiles)
997
1004
998 if changedfiles is None:
1005 if changedfiles is None:
999 # Rebuild entire dirstate
1006 # Rebuild entire dirstate
1000 to_lookup = allfiles
1007 to_lookup = allfiles
1001 to_drop = []
1008 to_drop = []
1002 self.clear()
1009 self.clear()
1003 elif len(changedfiles) < 10:
1010 elif len(changedfiles) < 10:
1004 # Avoid turning allfiles into a set, which can be expensive if it's
1011 # Avoid turning allfiles into a set, which can be expensive if it's
1005 # large.
1012 # large.
1006 to_lookup = []
1013 to_lookup = []
1007 to_drop = []
1014 to_drop = []
1008 for f in changedfiles:
1015 for f in changedfiles:
1009 if f in allfiles:
1016 if f in allfiles:
1010 to_lookup.append(f)
1017 to_lookup.append(f)
1011 else:
1018 else:
1012 to_drop.append(f)
1019 to_drop.append(f)
1013 else:
1020 else:
1014 changedfilesset = set(changedfiles)
1021 changedfilesset = set(changedfiles)
1015 to_lookup = changedfilesset & set(allfiles)
1022 to_lookup = changedfilesset & set(allfiles)
1016 to_drop = changedfilesset - to_lookup
1023 to_drop = changedfilesset - to_lookup
1017
1024
1018 if self._origpl is None:
1025 if self._origpl is None:
1019 self._origpl = self._pl
1026 self._origpl = self._pl
1020 self._map.setparents(parent, self._nodeconstants.nullid)
1027 self._map.setparents(parent, self._nodeconstants.nullid)
1021
1028
1022 for f in to_lookup:
1029 for f in to_lookup:
1023 if self.in_merge:
1030 if self.in_merge:
1024 self.set_tracked(f)
1031 self.set_tracked(f)
1025 else:
1032 else:
1026 self._map.reset_state(
1033 self._map.reset_state(
1027 f,
1034 f,
1028 wc_tracked=True,
1035 wc_tracked=True,
1029 p1_tracked=True,
1036 p1_tracked=True,
1030 )
1037 )
1031 for f in to_drop:
1038 for f in to_drop:
1032 self._map.reset_state(f)
1039 self._map.reset_state(f)
1033
1040
1034 self._dirty = True
1041 self._dirty = True
1035
1042
1036 def _setup_tr_abort(self, tr):
1043 def _setup_tr_abort(self, tr):
1037 """make sure we invalidate the current change on abort"""
1044 """make sure we invalidate the current change on abort"""
1038 if tr is None:
1045 if tr is None:
1039 return
1046 return
1040
1047
1041 def on_abort(tr):
1048 def on_abort(tr):
1042 self._attached_to_a_transaction = False
1049 self._attached_to_a_transaction = False
1043 self.invalidate()
1050 self.invalidate()
1044
1051
1045 tr.addabort(
1052 tr.addabort(
1046 b'dirstate-invalidate%s' % self._tr_key_suffix,
1053 b'dirstate-invalidate%s' % self._tr_key_suffix,
1047 on_abort,
1054 on_abort,
1048 )
1055 )
1049
1056
1050 def write(self, tr):
1057 def write(self, tr):
1051 if not self._dirty:
1058 if not self._dirty:
1052 return
1059 return
1053 # make sure we don't request a write of invalidated content
1060 # make sure we don't request a write of invalidated content
1054 # XXX move before the dirty check once `unlock` stop calling `write`
1061 # XXX move before the dirty check once `unlock` stop calling `write`
1055 assert not self._invalidated_context
1062 assert not self._invalidated_context
1056
1063
1057 write_key = self._use_tracked_hint and self._dirty_tracked_set
1064 write_key = self._use_tracked_hint and self._dirty_tracked_set
1058 if tr:
1065 if tr:
1059
1066
1060 self._setup_tr_abort(tr)
1067 self._setup_tr_abort(tr)
1061 self._attached_to_a_transaction = True
1068 self._attached_to_a_transaction = True
1062
1069
1063 def on_success(f):
1070 def on_success(f):
1064 self._attached_to_a_transaction = False
1071 self._attached_to_a_transaction = False
1065 self._writedirstate(tr, f),
1072 self._writedirstate(tr, f),
1066
1073
1067 # delay writing in-memory changes out
1074 # delay writing in-memory changes out
1068 tr.addfilegenerator(
1075 tr.addfilegenerator(
1069 b'dirstate-1-main%s' % self._tr_key_suffix,
1076 b'dirstate-1-main%s' % self._tr_key_suffix,
1070 (self._filename,),
1077 (self._filename,),
1071 on_success,
1078 on_success,
1072 location=b'plain',
1079 location=b'plain',
1073 post_finalize=True,
1080 post_finalize=True,
1074 )
1081 )
1075 if write_key:
1082 if write_key:
1076 tr.addfilegenerator(
1083 tr.addfilegenerator(
1077 b'dirstate-2-key-post%s' % self._tr_key_suffix,
1084 b'dirstate-2-key-post%s' % self._tr_key_suffix,
1078 (self._filename_th,),
1085 (self._filename_th,),
1079 lambda f: self._write_tracked_hint(tr, f),
1086 lambda f: self._write_tracked_hint(tr, f),
1080 location=b'plain',
1087 location=b'plain',
1081 post_finalize=True,
1088 post_finalize=True,
1082 )
1089 )
1083 return
1090 return
1084
1091
1085 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
1092 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
1086 with file(self._filename) as f:
1093 with file(self._filename) as f:
1087 self._writedirstate(tr, f)
1094 self._writedirstate(tr, f)
1088 if write_key:
1095 if write_key:
1089 # we update the key-file after writing to make sure reader have a
1096 # we update the key-file after writing to make sure reader have a
1090 # key that match the newly written content
1097 # key that match the newly written content
1091 with file(self._filename_th) as f:
1098 with file(self._filename_th) as f:
1092 self._write_tracked_hint(tr, f)
1099 self._write_tracked_hint(tr, f)
1093
1100
    def delete_tracked_hint(self):
        """remove the tracked_hint file

        To be used by format downgrades operation"""
        self._opener.unlink(self._filename_th)
        # the flag is cleared only after a successful unlink, so a failing
        # unlink leaves the hint still considered "in use"
        self._use_tracked_hint = False
1100
1107
    def addparentchangecallback(self, category, callback):
        """add a callback to be called when the wd parents are changed

        Callback will be called with the following arguments:
            dirstate, (oldp1, oldp2), (newp1, newp2)

        Category is a unique identifier to allow overwriting an old callback
        with a newer callback.
        """
        # keyed by category: registering again with the same category
        # silently replaces the previous callback
        self._plchangecallbacks[category] = callback
1111
1118
    def _writedirstate(self, tr, st):
        """Serialize the dirstate map to the already-open file object `st`.

        When the working-directory parents changed since `_origpl` was
        recorded, the registered parent-change callbacks fire first, then
        the map is written and the dirty flags are cleared.
        """
        # make sure we don't write invalidated content
        assert not self._invalidated_context
        # notify callbacks about parents change
        if self._origpl is not None and self._origpl != self._pl:
            # sorted() gives a deterministic callback invocation order
            for c, callback in sorted(self._plchangecallbacks.items()):
                callback(self, self._origpl, self._pl)
            # callbacks fired once; don't re-notify on a later write
            self._origpl = None
        self._map.write(tr, st)
        self._dirty = False
        self._dirty_tracked_set = False
1123
1130
1124 def _write_tracked_hint(self, tr, f):
1131 def _write_tracked_hint(self, tr, f):
1125 key = node.hex(uuid.uuid4().bytes)
1132 key = node.hex(uuid.uuid4().bytes)
1126 f.write(b"1\n%s\n" % key) # 1 is the format version
1133 f.write(b"1\n%s\n" % key) # 1 is the format version
1127
1134
1128 def _dirignore(self, f):
1135 def _dirignore(self, f):
1129 if self._ignore(f):
1136 if self._ignore(f):
1130 return True
1137 return True
1131 for p in pathutil.finddirs(f):
1138 for p in pathutil.finddirs(f):
1132 if self._ignore(p):
1139 if self._ignore(p):
1133 return True
1140 return True
1134 return False
1141 return False
1135
1142
1136 def _ignorefiles(self):
1143 def _ignorefiles(self):
1137 files = []
1144 files = []
1138 if os.path.exists(self._join(b'.hgignore')):
1145 if os.path.exists(self._join(b'.hgignore')):
1139 files.append(self._join(b'.hgignore'))
1146 files.append(self._join(b'.hgignore'))
1140 for name, path in self._ui.configitems(b"ui"):
1147 for name, path in self._ui.configitems(b"ui"):
1141 if name == b'ignore' or name.startswith(b'ignore.'):
1148 if name == b'ignore' or name.startswith(b'ignore.'):
1142 # we need to use os.path.join here rather than self._join
1149 # we need to use os.path.join here rather than self._join
1143 # because path is arbitrary and user-specified
1150 # because path is arbitrary and user-specified
1144 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1151 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1145 return files
1152 return files
1146
1153
    def _ignorefileandline(self, f):
        """Return (file, lineno, original line) for the first ignore pattern
        matching `f`, or (None, -1, b"") when no pattern matches.

        Ignore files are processed in breadth-first order, following
        `subinclude` directives transitively; `visited` prevents re-reading
        a file reachable through several includes.
        """
        files = collections.deque(self._ignorefiles())
        visited = set()
        while files:
            i = files.popleft()
            patterns = matchmod.readpatternfile(
                i, self._ui.warn, sourceinfo=True
            )
            for pattern, lineno, line in patterns:
                kind, p = matchmod._patsplit(pattern, b'glob')
                if kind == b"subinclude":
                    # queue the included pattern file instead of matching
                    if p not in visited:
                        files.append(p)
                    continue
                # build a single-pattern matcher to test f against this line
                m = matchmod.match(
                    self._root, b'', [], [pattern], warn=self._ui.warn
                )
                if m(f):
                    return (i, lineno, line)
            visited.add(i)
        return (None, -1, b"")
1168
1175
    def _walkexplicit(self, match, subrepos):
        """Get stat data about the files explicitly specified by match.

        Return a triple (results, dirsfound, dirsnotfound).
        - results is a mapping from filename to stat result. It also contains
          listings mapping subrepos and .hg to None.
        - dirsfound is a list of files found to be directories.
        - dirsnotfound is a list of files that the dirstate thinks are
          directories and that were not found."""

        def badtype(mode):
            # translate an unsupported stat mode into a readable message
            kind = _(b'unknown')
            if stat.S_ISCHR(mode):
                kind = _(b'character device')
            elif stat.S_ISBLK(mode):
                kind = _(b'block device')
            elif stat.S_ISFIFO(mode):
                kind = _(b'fifo')
            elif stat.S_ISSOCK(mode):
                kind = _(b'socket')
            elif stat.S_ISDIR(mode):
                kind = _(b'directory')
            return _(b'unsupported file type (type is %s)') % kind

        # hoist frequently-used callables and constants into locals
        badfn = match.bad
        dmap = self._map
        lstat = os.lstat
        getkind = stat.S_IFMT
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join
        dirsfound = []
        foundadd = dirsfound.append
        dirsnotfound = []
        notfoundadd = dirsnotfound.append

        if not match.isexact() and self._checkcase:
            normalize = self._normalize
        else:
            normalize = None

        # drop explicit files that live inside a subrepo; both lists are
        # sorted, so a single merge-style pass over them is enough
        files = sorted(match.files())
        subrepos.sort()
        i, j = 0, 0
        while i < len(files) and j < len(subrepos):
            subpath = subrepos[j] + b"/"
            if files[i] < subpath:
                i += 1
                continue
            while i < len(files) and files[i].startswith(subpath):
                del files[i]
            j += 1

        if not files or b'' in files:
            files = [b'']
            # constructing the foldmap is expensive, so don't do it for the
            # common case where files is ['']
            normalize = None
        results = dict.fromkeys(subrepos)
        results[b'.hg'] = None

        for ff in files:
            if normalize:
                nf = normalize(ff, False, True)
            else:
                nf = ff
            if nf in results:
                continue

            try:
                st = lstat(join(nf))
                kind = getkind(st.st_mode)
                if kind == dirkind:
                    if nf in dmap:
                        # file replaced by dir on disk but still in dirstate
                        results[nf] = None
                    foundadd((nf, ff))
                elif kind == regkind or kind == lnkkind:
                    results[nf] = st
                else:
                    badfn(ff, badtype(kind))
                    if nf in dmap:
                        results[nf] = None
            except (OSError) as inst:
                # nf not found on disk - it is dirstate only
                if nf in dmap:  # does it exactly match a missing file?
                    results[nf] = None
                else:  # does it match a missing directory?
                    if self._map.hasdir(nf):
                        notfoundadd(nf)
                    else:
                        badfn(ff, encoding.strtolocal(inst.strerror))

        # match.files() may contain explicitly-specified paths that shouldn't
        # be taken; drop them from the list of files found. dirsfound/notfound
        # aren't filtered here because they will be tested later.
        if match.anypats():
            for f in list(results):
                if f == b'.hg' or f in subrepos:
                    # keep sentinel to disable further out-of-repo walks
                    continue
                if not match(f):
                    del results[f]

        # Case insensitive filesystems cannot rely on lstat() failing to detect
        # a case-only rename. Prune the stat object for any file that does not
        # match the case in the filesystem, if there are multiple files that
        # normalize to the same path.
        if match.isexact() and self._checkcase:
            # group results by their case-normalized form
            normed = {}

            for f, st in results.items():
                if st is None:
                    continue

                nc = util.normcase(f)
                paths = normed.get(nc)

                if paths is None:
                    paths = set()
                    normed[nc] = paths

                paths.add(f)

            for norm, paths in normed.items():
                if len(paths) > 1:
                    for path in paths:
                        folded = self._discoverpath(
                            path, norm, True, None, self._map.dirfoldmap
                        )
                        if path != folded:
                            results[path] = None

        return results, dirsfound, dirsnotfound
1304
1311
    def walk(self, match, subrepos, unknown, ignored, full=True):
        """
        Walk recursively through the directory tree, finding all files
        matched by match.

        Files under a path listed in ``subrepos`` are skipped; ``unknown``
        and ``ignored`` control whether unknown/ignored files are reported.

        If full is False, maybe skip some known-clean files.

        Return a dict mapping filename to stat-like object (either
        mercurial.osutil.stat instance or return value of os.stat()).

        """
        # full is a flag that extensions that hook into walk can use -- this
        # implementation doesn't use it at all. This satisfies the contract
        # because we only guarantee a "maybe".

        if ignored:
            ignore = util.never
            dirignore = util.never
        elif unknown:
            ignore = self._ignore
            dirignore = self._dirignore
        else:
            # if not unknown and not ignored, drop dir recursion and step 2
            ignore = util.always
            dirignore = util.always

        if self._sparsematchfn is not None:
            # restrict the walk to the sparse profile, plus any explicit files
            em = matchmod.exact(match.files())
            sm = matchmod.unionmatcher([self._sparsematcher, em])
            match = matchmod.intersectmatchers(match, sm)

        # hoist hot lookups into locals
        matchfn = match.matchfn
        matchalways = match.always()
        matchtdir = match.traversedir
        dmap = self._map
        listdir = util.listdir
        lstat = os.lstat
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join

        exact = skipstep3 = False
        if match.isexact():  # match.exact
            exact = True
            dirignore = util.always  # skip step 2
        elif match.prefix():  # match.match, no patterns
            skipstep3 = True

        if not exact and self._checkcase:
            normalize = self._normalize
            normalizefile = self._normalizefile
            skipstep3 = False
        else:
            normalize = self._normalize
            normalizefile = None

        # step 1: find all explicit files
        results, work, dirsnotfound = self._walkexplicit(match, subrepos)
        if matchtdir:
            for d in work:
                matchtdir(d[0])
            for d in dirsnotfound:
                matchtdir(d)

        skipstep3 = skipstep3 and not (work or dirsnotfound)
        work = [d for d in work if not dirignore(d[0])]

        # step 2: visit subdirectories
        def traverse(work, alreadynormed):
            wadd = work.append
            while work:
                tracing.counter('dirstate.walk work', len(work))
                nd = work.pop()
                visitentries = match.visitchildrenset(nd)
                if not visitentries:
                    continue
                if visitentries == b'this' or visitentries == b'all':
                    visitentries = None
                skip = None
                if nd != b'':
                    skip = b'.hg'
                try:
                    with tracing.log('dirstate.walk.traverse listdir %s', nd):
                        entries = listdir(join(nd), stat=True, skip=skip)
                except (PermissionError, FileNotFoundError) as inst:
                    # report unreadable directories through match.bad and
                    # keep walking the rest of the tree
                    match.bad(
                        self.pathto(nd), encoding.strtolocal(inst.strerror)
                    )
                    continue
                for f, kind, st in entries:
                    # Some matchers may return files in the visitentries set,
                    # instead of 'this', if the matcher explicitly mentions them
                    # and is not an exactmatcher. This is acceptable; we do not
                    # make any hard assumptions about file-or-directory below
                    # based on the presence of `f` in visitentries. If
                    # visitchildrenset returned a set, we can always skip the
                    # entries *not* in the set it provided regardless of whether
                    # they're actually a file or a directory.
                    if visitentries and f not in visitentries:
                        continue
                    if normalizefile:
                        # even though f might be a directory, we're only
                        # interested in comparing it to files currently in the
                        # dmap -- therefore normalizefile is enough
                        nf = normalizefile(
                            nd and (nd + b"/" + f) or f, True, True
                        )
                    else:
                        nf = nd and (nd + b"/" + f) or f
                    if nf not in results:
                        if kind == dirkind:
                            if not ignore(nf):
                                if matchtdir:
                                    matchtdir(nf)
                                wadd(nf)
                            if nf in dmap and (matchalways or matchfn(nf)):
                                results[nf] = None
                        elif kind == regkind or kind == lnkkind:
                            if nf in dmap:
                                if matchalways or matchfn(nf):
                                    results[nf] = st
                            elif (matchalways or matchfn(nf)) and not ignore(
                                nf
                            ):
                                # unknown file -- normalize if necessary
                                if not alreadynormed:
                                    nf = normalize(nf, False, True)
                                results[nf] = st
                        elif nf in dmap and (matchalways or matchfn(nf)):
                            results[nf] = None

        for nd, d in work:
            # alreadynormed means that processwork doesn't have to do any
            # expensive directory normalization
            alreadynormed = not normalize or nd == d
            traverse([d], alreadynormed)

        # drop the sentinels installed by _walkexplicit
        for s in subrepos:
            del results[s]
        del results[b'.hg']

        # step 3: visit remaining files from dmap
        if not skipstep3 and not exact:
            # If a dmap file is not in results yet, it was either
            # a) not matching matchfn b) ignored, c) missing, or d) under a
            # symlink directory.
            if not results and matchalways:
                visit = [f for f in dmap]
            else:
                visit = [f for f in dmap if f not in results and matchfn(f)]
            visit.sort()

            if unknown:
                # unknown == True means we walked all dirs under the roots
                # that wasn't ignored, and everything that matched was stat'ed
                # and is already in results.
                # The rest must thus be ignored or under a symlink.
                audit_path = pathutil.pathauditor(self._root, cached=True)

                for nf in iter(visit):
                    # If a stat for the same file was already added with a
                    # different case, don't add one for this, since that would
                    # make it appear as if the file exists under both names
                    # on disk.
                    if (
                        normalizefile
                        and normalizefile(nf, True, True) in results
                    ):
                        results[nf] = None
                    # Report ignored items in the dmap as long as they are not
                    # under a symlink directory.
                    elif audit_path.check(nf):
                        try:
                            results[nf] = lstat(join(nf))
                            # file was just ignored, no links, and exists
                        except OSError:
                            # file doesn't exist
                            results[nf] = None
                    else:
                        # It's either missing or under a symlink directory
                        # which we in this case report as missing
                        results[nf] = None
            else:
                # We may not have walked the full directory tree above,
                # so stat and check everything we missed.
                iv = iter(visit)
                for st in util.statfiles([join(i) for i in visit]):
                    results[next(iv)] = st
        return results
1495
1502
    def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
        """Run the Rust status implementation and return (lookup, status).

        `status` is a scmutil.status tuple; `lookup` is the list of files
        whose state could not be decided without reading their content.
        """
        if self._sparsematchfn is not None:
            # restrict the status to the sparse profile, plus explicit files
            em = matchmod.exact(matcher.files())
            sm = matchmod.unionmatcher([self._sparsematcher, em])
            matcher = matchmod.intersectmatchers(matcher, sm)
        # Force Rayon (Rust parallelism library) to respect the number of
        # workers. This is a temporary workaround until Rust code knows
        # how to read the config file.
        numcpus = self._ui.configint(b"worker", b"numcpus")
        if numcpus is not None:
            encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)

        workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
        if not workers_enabled:
            encoding.environ[b"RAYON_NUM_THREADS"] = b"1"

        (
            lookup,
            modified,
            added,
            removed,
            deleted,
            clean,
            ignored,
            unknown,
            warnings,
            bad,
            traversed,
            dirty,
        ) = rustmod.status(
            self._map._map,
            matcher,
            self._rootdir,
            self._ignorefiles(),
            self._checkexec,
            bool(list_clean),
            bool(list_ignored),
            bool(list_unknown),
            bool(matcher.traversedir),
        )

        # the Rust call may have mutated the map; propagate its dirty flag
        self._dirty |= dirty

        if matcher.traversedir:
            for dir in traversed:
                matcher.traversedir(dir)

        if self._ui.warn:
            for item in warnings:
                if isinstance(item, tuple):
                    # (file_path, syntax) pair: a bad pattern inside the file
                    file_path, syntax = item
                    msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
                        file_path,
                        syntax,
                    )
                    self._ui.warn(msg)
                else:
                    # bare path: the whole pattern file could not be read
                    msg = _(b"skipping unreadable pattern file '%s': %s\n")
                    self._ui.warn(
                        msg
                        % (
                            pathutil.canonpath(
                                self._rootdir, self._rootdir, item
                            ),
                            b"No such file or directory",
                        )
                    )

        for fn, message in bad:
            matcher.bad(fn, encoding.strtolocal(message))

        status = scmutil.status(
            modified=modified,
            added=added,
            removed=removed,
            deleted=deleted,
            unknown=unknown,
            ignored=ignored,
            clean=clean,
        )
        return (lookup, status)
1577
1584
1578 def status(self, match, subrepos, ignored, clean, unknown):
1585 def status(self, match, subrepos, ignored, clean, unknown):
1579 """Determine the status of the working copy relative to the
1586 """Determine the status of the working copy relative to the
1580 dirstate and return a pair of (unsure, status), where status is of type
1587 dirstate and return a pair of (unsure, status), where status is of type
1581 scmutil.status and:
1588 scmutil.status and:
1582
1589
1583 unsure:
1590 unsure:
1584 files that might have been modified since the dirstate was
1591 files that might have been modified since the dirstate was
1585 written, but need to be read to be sure (size is the same
1592 written, but need to be read to be sure (size is the same
1586 but mtime differs)
1593 but mtime differs)
1587 status.modified:
1594 status.modified:
1588 files that have definitely been modified since the dirstate
1595 files that have definitely been modified since the dirstate
1589 was written (different size or mode)
1596 was written (different size or mode)
1590 status.clean:
1597 status.clean:
1591 files that have definitely not been modified since the
1598 files that have definitely not been modified since the
1592 dirstate was written
1599 dirstate was written
1593 """
1600 """
1594 if not self._running_status:
1601 if not self._running_status:
1595 msg = "Calling `status` outside a `running_status` context"
1602 msg = "Calling `status` outside a `running_status` context"
1596 raise error.ProgrammingError(msg)
1603 raise error.ProgrammingError(msg)
1597 listignored, listclean, listunknown = ignored, clean, unknown
1604 listignored, listclean, listunknown = ignored, clean, unknown
1598 lookup, modified, added, unknown, ignored = [], [], [], [], []
1605 lookup, modified, added, unknown, ignored = [], [], [], [], []
1599 removed, deleted, clean = [], [], []
1606 removed, deleted, clean = [], [], []
1600
1607
1601 dmap = self._map
1608 dmap = self._map
1602 dmap.preload()
1609 dmap.preload()
1603
1610
1604 use_rust = True
1611 use_rust = True
1605
1612
1606 allowed_matchers = (
1613 allowed_matchers = (
1607 matchmod.alwaysmatcher,
1614 matchmod.alwaysmatcher,
1608 matchmod.differencematcher,
1615 matchmod.differencematcher,
1609 matchmod.exactmatcher,
1616 matchmod.exactmatcher,
1610 matchmod.includematcher,
1617 matchmod.includematcher,
1611 matchmod.intersectionmatcher,
1618 matchmod.intersectionmatcher,
1612 matchmod.nevermatcher,
1619 matchmod.nevermatcher,
1613 matchmod.unionmatcher,
1620 matchmod.unionmatcher,
1614 )
1621 )
1615
1622
1616 if rustmod is None:
1623 if rustmod is None:
1617 use_rust = False
1624 use_rust = False
1618 elif self._checkcase:
1625 elif self._checkcase:
1619 # Case-insensitive filesystems are not handled yet
1626 # Case-insensitive filesystems are not handled yet
1620 use_rust = False
1627 use_rust = False
1621 elif subrepos:
1628 elif subrepos:
1622 use_rust = False
1629 use_rust = False
1623 elif not isinstance(match, allowed_matchers):
1630 elif not isinstance(match, allowed_matchers):
1624 # Some matchers have yet to be implemented
1631 # Some matchers have yet to be implemented
1625 use_rust = False
1632 use_rust = False
1626
1633
1627 # Get the time from the filesystem so we can disambiguate files that
1634 # Get the time from the filesystem so we can disambiguate files that
1628 # appear modified in the present or future.
1635 # appear modified in the present or future.
1629 try:
1636 try:
1630 mtime_boundary = timestamp.get_fs_now(self._opener)
1637 mtime_boundary = timestamp.get_fs_now(self._opener)
1631 except OSError:
1638 except OSError:
1632 # In largefiles or readonly context
1639 # In largefiles or readonly context
1633 mtime_boundary = None
1640 mtime_boundary = None
1634
1641
1635 if use_rust:
1642 if use_rust:
1636 try:
1643 try:
1637 res = self._rust_status(
1644 res = self._rust_status(
1638 match, listclean, listignored, listunknown
1645 match, listclean, listignored, listunknown
1639 )
1646 )
1640 return res + (mtime_boundary,)
1647 return res + (mtime_boundary,)
1641 except rustmod.FallbackError:
1648 except rustmod.FallbackError:
1642 pass
1649 pass
1643
1650
1644 def noop(f):
1651 def noop(f):
1645 pass
1652 pass
1646
1653
1647 dcontains = dmap.__contains__
1654 dcontains = dmap.__contains__
1648 dget = dmap.__getitem__
1655 dget = dmap.__getitem__
1649 ladd = lookup.append # aka "unsure"
1656 ladd = lookup.append # aka "unsure"
1650 madd = modified.append
1657 madd = modified.append
1651 aadd = added.append
1658 aadd = added.append
1652 uadd = unknown.append if listunknown else noop
1659 uadd = unknown.append if listunknown else noop
1653 iadd = ignored.append if listignored else noop
1660 iadd = ignored.append if listignored else noop
1654 radd = removed.append
1661 radd = removed.append
1655 dadd = deleted.append
1662 dadd = deleted.append
1656 cadd = clean.append if listclean else noop
1663 cadd = clean.append if listclean else noop
1657 mexact = match.exact
1664 mexact = match.exact
1658 dirignore = self._dirignore
1665 dirignore = self._dirignore
1659 checkexec = self._checkexec
1666 checkexec = self._checkexec
1660 checklink = self._checklink
1667 checklink = self._checklink
1661 copymap = self._map.copymap
1668 copymap = self._map.copymap
1662
1669
1663 # We need to do full walks when either
1670 # We need to do full walks when either
1664 # - we're listing all clean files, or
1671 # - we're listing all clean files, or
1665 # - match.traversedir does something, because match.traversedir should
1672 # - match.traversedir does something, because match.traversedir should
1666 # be called for every dir in the working dir
1673 # be called for every dir in the working dir
1667 full = listclean or match.traversedir is not None
1674 full = listclean or match.traversedir is not None
1668 for fn, st in self.walk(
1675 for fn, st in self.walk(
1669 match, subrepos, listunknown, listignored, full=full
1676 match, subrepos, listunknown, listignored, full=full
1670 ).items():
1677 ).items():
1671 if not dcontains(fn):
1678 if not dcontains(fn):
1672 if (listignored or mexact(fn)) and dirignore(fn):
1679 if (listignored or mexact(fn)) and dirignore(fn):
1673 if listignored:
1680 if listignored:
1674 iadd(fn)
1681 iadd(fn)
1675 else:
1682 else:
1676 uadd(fn)
1683 uadd(fn)
1677 continue
1684 continue
1678
1685
1679 t = dget(fn)
1686 t = dget(fn)
1680 mode = t.mode
1687 mode = t.mode
1681 size = t.size
1688 size = t.size
1682
1689
1683 if not st and t.tracked:
1690 if not st and t.tracked:
1684 dadd(fn)
1691 dadd(fn)
1685 elif t.p2_info:
1692 elif t.p2_info:
1686 madd(fn)
1693 madd(fn)
1687 elif t.added:
1694 elif t.added:
1688 aadd(fn)
1695 aadd(fn)
1689 elif t.removed:
1696 elif t.removed:
1690 radd(fn)
1697 radd(fn)
1691 elif t.tracked:
1698 elif t.tracked:
1692 if not checklink and t.has_fallback_symlink:
1699 if not checklink and t.has_fallback_symlink:
1693 # If the file system does not support symlink, the mode
1700 # If the file system does not support symlink, the mode
1694 # might not be correctly stored in the dirstate, so do not
1701 # might not be correctly stored in the dirstate, so do not
1695 # trust it.
1702 # trust it.
1696 ladd(fn)
1703 ladd(fn)
1697 elif not checkexec and t.has_fallback_exec:
1704 elif not checkexec and t.has_fallback_exec:
1698 # If the file system does not support exec bits, the mode
1705 # If the file system does not support exec bits, the mode
1699 # might not be correctly stored in the dirstate, so do not
1706 # might not be correctly stored in the dirstate, so do not
1700 # trust it.
1707 # trust it.
1701 ladd(fn)
1708 ladd(fn)
1702 elif (
1709 elif (
1703 size >= 0
1710 size >= 0
1704 and (
1711 and (
1705 (size != st.st_size and size != st.st_size & _rangemask)
1712 (size != st.st_size and size != st.st_size & _rangemask)
1706 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1713 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1707 )
1714 )
1708 or fn in copymap
1715 or fn in copymap
1709 ):
1716 ):
1710 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1717 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1711 # issue6456: Size returned may be longer due to
1718 # issue6456: Size returned may be longer due to
1712 # encryption on EXT-4 fscrypt, undecided.
1719 # encryption on EXT-4 fscrypt, undecided.
1713 ladd(fn)
1720 ladd(fn)
1714 else:
1721 else:
1715 madd(fn)
1722 madd(fn)
1716 elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
1723 elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
1717 # There might be a change in the future if for example the
1724 # There might be a change in the future if for example the
1718 # internal clock is off, but this is a case where the issues
1725 # internal clock is off, but this is a case where the issues
1719 # the user would face would be a lot worse and there is
1726 # the user would face would be a lot worse and there is
1720 # nothing we can really do.
1727 # nothing we can really do.
1721 ladd(fn)
1728 ladd(fn)
1722 elif listclean:
1729 elif listclean:
1723 cadd(fn)
1730 cadd(fn)
1724 status = scmutil.status(
1731 status = scmutil.status(
1725 modified, added, removed, deleted, unknown, ignored, clean
1732 modified, added, removed, deleted, unknown, ignored, clean
1726 )
1733 )
1727 return (lookup, status, mtime_boundary)
1734 return (lookup, status, mtime_boundary)
1728
1735
1729 def matches(self, match):
1736 def matches(self, match):
1730 """
1737 """
1731 return files in the dirstate (in whatever state) filtered by match
1738 return files in the dirstate (in whatever state) filtered by match
1732 """
1739 """
1733 dmap = self._map
1740 dmap = self._map
1734 if rustmod is not None:
1741 if rustmod is not None:
1735 dmap = self._map._map
1742 dmap = self._map._map
1736
1743
1737 if match.always():
1744 if match.always():
1738 return dmap.keys()
1745 return dmap.keys()
1739 files = match.files()
1746 files = match.files()
1740 if match.isexact():
1747 if match.isexact():
1741 # fast path -- filter the other way around, since typically files is
1748 # fast path -- filter the other way around, since typically files is
1742 # much smaller than dmap
1749 # much smaller than dmap
1743 return [f for f in files if f in dmap]
1750 return [f for f in files if f in dmap]
1744 if match.prefix() and all(fn in dmap for fn in files):
1751 if match.prefix() and all(fn in dmap for fn in files):
1745 # fast path -- all the values are known to be files, so just return
1752 # fast path -- all the values are known to be files, so just return
1746 # that
1753 # that
1747 return list(files)
1754 return list(files)
1748 return [f for f in dmap if match(f)]
1755 return [f for f in dmap if match(f)]
1749
1756
1750 def _actualfilename(self, tr):
1757 def _actualfilename(self, tr):
1751 if tr:
1758 if tr:
1752 return self._pendingfilename
1759 return self._pendingfilename
1753 else:
1760 else:
1754 return self._filename
1761 return self._filename
1755
1762
1756 def all_file_names(self):
1763 def all_file_names(self):
1757 """list all filename currently used by this dirstate
1764 """list all filename currently used by this dirstate
1758
1765
1759 This is only used to do `hg rollback` related backup in the transaction
1766 This is only used to do `hg rollback` related backup in the transaction
1760 """
1767 """
1761 if not self._opener.exists(self._filename):
1768 if not self._opener.exists(self._filename):
1762 # no data every written to disk yet
1769 # no data every written to disk yet
1763 return ()
1770 return ()
1764 elif self._use_dirstate_v2:
1771 elif self._use_dirstate_v2:
1765 return (
1772 return (
1766 self._filename,
1773 self._filename,
1767 self._map.docket.data_filename(),
1774 self._map.docket.data_filename(),
1768 )
1775 )
1769 else:
1776 else:
1770 return (self._filename,)
1777 return (self._filename,)
1771
1778
1772 def verify(self, m1, m2, p1, narrow_matcher=None):
1779 def verify(self, m1, m2, p1, narrow_matcher=None):
1773 """
1780 """
1774 check the dirstate contents against the parent manifest and yield errors
1781 check the dirstate contents against the parent manifest and yield errors
1775 """
1782 """
1776 missing_from_p1 = _(
1783 missing_from_p1 = _(
1777 b"%s marked as tracked in p1 (%s) but not in manifest1\n"
1784 b"%s marked as tracked in p1 (%s) but not in manifest1\n"
1778 )
1785 )
1779 unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
1786 unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
1780 missing_from_ps = _(
1787 missing_from_ps = _(
1781 b"%s marked as modified, but not in either manifest\n"
1788 b"%s marked as modified, but not in either manifest\n"
1782 )
1789 )
1783 missing_from_ds = _(
1790 missing_from_ds = _(
1784 b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
1791 b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
1785 )
1792 )
1786 for f, entry in self.items():
1793 for f, entry in self.items():
1787 if entry.p1_tracked:
1794 if entry.p1_tracked:
1788 if entry.modified and f not in m1 and f not in m2:
1795 if entry.modified and f not in m1 and f not in m2:
1789 yield missing_from_ps % f
1796 yield missing_from_ps % f
1790 elif f not in m1:
1797 elif f not in m1:
1791 yield missing_from_p1 % (f, node.short(p1))
1798 yield missing_from_p1 % (f, node.short(p1))
1792 if entry.added and f in m1:
1799 if entry.added and f in m1:
1793 yield unexpected_in_p1 % f
1800 yield unexpected_in_p1 % f
1794 for f in m1:
1801 for f in m1:
1795 if narrow_matcher is not None and not narrow_matcher(f):
1802 if narrow_matcher is not None and not narrow_matcher(f):
1796 continue
1803 continue
1797 entry = self.get_entry(f)
1804 entry = self.get_entry(f)
1798 if not entry.p1_tracked:
1805 if not entry.p1_tracked:
1799 yield missing_from_ds % (f, node.short(p1))
1806 yield missing_from_ds % (f, node.short(p1))
General Comments 0
You need to be logged in to leave comments. Login now