##// END OF EJS Templates
dirstate: check that dirstate is clean at the initial context opening...
marmoute -
r51041:8ba5028d default
parent child Browse files
Show More
@@ -1,1751 +1,1764 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import collections
9 import collections
10 import contextlib
10 import contextlib
11 import os
11 import os
12 import stat
12 import stat
13 import uuid
13 import uuid
14
14
15 from .i18n import _
15 from .i18n import _
16 from .pycompat import delattr
16 from .pycompat import delattr
17
17
18 from hgdemandimport import tracing
18 from hgdemandimport import tracing
19
19
20 from . import (
20 from . import (
21 dirstatemap,
21 dirstatemap,
22 encoding,
22 encoding,
23 error,
23 error,
24 match as matchmod,
24 match as matchmod,
25 node,
25 node,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 util,
30 util,
31 )
31 )
32
32
33 from .dirstateutils import (
33 from .dirstateutils import (
34 timestamp,
34 timestamp,
35 )
35 )
36
36
37 from .interfaces import (
37 from .interfaces import (
38 dirstate as intdirstate,
38 dirstate as intdirstate,
39 util as interfaceutil,
39 util as interfaceutil,
40 )
40 )
41
41
parsers = policy.importmod('parsers')
rustmod = policy.importrust('dirstate')

# True when the accelerated Rust dirstate implementation could be imported.
HAS_FAST_DIRSTATE_V2 = rustmod is not None

# short local aliases used throughout this module
propertycache = util.propertycache
filecache = scmutil.filecache
_rangemask = dirstatemap.rangemask

DirstateItem = dirstatemap.DirstateItem
53
53
class repocache(filecache):
    """filecache for files in .hg/"""

    def join(self, obj, fname):
        # resolve fname relative to the repository's .hg/ directory
        return obj._opener.join(fname)
59
59
60
60
class rootcache(filecache):
    """filecache for files in the repository root"""

    def join(self, obj, fname):
        # resolve fname relative to the working-directory root
        return obj._join(fname)
66
66
67
67
def check_invalidated(func):
    """Check that ``func`` is called on a non-invalidated dirstate.

    The dirstate is in an "invalidated state" after an error occurred during
    its modification and remains so until the top level scope that framed
    such change has been exited.
    """

    def wrapper(self, *args, **kwargs):
        if self._invalidated_context:
            msg = 'calling `%s` after the dirstate was invalidated'
            msg %= func.__name__
            raise error.ProgrammingError(msg)
        return func(self, *args, **kwargs)

    return wrapper
84
84
85
85
def requires_changing_parents(func):
    """Restrict ``func`` to run only inside a `changing_parents` context."""

    def wrapper(self, *args, **kwargs):
        if not self.is_changing_parents:
            msg = 'calling `%s` outside of a changing_parents context'
            msg %= func.__name__
            raise error.ProgrammingError(msg)
        return func(self, *args, **kwargs)

    return check_invalidated(wrapper)
95
95
96
96
def requires_changing_files(func):
    """Restrict ``func`` to run only inside a `changing_files` context."""

    def wrapper(self, *args, **kwargs):
        if not self.is_changing_files:
            msg = 'calling `%s` outside of a `changing_files`'
            msg %= func.__name__
            raise error.ProgrammingError(msg)
        return func(self, *args, **kwargs)

    return check_invalidated(wrapper)
106
106
107
107
def requires_changing_any(func):
    """Restrict ``func`` to run only inside some `changing_*` context."""

    def wrapper(self, *args, **kwargs):
        if not self.is_changing_any:
            msg = 'calling `%s` outside of a changing context'
            msg %= func.__name__
            raise error.ProgrammingError(msg)
        return func(self, *args, **kwargs)

    return check_invalidated(wrapper)
117
117
118
118
def requires_not_changing_parents(func):
    """Forbid calling ``func`` from within a `changing_parents` context."""

    def wrapper(self, *args, **kwargs):
        if self.is_changing_parents:
            msg = 'calling `%s` inside of a changing_parents context'
            msg %= func.__name__
            raise error.ProgrammingError(msg)
        return func(self, *args, **kwargs)

    return check_invalidated(wrapper)
128
128
129
129
# identifiers for the two mutually exclusive kinds of dirstate change
CHANGE_TYPE_PARENTS = "parents"
CHANGE_TYPE_FILES = "files"
132
132
133
133
134 @interfaceutil.implementer(intdirstate.idirstate)
134 @interfaceutil.implementer(intdirstate.idirstate)
135 class dirstate:
135 class dirstate:
136
136
137 # used by largefile to avoid overwritting transaction callbacK
137 # used by largefile to avoid overwritting transaction callbacK
138 _tr_key_suffix = b''
138 _tr_key_suffix = b''
139
139
140 def __init__(
140 def __init__(
141 self,
141 self,
142 opener,
142 opener,
143 ui,
143 ui,
144 root,
144 root,
145 validate,
145 validate,
146 sparsematchfn,
146 sparsematchfn,
147 nodeconstants,
147 nodeconstants,
148 use_dirstate_v2,
148 use_dirstate_v2,
149 use_tracked_hint=False,
149 use_tracked_hint=False,
150 ):
150 ):
151 """Create a new dirstate object.
151 """Create a new dirstate object.
152
152
153 opener is an open()-like callable that can be used to open the
153 opener is an open()-like callable that can be used to open the
154 dirstate file; root is the root of the directory tracked by
154 dirstate file; root is the root of the directory tracked by
155 the dirstate.
155 the dirstate.
156 """
156 """
157 self._use_dirstate_v2 = use_dirstate_v2
157 self._use_dirstate_v2 = use_dirstate_v2
158 self._use_tracked_hint = use_tracked_hint
158 self._use_tracked_hint = use_tracked_hint
159 self._nodeconstants = nodeconstants
159 self._nodeconstants = nodeconstants
160 self._opener = opener
160 self._opener = opener
161 self._validate = validate
161 self._validate = validate
162 self._root = root
162 self._root = root
163 # Either build a sparse-matcher or None if sparse is disabled
163 # Either build a sparse-matcher or None if sparse is disabled
164 self._sparsematchfn = sparsematchfn
164 self._sparsematchfn = sparsematchfn
165 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
165 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
166 # UNC path pointing to root share (issue4557)
166 # UNC path pointing to root share (issue4557)
167 self._rootdir = pathutil.normasprefix(root)
167 self._rootdir = pathutil.normasprefix(root)
168 # True is any internal state may be different
168 # True is any internal state may be different
169 self._dirty = False
169 self._dirty = False
170 # True if the set of tracked file may be different
170 # True if the set of tracked file may be different
171 self._dirty_tracked_set = False
171 self._dirty_tracked_set = False
172 self._ui = ui
172 self._ui = ui
173 self._filecache = {}
173 self._filecache = {}
174 # nesting level of `changing_parents` context
174 # nesting level of `changing_parents` context
175 self._changing_level = 0
175 self._changing_level = 0
176 # the change currently underway
176 # the change currently underway
177 self._change_type = None
177 self._change_type = None
178 # number of open _running_status context
178 # number of open _running_status context
179 self._running_status = 0
179 self._running_status = 0
180 # True if the current dirstate changing operations have been
180 # True if the current dirstate changing operations have been
181 # invalidated (used to make sure all nested contexts have been exited)
181 # invalidated (used to make sure all nested contexts have been exited)
182 self._invalidated_context = False
182 self._invalidated_context = False
183 self._attached_to_a_transaction = False
183 self._attached_to_a_transaction = False
184 self._filename = b'dirstate'
184 self._filename = b'dirstate'
185 self._filename_th = b'dirstate-tracked-hint'
185 self._filename_th = b'dirstate-tracked-hint'
186 self._pendingfilename = b'%s.pending' % self._filename
186 self._pendingfilename = b'%s.pending' % self._filename
187 self._plchangecallbacks = {}
187 self._plchangecallbacks = {}
188 self._origpl = None
188 self._origpl = None
189 self._mapcls = dirstatemap.dirstatemap
189 self._mapcls = dirstatemap.dirstatemap
190 # Access and cache cwd early, so we don't access it for the first time
190 # Access and cache cwd early, so we don't access it for the first time
191 # after a working-copy update caused it to not exist (accessing it then
191 # after a working-copy update caused it to not exist (accessing it then
192 # raises an exception).
192 # raises an exception).
193 self._cwd
193 self._cwd
194
194
195 def refresh(self):
195 def refresh(self):
196 if '_branch' in vars(self):
196 if '_branch' in vars(self):
197 del self._branch
197 del self._branch
198 if '_map' in vars(self) and self._map.may_need_refresh():
198 if '_map' in vars(self) and self._map.may_need_refresh():
199 self.invalidate()
199 self.invalidate()
200
200
201 def prefetch_parents(self):
201 def prefetch_parents(self):
202 """make sure the parents are loaded
202 """make sure the parents are loaded
203
203
204 Used to avoid a race condition.
204 Used to avoid a race condition.
205 """
205 """
206 self._pl
206 self._pl
207
207
208 @contextlib.contextmanager
208 @contextlib.contextmanager
209 @check_invalidated
209 @check_invalidated
210 def running_status(self, repo):
210 def running_status(self, repo):
211 """Wrap a status operation
211 """Wrap a status operation
212
212
213 This context is not mutally exclusive with the `changing_*` context. It
213 This context is not mutally exclusive with the `changing_*` context. It
214 also do not warrant for the `wlock` to be taken.
214 also do not warrant for the `wlock` to be taken.
215
215
216 If the wlock is taken, this context will (in the future) behave in a
216 If the wlock is taken, this context will (in the future) behave in a
217 simple way, and ensure the data are scheduled for write when leaving
217 simple way, and ensure the data are scheduled for write when leaving
218 the top level context.
218 the top level context.
219
219
220 If the lock is not taken, it will only warrant that the data are either
220 If the lock is not taken, it will only warrant that the data are either
221 committed (written) and rolled back (invalidated) when exiting the top
221 committed (written) and rolled back (invalidated) when exiting the top
222 level context. The write/invalidate action must be performed by the
222 level context. The write/invalidate action must be performed by the
223 wrapped code.
223 wrapped code.
224
224
225
225
226 The expected logic is:
226 The expected logic is:
227
227
228 A: read the dirstate
228 A: read the dirstate
229 B: run status
229 B: run status
230 This might make the dirstate dirty by updating cache,
230 This might make the dirstate dirty by updating cache,
231 especially in Rust.
231 especially in Rust.
232 C: do more "post status fixup if relevant
232 C: do more "post status fixup if relevant
233 D: try to take the w-lock (this will invalidate the changes if they were raced)
233 D: try to take the w-lock (this will invalidate the changes if they were raced)
234 E0: if dirstate changed on disk β†’ discard change (done by dirstate internal)
234 E0: if dirstate changed on disk β†’ discard change (done by dirstate internal)
235 E1: elif lock was acquired β†’ write the changes
235 E1: elif lock was acquired β†’ write the changes
236 E2: else β†’ discard the changes
236 E2: else β†’ discard the changes
237 """
237 """
238 is_changing = self.is_changing_any
239 has_tr = repo.currenttransaction is not None
240 nested = bool(self._running_status)
241
242 first_and_alone = not (is_changing or has_tr or nested)
243
244 # enforce no change happened outside of a proper context.
245 if first_and_alone and self._dirty:
246 has_tr = repo.currenttransaction() is not None
247 if not has_tr and self._changing_level == 0 and self._dirty:
248 msg = "entering a status context, but dirstate is already dirty"
249 raise error.ProgrammingError(msg)
250
238 self._running_status += 1
251 self._running_status += 1
239 try:
252 try:
240 yield
253 yield
241 except Exception:
254 except Exception:
242 self.invalidate()
255 self.invalidate()
243 raise
256 raise
244 finally:
257 finally:
245 self._running_status -= 1
258 self._running_status -= 1
246 if self._invalidated_context:
259 if self._invalidated_context:
247 self.invalidate()
260 self.invalidate()
248
261
249 @contextlib.contextmanager
262 @contextlib.contextmanager
250 @check_invalidated
263 @check_invalidated
251 def _changing(self, repo, change_type):
264 def _changing(self, repo, change_type):
252 if repo.currentwlock() is None:
265 if repo.currentwlock() is None:
253 msg = b"trying to change the dirstate without holding the wlock"
266 msg = b"trying to change the dirstate without holding the wlock"
254 raise error.ProgrammingError(msg)
267 raise error.ProgrammingError(msg)
255
268
256 has_tr = repo.currenttransaction() is not None
269 has_tr = repo.currenttransaction() is not None
257 if not has_tr and self._changing_level == 0 and self._dirty:
270 if not has_tr and self._changing_level == 0 and self._dirty:
258 msg = "entering a changing context, but dirstate is already dirty"
271 msg = "entering a changing context, but dirstate is already dirty"
259 raise error.ProgrammingError(msg)
272 raise error.ProgrammingError(msg)
260
273
261 assert self._changing_level >= 0
274 assert self._changing_level >= 0
262 # different type of change are mutually exclusive
275 # different type of change are mutually exclusive
263 if self._change_type is None:
276 if self._change_type is None:
264 assert self._changing_level == 0
277 assert self._changing_level == 0
265 self._change_type = change_type
278 self._change_type = change_type
266 elif self._change_type != change_type:
279 elif self._change_type != change_type:
267 msg = (
280 msg = (
268 'trying to open "%s" dirstate-changing context while a "%s" is'
281 'trying to open "%s" dirstate-changing context while a "%s" is'
269 ' already open'
282 ' already open'
270 )
283 )
271 msg %= (change_type, self._change_type)
284 msg %= (change_type, self._change_type)
272 raise error.ProgrammingError(msg)
285 raise error.ProgrammingError(msg)
273 should_write = False
286 should_write = False
274 self._changing_level += 1
287 self._changing_level += 1
275 try:
288 try:
276 yield
289 yield
277 except: # re-raises
290 except: # re-raises
278 self.invalidate() # this will set `_invalidated_context`
291 self.invalidate() # this will set `_invalidated_context`
279 raise
292 raise
280 finally:
293 finally:
281 assert self._changing_level > 0
294 assert self._changing_level > 0
282 self._changing_level -= 1
295 self._changing_level -= 1
283 # If the dirstate is being invalidated, call invalidate again.
296 # If the dirstate is being invalidated, call invalidate again.
284 # This will throw away anything added by a upper context and
297 # This will throw away anything added by a upper context and
285 # reset the `_invalidated_context` flag when relevant
298 # reset the `_invalidated_context` flag when relevant
286 if self._changing_level <= 0:
299 if self._changing_level <= 0:
287 self._change_type = None
300 self._change_type = None
288 assert self._changing_level == 0
301 assert self._changing_level == 0
289 if self._invalidated_context:
302 if self._invalidated_context:
290 # make sure we invalidate anything an upper context might
303 # make sure we invalidate anything an upper context might
291 # have changed.
304 # have changed.
292 self.invalidate()
305 self.invalidate()
293 else:
306 else:
294 should_write = self._changing_level <= 0
307 should_write = self._changing_level <= 0
295 tr = repo.currenttransaction()
308 tr = repo.currenttransaction()
296 if has_tr != (tr is not None):
309 if has_tr != (tr is not None):
297 if has_tr:
310 if has_tr:
298 m = "transaction vanished while changing dirstate"
311 m = "transaction vanished while changing dirstate"
299 else:
312 else:
300 m = "transaction appeared while changing dirstate"
313 m = "transaction appeared while changing dirstate"
301 raise error.ProgrammingError(m)
314 raise error.ProgrammingError(m)
302 if should_write:
315 if should_write:
303 self.write(tr)
316 self.write(tr)
304
317
305 @contextlib.contextmanager
318 @contextlib.contextmanager
306 def changing_parents(self, repo):
319 def changing_parents(self, repo):
307 with self._changing(repo, CHANGE_TYPE_PARENTS) as c:
320 with self._changing(repo, CHANGE_TYPE_PARENTS) as c:
308 yield c
321 yield c
309
322
310 @contextlib.contextmanager
323 @contextlib.contextmanager
311 def changing_files(self, repo):
324 def changing_files(self, repo):
312 with self._changing(repo, CHANGE_TYPE_FILES) as c:
325 with self._changing(repo, CHANGE_TYPE_FILES) as c:
313 yield c
326 yield c
314
327
315 # here to help migration to the new code
328 # here to help migration to the new code
316 def parentchange(self):
329 def parentchange(self):
317 msg = (
330 msg = (
318 "Mercurial 6.4 and later requires call to "
331 "Mercurial 6.4 and later requires call to "
319 "`dirstate.changing_parents(repo)`"
332 "`dirstate.changing_parents(repo)`"
320 )
333 )
321 raise error.ProgrammingError(msg)
334 raise error.ProgrammingError(msg)
322
335
323 @property
336 @property
324 def is_changing_any(self):
337 def is_changing_any(self):
325 """Returns true if the dirstate is in the middle of a set of changes.
338 """Returns true if the dirstate is in the middle of a set of changes.
326
339
327 This returns True for any kind of change.
340 This returns True for any kind of change.
328 """
341 """
329 return self._changing_level > 0
342 return self._changing_level > 0
330
343
331 def pendingparentchange(self):
344 def pendingparentchange(self):
332 return self.is_changing_parent()
345 return self.is_changing_parent()
333
346
334 def is_changing_parent(self):
347 def is_changing_parent(self):
335 """Returns true if the dirstate is in the middle of a set of changes
348 """Returns true if the dirstate is in the middle of a set of changes
336 that modify the dirstate parent.
349 that modify the dirstate parent.
337 """
350 """
338 self._ui.deprecwarn(b"dirstate.is_changing_parents", b"6.5")
351 self._ui.deprecwarn(b"dirstate.is_changing_parents", b"6.5")
339 return self.is_changing_parents
352 return self.is_changing_parents
340
353
341 @property
354 @property
342 def is_changing_parents(self):
355 def is_changing_parents(self):
343 """Returns true if the dirstate is in the middle of a set of changes
356 """Returns true if the dirstate is in the middle of a set of changes
344 that modify the dirstate parent.
357 that modify the dirstate parent.
345 """
358 """
346 if self._changing_level <= 0:
359 if self._changing_level <= 0:
347 return False
360 return False
348 return self._change_type == CHANGE_TYPE_PARENTS
361 return self._change_type == CHANGE_TYPE_PARENTS
349
362
350 @property
363 @property
351 def is_changing_files(self):
364 def is_changing_files(self):
352 """Returns true if the dirstate is in the middle of a set of changes
365 """Returns true if the dirstate is in the middle of a set of changes
353 that modify the files tracked or their sources.
366 that modify the files tracked or their sources.
354 """
367 """
355 if self._changing_level <= 0:
368 if self._changing_level <= 0:
356 return False
369 return False
357 return self._change_type == CHANGE_TYPE_FILES
370 return self._change_type == CHANGE_TYPE_FILES
358
371
359 @propertycache
372 @propertycache
360 def _map(self):
373 def _map(self):
361 """Return the dirstate contents (see documentation for dirstatemap)."""
374 """Return the dirstate contents (see documentation for dirstatemap)."""
362 return self._mapcls(
375 return self._mapcls(
363 self._ui,
376 self._ui,
364 self._opener,
377 self._opener,
365 self._root,
378 self._root,
366 self._nodeconstants,
379 self._nodeconstants,
367 self._use_dirstate_v2,
380 self._use_dirstate_v2,
368 )
381 )
369
382
370 @property
383 @property
371 def _sparsematcher(self):
384 def _sparsematcher(self):
372 """The matcher for the sparse checkout.
385 """The matcher for the sparse checkout.
373
386
374 The working directory may not include every file from a manifest. The
387 The working directory may not include every file from a manifest. The
375 matcher obtained by this property will match a path if it is to be
388 matcher obtained by this property will match a path if it is to be
376 included in the working directory.
389 included in the working directory.
377
390
378 When sparse if disabled, return None.
391 When sparse if disabled, return None.
379 """
392 """
380 if self._sparsematchfn is None:
393 if self._sparsematchfn is None:
381 return None
394 return None
382 # TODO there is potential to cache this property. For now, the matcher
395 # TODO there is potential to cache this property. For now, the matcher
383 # is resolved on every access. (But the called function does use a
396 # is resolved on every access. (But the called function does use a
384 # cache to keep the lookup fast.)
397 # cache to keep the lookup fast.)
385 return self._sparsematchfn()
398 return self._sparsematchfn()
386
399
387 @repocache(b'branch')
400 @repocache(b'branch')
388 def _branch(self):
401 def _branch(self):
389 try:
402 try:
390 return self._opener.read(b"branch").strip() or b"default"
403 return self._opener.read(b"branch").strip() or b"default"
391 except FileNotFoundError:
404 except FileNotFoundError:
392 return b"default"
405 return b"default"
393
406
394 @property
407 @property
395 def _pl(self):
408 def _pl(self):
396 return self._map.parents()
409 return self._map.parents()
397
410
398 def hasdir(self, d):
411 def hasdir(self, d):
399 return self._map.hastrackeddir(d)
412 return self._map.hastrackeddir(d)
400
413
401 @rootcache(b'.hgignore')
414 @rootcache(b'.hgignore')
402 def _ignore(self):
415 def _ignore(self):
403 files = self._ignorefiles()
416 files = self._ignorefiles()
404 if not files:
417 if not files:
405 return matchmod.never()
418 return matchmod.never()
406
419
407 pats = [b'include:%s' % f for f in files]
420 pats = [b'include:%s' % f for f in files]
408 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
421 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
409
422
410 @propertycache
423 @propertycache
411 def _slash(self):
424 def _slash(self):
412 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
425 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
413
426
414 @propertycache
427 @propertycache
415 def _checklink(self):
428 def _checklink(self):
416 return util.checklink(self._root)
429 return util.checklink(self._root)
417
430
418 @propertycache
431 @propertycache
419 def _checkexec(self):
432 def _checkexec(self):
420 return bool(util.checkexec(self._root))
433 return bool(util.checkexec(self._root))
421
434
422 @propertycache
435 @propertycache
423 def _checkcase(self):
436 def _checkcase(self):
424 return not util.fscasesensitive(self._join(b'.hg'))
437 return not util.fscasesensitive(self._join(b'.hg'))
425
438
426 def _join(self, f):
439 def _join(self, f):
427 # much faster than os.path.join()
440 # much faster than os.path.join()
428 # it's safe because f is always a relative path
441 # it's safe because f is always a relative path
429 return self._rootdir + f
442 return self._rootdir + f
430
443
431 def flagfunc(self, buildfallback):
444 def flagfunc(self, buildfallback):
432 """build a callable that returns flags associated with a filename
445 """build a callable that returns flags associated with a filename
433
446
434 The information is extracted from three possible layers:
447 The information is extracted from three possible layers:
435 1. the file system if it supports the information
448 1. the file system if it supports the information
436 2. the "fallback" information stored in the dirstate if any
449 2. the "fallback" information stored in the dirstate if any
437 3. a more expensive mechanism inferring the flags from the parents.
450 3. a more expensive mechanism inferring the flags from the parents.
438 """
451 """
439
452
440 # small hack to cache the result of buildfallback()
453 # small hack to cache the result of buildfallback()
441 fallback_func = []
454 fallback_func = []
442
455
443 def get_flags(x):
456 def get_flags(x):
444 entry = None
457 entry = None
445 fallback_value = None
458 fallback_value = None
446 try:
459 try:
447 st = os.lstat(self._join(x))
460 st = os.lstat(self._join(x))
448 except OSError:
461 except OSError:
449 return b''
462 return b''
450
463
451 if self._checklink:
464 if self._checklink:
452 if util.statislink(st):
465 if util.statislink(st):
453 return b'l'
466 return b'l'
454 else:
467 else:
455 entry = self.get_entry(x)
468 entry = self.get_entry(x)
456 if entry.has_fallback_symlink:
469 if entry.has_fallback_symlink:
457 if entry.fallback_symlink:
470 if entry.fallback_symlink:
458 return b'l'
471 return b'l'
459 else:
472 else:
460 if not fallback_func:
473 if not fallback_func:
461 fallback_func.append(buildfallback())
474 fallback_func.append(buildfallback())
462 fallback_value = fallback_func[0](x)
475 fallback_value = fallback_func[0](x)
463 if b'l' in fallback_value:
476 if b'l' in fallback_value:
464 return b'l'
477 return b'l'
465
478
466 if self._checkexec:
479 if self._checkexec:
467 if util.statisexec(st):
480 if util.statisexec(st):
468 return b'x'
481 return b'x'
469 else:
482 else:
470 if entry is None:
483 if entry is None:
471 entry = self.get_entry(x)
484 entry = self.get_entry(x)
472 if entry.has_fallback_exec:
485 if entry.has_fallback_exec:
473 if entry.fallback_exec:
486 if entry.fallback_exec:
474 return b'x'
487 return b'x'
475 else:
488 else:
476 if fallback_value is None:
489 if fallback_value is None:
477 if not fallback_func:
490 if not fallback_func:
478 fallback_func.append(buildfallback())
491 fallback_func.append(buildfallback())
479 fallback_value = fallback_func[0](x)
492 fallback_value = fallback_func[0](x)
480 if b'x' in fallback_value:
493 if b'x' in fallback_value:
481 return b'x'
494 return b'x'
482 return b''
495 return b''
483
496
484 return get_flags
497 return get_flags
485
498
486 @propertycache
499 @propertycache
487 def _cwd(self):
500 def _cwd(self):
488 # internal config: ui.forcecwd
501 # internal config: ui.forcecwd
489 forcecwd = self._ui.config(b'ui', b'forcecwd')
502 forcecwd = self._ui.config(b'ui', b'forcecwd')
490 if forcecwd:
503 if forcecwd:
491 return forcecwd
504 return forcecwd
492 return encoding.getcwd()
505 return encoding.getcwd()
493
506
494 def getcwd(self):
507 def getcwd(self):
495 """Return the path from which a canonical path is calculated.
508 """Return the path from which a canonical path is calculated.
496
509
497 This path should be used to resolve file patterns or to convert
510 This path should be used to resolve file patterns or to convert
498 canonical paths back to file paths for display. It shouldn't be
511 canonical paths back to file paths for display. It shouldn't be
499 used to get real file paths. Use vfs functions instead.
512 used to get real file paths. Use vfs functions instead.
500 """
513 """
501 cwd = self._cwd
514 cwd = self._cwd
502 if cwd == self._root:
515 if cwd == self._root:
503 return b''
516 return b''
504 # self._root ends with a path separator if self._root is '/' or 'C:\'
517 # self._root ends with a path separator if self._root is '/' or 'C:\'
505 rootsep = self._root
518 rootsep = self._root
506 if not util.endswithsep(rootsep):
519 if not util.endswithsep(rootsep):
507 rootsep += pycompat.ossep
520 rootsep += pycompat.ossep
508 if cwd.startswith(rootsep):
521 if cwd.startswith(rootsep):
509 return cwd[len(rootsep) :]
522 return cwd[len(rootsep) :]
510 else:
523 else:
511 # we're outside the repo. return an absolute path.
524 # we're outside the repo. return an absolute path.
512 return cwd
525 return cwd
513
526
514 def pathto(self, f, cwd=None):
527 def pathto(self, f, cwd=None):
515 if cwd is None:
528 if cwd is None:
516 cwd = self.getcwd()
529 cwd = self.getcwd()
517 path = util.pathto(self._root, cwd, f)
530 path = util.pathto(self._root, cwd, f)
518 if self._slash:
531 if self._slash:
519 return util.pconvert(path)
532 return util.pconvert(path)
520 return path
533 return path
521
534
522 def get_entry(self, path):
535 def get_entry(self, path):
523 """return a DirstateItem for the associated path"""
536 """return a DirstateItem for the associated path"""
524 entry = self._map.get(path)
537 entry = self._map.get(path)
525 if entry is None:
538 if entry is None:
526 return DirstateItem()
539 return DirstateItem()
527 return entry
540 return entry
528
541
529 def __contains__(self, key):
542 def __contains__(self, key):
530 return key in self._map
543 return key in self._map
531
544
532 def __iter__(self):
545 def __iter__(self):
533 return iter(sorted(self._map))
546 return iter(sorted(self._map))
534
547
535 def items(self):
548 def items(self):
536 return self._map.items()
549 return self._map.items()
537
550
538 iteritems = items
551 iteritems = items
539
552
540 def parents(self):
553 def parents(self):
541 return [self._validate(p) for p in self._pl]
554 return [self._validate(p) for p in self._pl]
542
555
543 def p1(self):
556 def p1(self):
544 return self._validate(self._pl[0])
557 return self._validate(self._pl[0])
545
558
546 def p2(self):
559 def p2(self):
547 return self._validate(self._pl[1])
560 return self._validate(self._pl[1])
548
561
    @property
    def in_merge(self):
        """True if a merge is in progress"""
        # a non-null second parent is the definition of "merging"
        return self._pl[1] != self._nodeconstants.nullid
553
566
    def branch(self):
        """Return the current branch name, converted to local encoding."""
        return encoding.tolocal(self._branch)
556
569
    @requires_changing_parents
    def setparents(self, p1, p2=None):
        """Set dirstate parents to p1 and p2.

        When moving from two parents to one, "merged" entries are
        adjusted to normal and previous copy records discarded and
        returned by the call.

        See localrepo.setparents()
        """
        if p2 is None:
            p2 = self._nodeconstants.nullid
        if self._changing_level == 0:
            raise ValueError(
                b"cannot set dirstate parent outside of "
                b"dirstate.changing_parents context manager"
            )

        self._dirty = True
        oldp2 = self._pl[1]
        if self._origpl is None:
            # remember the pre-change parents so parent-change callbacks can
            # be notified with the original value at write time
            self._origpl = self._pl
        nullid = self._nodeconstants.nullid
        # True if we need to fold p2 related state back to a linear case
        fold_p2 = oldp2 != nullid and p2 == nullid
        return self._map.setparents(p1, p2, fold_p2=fold_p2)
583
596
    def setbranch(self, branch):
        """Record *branch* (local encoding) as the current branch.

        The value is written immediately to the ``branch`` file via an
        atomic temp file; on failure the partial write is discarded.
        """
        self.__class__._branch.set(self, encoding.fromlocal(branch))
        f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
        try:
            f.write(self._branch + b'\n')
            f.close()

            # make sure filecache has the correct stat info for _branch after
            # replacing the underlying file
            ce = self._filecache[b'_branch']
            if ce:
                ce.refresh()
        except:  # re-raises
            f.discard()
            raise
599
612
    def invalidate(self):
        """Causes the next access to reread the dirstate.

        This is different from localrepo.invalidatedirstate() because it always
        rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
        check whether the dirstate has changed before rereading it."""

        # drop the cached properties so they get recomputed on next access
        for a in ("_map", "_branch", "_ignore"):
            if a in self.__dict__:
                delattr(self, a)
        self._dirty = False
        self._dirty_tracked_set = False
        # record that we were invalidated while a change scope, transaction,
        # or status run was in flight, so later writes can be refused
        self._invalidated_context = bool(
            self._changing_level > 0
            or self._attached_to_a_transaction
            or self._running_status
        )
        self._origpl = None
618
631
619 @requires_changing_any
632 @requires_changing_any
620 def copy(self, source, dest):
633 def copy(self, source, dest):
621 """Mark dest as a copy of source. Unmark dest if source is None."""
634 """Mark dest as a copy of source. Unmark dest if source is None."""
622 if source == dest:
635 if source == dest:
623 return
636 return
624 self._dirty = True
637 self._dirty = True
625 if source is not None:
638 if source is not None:
626 self._check_sparse(source)
639 self._check_sparse(source)
627 self._map.copymap[dest] = source
640 self._map.copymap[dest] = source
628 else:
641 else:
629 self._map.copymap.pop(dest, None)
642 self._map.copymap.pop(dest, None)
630
643
    def copied(self, file):
        """Return the copy source of *file*, or None if it is not a copy."""
        return self._map.copymap.get(file, None)
633
646
    def copies(self):
        """Return the whole copy map (dest -> source)."""
        return self._map.copymap
636
649
    @requires_changing_files
    def set_tracked(self, filename, reset_copy=False):
        """a "public" method for generic code to mark a file as tracked

        This function is to be called outside of "update/merge" case. For
        example by a command like `hg add X`.

        if reset_copy is set, any existing copy information will be dropped.

        return True if the file was previously untracked, False otherwise.
        """
        self._dirty = True
        entry = self._map.get(filename)
        if entry is None or not entry.tracked:
            # only a newly-tracked name needs validity/clash checking
            self._check_new_tracked_filename(filename)
        pre_tracked = self._map.set_tracked(filename)
        if reset_copy:
            self._map.copymap.pop(filename, None)
        if pre_tracked:
            self._dirty_tracked_set = True
        return pre_tracked
658
671
659 @requires_changing_files
672 @requires_changing_files
660 def set_untracked(self, filename):
673 def set_untracked(self, filename):
661 """a "public" method for generic code to mark a file as untracked
674 """a "public" method for generic code to mark a file as untracked
662
675
663 This function is to be called outside of "update/merge" case. For
676 This function is to be called outside of "update/merge" case. For
664 example by a command like `hg remove X`.
677 example by a command like `hg remove X`.
665
678
666 return True the file was previously tracked, False otherwise.
679 return True the file was previously tracked, False otherwise.
667 """
680 """
668 ret = self._map.set_untracked(filename)
681 ret = self._map.set_untracked(filename)
669 if ret:
682 if ret:
670 self._dirty = True
683 self._dirty = True
671 self._dirty_tracked_set = True
684 self._dirty_tracked_set = True
672 return ret
685 return ret
673
686
    @requires_not_changing_parents
    def set_clean(self, filename, parentfiledata):
        """record that the current state of the file on disk is known to be clean"""
        self._dirty = True
        if not self._map[filename].tracked:
            self._check_new_tracked_filename(filename)
        # parentfiledata is the (mode, size, mtime) triple observed on disk
        (mode, size, mtime) = parentfiledata
        self._map.set_clean(filename, mode, size, mtime)
682
695
    @requires_not_changing_parents
    def set_possibly_dirty(self, filename):
        """record that the current state of the file on disk is unknown"""
        self._dirty = True
        self._map.set_possibly_dirty(filename)
688
701
689 @requires_changing_parents
702 @requires_changing_parents
690 def update_file_p1(
703 def update_file_p1(
691 self,
704 self,
692 filename,
705 filename,
693 p1_tracked,
706 p1_tracked,
694 ):
707 ):
695 """Set a file as tracked in the parent (or not)
708 """Set a file as tracked in the parent (or not)
696
709
697 This is to be called when adjust the dirstate to a new parent after an history
710 This is to be called when adjust the dirstate to a new parent after an history
698 rewriting operation.
711 rewriting operation.
699
712
700 It should not be called during a merge (p2 != nullid) and only within
713 It should not be called during a merge (p2 != nullid) and only within
701 a `with dirstate.changing_parents(repo):` context.
714 a `with dirstate.changing_parents(repo):` context.
702 """
715 """
703 if self.in_merge:
716 if self.in_merge:
704 msg = b'update_file_reference should not be called when merging'
717 msg = b'update_file_reference should not be called when merging'
705 raise error.ProgrammingError(msg)
718 raise error.ProgrammingError(msg)
706 entry = self._map.get(filename)
719 entry = self._map.get(filename)
707 if entry is None:
720 if entry is None:
708 wc_tracked = False
721 wc_tracked = False
709 else:
722 else:
710 wc_tracked = entry.tracked
723 wc_tracked = entry.tracked
711 if not (p1_tracked or wc_tracked):
724 if not (p1_tracked or wc_tracked):
712 # the file is no longer relevant to anyone
725 # the file is no longer relevant to anyone
713 if self._map.get(filename) is not None:
726 if self._map.get(filename) is not None:
714 self._map.reset_state(filename)
727 self._map.reset_state(filename)
715 self._dirty = True
728 self._dirty = True
716 elif (not p1_tracked) and wc_tracked:
729 elif (not p1_tracked) and wc_tracked:
717 if entry is not None and entry.added:
730 if entry is not None and entry.added:
718 return # avoid dropping copy information (maybe?)
731 return # avoid dropping copy information (maybe?)
719
732
720 self._map.reset_state(
733 self._map.reset_state(
721 filename,
734 filename,
722 wc_tracked,
735 wc_tracked,
723 p1_tracked,
736 p1_tracked,
724 # the underlying reference might have changed, we will have to
737 # the underlying reference might have changed, we will have to
725 # check it.
738 # check it.
726 has_meaningful_mtime=False,
739 has_meaningful_mtime=False,
727 )
740 )
728
741
    @requires_changing_parents
    def update_file(
        self,
        filename,
        wc_tracked,
        p1_tracked,
        p2_info=False,
        possibly_dirty=False,
        parentfiledata=None,
    ):
        """update the information about a file in the dirstate

        This is to be called when the dirstate's parent changes to keep track
        of what is the file situation in regards to the working copy and its parent.

        This function must be called within a `dirstate.changing_parents` context.

        note: the API is at an early stage and we might need to adjust it
        depending of what information ends up being relevant and useful to
        other processing.
        """
        # delegate to the shared implementation (also used by
        # hacky_extension_update_file)
        self._update_file(
            filename=filename,
            wc_tracked=wc_tracked,
            p1_tracked=p1_tracked,
            p2_info=p2_info,
            possibly_dirty=possibly_dirty,
            parentfiledata=parentfiledata,
        )
758
771
    # XXX since this makes the dirstate dirty, we should enforce that it is
    # done within an appropriate change-context that scopes the change and
    # ensures it eventually gets written on disk (or rolled back)
    def hacky_extension_update_file(self, *args, **kwargs):
        """NEVER USE THIS, YOU DO NOT NEED IT

        This function is a variant of "update_file" to be called by a small set
        of extensions, it also adjust the internal state of file, but can be
        called outside an `changing_parents` context.

        A very small number of extension meddle with the working copy content
        in a way that requires to adjust the dirstate accordingly. At the time
        this command is written they are :
        - keyword,
        - largefile,
        PLEASE DO NOT GROW THIS LIST ANY FURTHER.

        This function could probably be replaced by more semantic one (like
        "adjust expected size" or "always revalidate file content", etc)
        however at the time where this is written, this is too much of a detour
        to be considered.
        """
        self._update_file(
            *args,
            **kwargs,
        )
785
798
    def _update_file(
        self,
        filename,
        wc_tracked,
        p1_tracked,
        p2_info=False,
        possibly_dirty=False,
        parentfiledata=None,
    ):
        """Shared implementation behind update_file and
        hacky_extension_update_file: reset the map entry for *filename*
        and mark the dirstate dirty."""

        # note: I do not think we need to double check name clash here since we
        # are in a update/merge case that should already have taken care of
        # this. The test agrees

        self._dirty = True
        old_entry = self._map.get(filename)
        if old_entry is None:
            prev_tracked = False
        else:
            prev_tracked = old_entry.tracked
        if prev_tracked != wc_tracked:
            # the tracked-file set changed, so the tracked-hint key must be
            # refreshed on write
            self._dirty_tracked_set = True

        self._map.reset_state(
            filename,
            wc_tracked,
            p1_tracked,
            p2_info=p2_info,
            has_meaningful_mtime=not possibly_dirty,
            parentfiledata=parentfiledata,
        )
817
830
    def _check_new_tracked_filename(self, filename):
        """Abort if *filename* cannot become tracked.

        Rejects invalid names, names already tracked as a directory, names
        shadowed by an existing tracked file along their path, and names
        outside the sparse profile.
        """
        scmutil.checkfilename(filename)
        if self._map.hastrackeddir(filename):
            msg = _(b'directory %r already in dirstate')
            msg %= pycompat.bytestr(filename)
            raise error.Abort(msg)
        # shadows
        for d in pathutil.finddirs(filename):
            if self._map.hastrackeddir(d):
                break
            entry = self._map.get(d)
            if entry is not None and not entry.removed:
                msg = _(b'file %r in dirstate clashes with %r')
                msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
                raise error.Abort(msg)
        self._check_sparse(filename)
834
847
    def _check_sparse(self, filename):
        """Check that a filename is inside the sparse profile"""
        sparsematch = self._sparsematcher
        # only enforce when a restrictive sparse matcher is configured
        if sparsematch is not None and not sparsematch.always():
            if not sparsematch(filename):
                msg = _(b"cannot add '%s' - it is outside the sparse checkout")
                hint = _(
                    b'include file with `hg debugsparse --include <pattern>` or use '
                    b'`hg add -s <file>` to include file directory while adding'
                )
                raise error.Abort(msg % filename, hint=hint)
846
859
    def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
        """Discover the canonical-case form of *path*.

        ``normed`` is the case-normalized form of ``path``; ``exists`` says
        whether the path is known to exist on disk (None = not yet checked).
        When the path exists, the discovered folding is cached in
        ``storemap`` keyed by ``normed``.
        """
        if exists is None:
            exists = os.path.lexists(os.path.join(self._root, path))
        if not exists:
            # Maybe a path component exists
            if not ignoremissing and b'/' in path:
                d, f = path.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, None)
                folded = d + b"/" + f
            else:
                # No path components, preserve original case
                folded = path
        else:
            # recursively normalize leading directory components
            # against dirstate
            if b'/' in normed:
                d, f = normed.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, True)
                r = self._root + b"/" + d
                folded = d + b"/" + util.fspath(f, r)
            else:
                folded = util.fspath(normed, self._root)
            storemap[normed] = folded

        return folded
872
885
873 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
886 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
874 normed = util.normcase(path)
887 normed = util.normcase(path)
875 folded = self._map.filefoldmap.get(normed, None)
888 folded = self._map.filefoldmap.get(normed, None)
876 if folded is None:
889 if folded is None:
877 if isknown:
890 if isknown:
878 folded = path
891 folded = path
879 else:
892 else:
880 folded = self._discoverpath(
893 folded = self._discoverpath(
881 path, normed, ignoremissing, exists, self._map.filefoldmap
894 path, normed, ignoremissing, exists, self._map.filefoldmap
882 )
895 )
883 return folded
896 return folded
884
897
    def _normalize(self, path, isknown, ignoremissing=False, exists=None):
        """Normalize the case of *path* against both file and directory
        fold maps, discovering (and caching in dirfoldmap) when unknown."""
        normed = util.normcase(path)
        folded = self._map.filefoldmap.get(normed, None)
        if folded is None:
            folded = self._map.dirfoldmap.get(normed, None)
            if folded is None:
                if isknown:
                    folded = path
                else:
                    # store discovered result in dirfoldmap so that future
                    # normalizefile calls don't start matching directories
                    folded = self._discoverpath(
                        path, normed, ignoremissing, exists, self._map.dirfoldmap
                    )
        return folded
900
913
    def normalize(self, path, isknown=False, ignoremissing=False):
        """
        normalize the case of a pathname when on a casefolding filesystem

        isknown specifies whether the filename came from walking the
        disk, to avoid extra filesystem access.

        If ignoremissing is True, missing path are returned
        unchanged. Otherwise, we try harder to normalize possibly
        existing path components.

        The normalized case is determined based on the following precedence:

        - version of name already stored in the dirstate
        - version of name stored on disk
        - version provided via command arguments
        """

        # on case-sensitive filesystems the path is already canonical
        if self._checkcase:
            return self._normalize(path, isknown, ignoremissing)
        return path
922
935
    # XXX this method is barely used, as a result:
    # - its semantic is unclear
    # - do we really need it ?
    @requires_changing_parents
    def clear(self):
        """Drop every entry from the dirstate map and mark it dirty."""
        self._map.clear()
        self._dirty = True
930
943
    @requires_changing_parents
    def rebuild(self, parent, allfiles, changedfiles=None):
        """Reset the dirstate to describe revision *parent*.

        ``allfiles`` is the complete file list of the target revision.
        When ``changedfiles`` is given, only those entries are refreshed
        (files absent from ``allfiles`` are dropped); otherwise the whole
        dirstate is rebuilt from scratch.
        """
        matcher = self._sparsematcher
        if matcher is not None and not matcher.always():
            # should not add non-matching files
            allfiles = [f for f in allfiles if matcher(f)]
            if changedfiles:
                changedfiles = [f for f in changedfiles if matcher(f)]

            if changedfiles is not None:
                # these files will be deleted from the dirstate when they are
                # not found to be in allfiles
                dirstatefilestoremove = {f for f in self if not matcher(f)}
                changedfiles = dirstatefilestoremove.union(changedfiles)

        if changedfiles is None:
            # Rebuild entire dirstate
            to_lookup = allfiles
            to_drop = []
            self.clear()
        elif len(changedfiles) < 10:
            # Avoid turning allfiles into a set, which can be expensive if it's
            # large.
            to_lookup = []
            to_drop = []
            for f in changedfiles:
                if f in allfiles:
                    to_lookup.append(f)
                else:
                    to_drop.append(f)
        else:
            changedfilesset = set(changedfiles)
            to_lookup = changedfilesset & set(allfiles)
            to_drop = changedfilesset - to_lookup

        if self._origpl is None:
            self._origpl = self._pl
        self._map.setparents(parent, self._nodeconstants.nullid)

        for f in to_lookup:
            if self.in_merge:
                self.set_tracked(f)
            else:
                self._map.reset_state(
                    f,
                    wc_tracked=True,
                    p1_tracked=True,
                )
        for f in to_drop:
            self._map.reset_state(f)

        self._dirty = True
983
996
    def identity(self):
        """Return identity of dirstate itself to detect changing in storage

        If identity of previous dirstate is equal to this, writing
        changes based on the former dirstate out can keep consistency.
        """
        return self._map.identity
991
1004
992 def write(self, tr):
1005 def write(self, tr):
993 if not self._dirty:
1006 if not self._dirty:
994 return
1007 return
995 # make sure we don't request a write of invalidated content
1008 # make sure we don't request a write of invalidated content
996 # XXX move before the dirty check once `unlock` stop calling `write`
1009 # XXX move before the dirty check once `unlock` stop calling `write`
997 assert not self._invalidated_context
1010 assert not self._invalidated_context
998
1011
999 write_key = self._use_tracked_hint and self._dirty_tracked_set
1012 write_key = self._use_tracked_hint and self._dirty_tracked_set
1000 if tr:
1013 if tr:
1001
1014
1002 def on_abort(tr):
1015 def on_abort(tr):
1003 self._attached_to_a_transaction = False
1016 self._attached_to_a_transaction = False
1004 self.invalidate()
1017 self.invalidate()
1005
1018
1006 # make sure we invalidate the current change on abort
1019 # make sure we invalidate the current change on abort
1007 if tr is not None:
1020 if tr is not None:
1008 tr.addabort(
1021 tr.addabort(
1009 b'dirstate-invalidate%s' % self._tr_key_suffix,
1022 b'dirstate-invalidate%s' % self._tr_key_suffix,
1010 on_abort,
1023 on_abort,
1011 )
1024 )
1012
1025
1013 self._attached_to_a_transaction = True
1026 self._attached_to_a_transaction = True
1014
1027
1015 def on_success(f):
1028 def on_success(f):
1016 self._attached_to_a_transaction = False
1029 self._attached_to_a_transaction = False
1017 self._writedirstate(tr, f),
1030 self._writedirstate(tr, f),
1018
1031
1019 # delay writing in-memory changes out
1032 # delay writing in-memory changes out
1020 tr.addfilegenerator(
1033 tr.addfilegenerator(
1021 b'dirstate-1-main%s' % self._tr_key_suffix,
1034 b'dirstate-1-main%s' % self._tr_key_suffix,
1022 (self._filename,),
1035 (self._filename,),
1023 on_success,
1036 on_success,
1024 location=b'plain',
1037 location=b'plain',
1025 post_finalize=True,
1038 post_finalize=True,
1026 )
1039 )
1027 if write_key:
1040 if write_key:
1028 tr.addfilegenerator(
1041 tr.addfilegenerator(
1029 b'dirstate-2-key-post%s' % self._tr_key_suffix,
1042 b'dirstate-2-key-post%s' % self._tr_key_suffix,
1030 (self._filename_th,),
1043 (self._filename_th,),
1031 lambda f: self._write_tracked_hint(tr, f),
1044 lambda f: self._write_tracked_hint(tr, f),
1032 location=b'plain',
1045 location=b'plain',
1033 post_finalize=True,
1046 post_finalize=True,
1034 )
1047 )
1035 return
1048 return
1036
1049
1037 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
1050 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
1038 with file(self._filename) as f:
1051 with file(self._filename) as f:
1039 self._writedirstate(tr, f)
1052 self._writedirstate(tr, f)
1040 if write_key:
1053 if write_key:
1041 # we update the key-file after writing to make sure reader have a
1054 # we update the key-file after writing to make sure reader have a
1042 # key that match the newly written content
1055 # key that match the newly written content
1043 with file(self._filename_th) as f:
1056 with file(self._filename_th) as f:
1044 self._write_tracked_hint(tr, f)
1057 self._write_tracked_hint(tr, f)
1045
1058
    def delete_tracked_hint(self):
        """remove the tracked_hint file

        To be used by format downgrades operation"""
        self._opener.unlink(self._filename_th)
        self._use_tracked_hint = False
1052
1065
    def addparentchangecallback(self, category, callback):
        """add a callback to be called when the wd parents are changed

        Callback will be called with the following arguments:
            dirstate, (oldp1, oldp2), (newp1, newp2)

        Category is a unique identifier to allow overwriting an old callback
        with a newer callback.
        """
        self._plchangecallbacks[category] = callback
1063
1076
1064 def _writedirstate(self, tr, st):
1077 def _writedirstate(self, tr, st):
1065 # make sure we don't write invalidated content
1078 # make sure we don't write invalidated content
1066 assert not self._invalidated_context
1079 assert not self._invalidated_context
1067 # notify callbacks about parents change
1080 # notify callbacks about parents change
1068 if self._origpl is not None and self._origpl != self._pl:
1081 if self._origpl is not None and self._origpl != self._pl:
1069 for c, callback in sorted(self._plchangecallbacks.items()):
1082 for c, callback in sorted(self._plchangecallbacks.items()):
1070 callback(self, self._origpl, self._pl)
1083 callback(self, self._origpl, self._pl)
1071 self._origpl = None
1084 self._origpl = None
1072 self._map.write(tr, st)
1085 self._map.write(tr, st)
1073 self._dirty = False
1086 self._dirty = False
1074 self._dirty_tracked_set = False
1087 self._dirty_tracked_set = False
1075
1088
1076 def _write_tracked_hint(self, tr, f):
1089 def _write_tracked_hint(self, tr, f):
1077 key = node.hex(uuid.uuid4().bytes)
1090 key = node.hex(uuid.uuid4().bytes)
1078 f.write(b"1\n%s\n" % key) # 1 is the format version
1091 f.write(b"1\n%s\n" % key) # 1 is the format version
1079
1092
1080 def _dirignore(self, f):
1093 def _dirignore(self, f):
1081 if self._ignore(f):
1094 if self._ignore(f):
1082 return True
1095 return True
1083 for p in pathutil.finddirs(f):
1096 for p in pathutil.finddirs(f):
1084 if self._ignore(p):
1097 if self._ignore(p):
1085 return True
1098 return True
1086 return False
1099 return False
1087
1100
1088 def _ignorefiles(self):
1101 def _ignorefiles(self):
1089 files = []
1102 files = []
1090 if os.path.exists(self._join(b'.hgignore')):
1103 if os.path.exists(self._join(b'.hgignore')):
1091 files.append(self._join(b'.hgignore'))
1104 files.append(self._join(b'.hgignore'))
1092 for name, path in self._ui.configitems(b"ui"):
1105 for name, path in self._ui.configitems(b"ui"):
1093 if name == b'ignore' or name.startswith(b'ignore.'):
1106 if name == b'ignore' or name.startswith(b'ignore.'):
1094 # we need to use os.path.join here rather than self._join
1107 # we need to use os.path.join here rather than self._join
1095 # because path is arbitrary and user-specified
1108 # because path is arbitrary and user-specified
1096 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1109 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1097 return files
1110 return files
1098
1111
1099 def _ignorefileandline(self, f):
1112 def _ignorefileandline(self, f):
1100 files = collections.deque(self._ignorefiles())
1113 files = collections.deque(self._ignorefiles())
1101 visited = set()
1114 visited = set()
1102 while files:
1115 while files:
1103 i = files.popleft()
1116 i = files.popleft()
1104 patterns = matchmod.readpatternfile(
1117 patterns = matchmod.readpatternfile(
1105 i, self._ui.warn, sourceinfo=True
1118 i, self._ui.warn, sourceinfo=True
1106 )
1119 )
1107 for pattern, lineno, line in patterns:
1120 for pattern, lineno, line in patterns:
1108 kind, p = matchmod._patsplit(pattern, b'glob')
1121 kind, p = matchmod._patsplit(pattern, b'glob')
1109 if kind == b"subinclude":
1122 if kind == b"subinclude":
1110 if p not in visited:
1123 if p not in visited:
1111 files.append(p)
1124 files.append(p)
1112 continue
1125 continue
1113 m = matchmod.match(
1126 m = matchmod.match(
1114 self._root, b'', [], [pattern], warn=self._ui.warn
1127 self._root, b'', [], [pattern], warn=self._ui.warn
1115 )
1128 )
1116 if m(f):
1129 if m(f):
1117 return (i, lineno, line)
1130 return (i, lineno, line)
1118 visited.add(i)
1131 visited.add(i)
1119 return (None, -1, b"")
1132 return (None, -1, b"")
1120
1133
1121 def _walkexplicit(self, match, subrepos):
1134 def _walkexplicit(self, match, subrepos):
1122 """Get stat data about the files explicitly specified by match.
1135 """Get stat data about the files explicitly specified by match.
1123
1136
1124 Return a triple (results, dirsfound, dirsnotfound).
1137 Return a triple (results, dirsfound, dirsnotfound).
1125 - results is a mapping from filename to stat result. It also contains
1138 - results is a mapping from filename to stat result. It also contains
1126 listings mapping subrepos and .hg to None.
1139 listings mapping subrepos and .hg to None.
1127 - dirsfound is a list of files found to be directories.
1140 - dirsfound is a list of files found to be directories.
1128 - dirsnotfound is a list of files that the dirstate thinks are
1141 - dirsnotfound is a list of files that the dirstate thinks are
1129 directories and that were not found."""
1142 directories and that were not found."""
1130
1143
1131 def badtype(mode):
1144 def badtype(mode):
1132 kind = _(b'unknown')
1145 kind = _(b'unknown')
1133 if stat.S_ISCHR(mode):
1146 if stat.S_ISCHR(mode):
1134 kind = _(b'character device')
1147 kind = _(b'character device')
1135 elif stat.S_ISBLK(mode):
1148 elif stat.S_ISBLK(mode):
1136 kind = _(b'block device')
1149 kind = _(b'block device')
1137 elif stat.S_ISFIFO(mode):
1150 elif stat.S_ISFIFO(mode):
1138 kind = _(b'fifo')
1151 kind = _(b'fifo')
1139 elif stat.S_ISSOCK(mode):
1152 elif stat.S_ISSOCK(mode):
1140 kind = _(b'socket')
1153 kind = _(b'socket')
1141 elif stat.S_ISDIR(mode):
1154 elif stat.S_ISDIR(mode):
1142 kind = _(b'directory')
1155 kind = _(b'directory')
1143 return _(b'unsupported file type (type is %s)') % kind
1156 return _(b'unsupported file type (type is %s)') % kind
1144
1157
1145 badfn = match.bad
1158 badfn = match.bad
1146 dmap = self._map
1159 dmap = self._map
1147 lstat = os.lstat
1160 lstat = os.lstat
1148 getkind = stat.S_IFMT
1161 getkind = stat.S_IFMT
1149 dirkind = stat.S_IFDIR
1162 dirkind = stat.S_IFDIR
1150 regkind = stat.S_IFREG
1163 regkind = stat.S_IFREG
1151 lnkkind = stat.S_IFLNK
1164 lnkkind = stat.S_IFLNK
1152 join = self._join
1165 join = self._join
1153 dirsfound = []
1166 dirsfound = []
1154 foundadd = dirsfound.append
1167 foundadd = dirsfound.append
1155 dirsnotfound = []
1168 dirsnotfound = []
1156 notfoundadd = dirsnotfound.append
1169 notfoundadd = dirsnotfound.append
1157
1170
1158 if not match.isexact() and self._checkcase:
1171 if not match.isexact() and self._checkcase:
1159 normalize = self._normalize
1172 normalize = self._normalize
1160 else:
1173 else:
1161 normalize = None
1174 normalize = None
1162
1175
1163 files = sorted(match.files())
1176 files = sorted(match.files())
1164 subrepos.sort()
1177 subrepos.sort()
1165 i, j = 0, 0
1178 i, j = 0, 0
1166 while i < len(files) and j < len(subrepos):
1179 while i < len(files) and j < len(subrepos):
1167 subpath = subrepos[j] + b"/"
1180 subpath = subrepos[j] + b"/"
1168 if files[i] < subpath:
1181 if files[i] < subpath:
1169 i += 1
1182 i += 1
1170 continue
1183 continue
1171 while i < len(files) and files[i].startswith(subpath):
1184 while i < len(files) and files[i].startswith(subpath):
1172 del files[i]
1185 del files[i]
1173 j += 1
1186 j += 1
1174
1187
1175 if not files or b'' in files:
1188 if not files or b'' in files:
1176 files = [b'']
1189 files = [b'']
1177 # constructing the foldmap is expensive, so don't do it for the
1190 # constructing the foldmap is expensive, so don't do it for the
1178 # common case where files is ['']
1191 # common case where files is ['']
1179 normalize = None
1192 normalize = None
1180 results = dict.fromkeys(subrepos)
1193 results = dict.fromkeys(subrepos)
1181 results[b'.hg'] = None
1194 results[b'.hg'] = None
1182
1195
1183 for ff in files:
1196 for ff in files:
1184 if normalize:
1197 if normalize:
1185 nf = normalize(ff, False, True)
1198 nf = normalize(ff, False, True)
1186 else:
1199 else:
1187 nf = ff
1200 nf = ff
1188 if nf in results:
1201 if nf in results:
1189 continue
1202 continue
1190
1203
1191 try:
1204 try:
1192 st = lstat(join(nf))
1205 st = lstat(join(nf))
1193 kind = getkind(st.st_mode)
1206 kind = getkind(st.st_mode)
1194 if kind == dirkind:
1207 if kind == dirkind:
1195 if nf in dmap:
1208 if nf in dmap:
1196 # file replaced by dir on disk but still in dirstate
1209 # file replaced by dir on disk but still in dirstate
1197 results[nf] = None
1210 results[nf] = None
1198 foundadd((nf, ff))
1211 foundadd((nf, ff))
1199 elif kind == regkind or kind == lnkkind:
1212 elif kind == regkind or kind == lnkkind:
1200 results[nf] = st
1213 results[nf] = st
1201 else:
1214 else:
1202 badfn(ff, badtype(kind))
1215 badfn(ff, badtype(kind))
1203 if nf in dmap:
1216 if nf in dmap:
1204 results[nf] = None
1217 results[nf] = None
1205 except (OSError) as inst:
1218 except (OSError) as inst:
1206 # nf not found on disk - it is dirstate only
1219 # nf not found on disk - it is dirstate only
1207 if nf in dmap: # does it exactly match a missing file?
1220 if nf in dmap: # does it exactly match a missing file?
1208 results[nf] = None
1221 results[nf] = None
1209 else: # does it match a missing directory?
1222 else: # does it match a missing directory?
1210 if self._map.hasdir(nf):
1223 if self._map.hasdir(nf):
1211 notfoundadd(nf)
1224 notfoundadd(nf)
1212 else:
1225 else:
1213 badfn(ff, encoding.strtolocal(inst.strerror))
1226 badfn(ff, encoding.strtolocal(inst.strerror))
1214
1227
1215 # match.files() may contain explicitly-specified paths that shouldn't
1228 # match.files() may contain explicitly-specified paths that shouldn't
1216 # be taken; drop them from the list of files found. dirsfound/notfound
1229 # be taken; drop them from the list of files found. dirsfound/notfound
1217 # aren't filtered here because they will be tested later.
1230 # aren't filtered here because they will be tested later.
1218 if match.anypats():
1231 if match.anypats():
1219 for f in list(results):
1232 for f in list(results):
1220 if f == b'.hg' or f in subrepos:
1233 if f == b'.hg' or f in subrepos:
1221 # keep sentinel to disable further out-of-repo walks
1234 # keep sentinel to disable further out-of-repo walks
1222 continue
1235 continue
1223 if not match(f):
1236 if not match(f):
1224 del results[f]
1237 del results[f]
1225
1238
1226 # Case insensitive filesystems cannot rely on lstat() failing to detect
1239 # Case insensitive filesystems cannot rely on lstat() failing to detect
1227 # a case-only rename. Prune the stat object for any file that does not
1240 # a case-only rename. Prune the stat object for any file that does not
1228 # match the case in the filesystem, if there are multiple files that
1241 # match the case in the filesystem, if there are multiple files that
1229 # normalize to the same path.
1242 # normalize to the same path.
1230 if match.isexact() and self._checkcase:
1243 if match.isexact() and self._checkcase:
1231 normed = {}
1244 normed = {}
1232
1245
1233 for f, st in results.items():
1246 for f, st in results.items():
1234 if st is None:
1247 if st is None:
1235 continue
1248 continue
1236
1249
1237 nc = util.normcase(f)
1250 nc = util.normcase(f)
1238 paths = normed.get(nc)
1251 paths = normed.get(nc)
1239
1252
1240 if paths is None:
1253 if paths is None:
1241 paths = set()
1254 paths = set()
1242 normed[nc] = paths
1255 normed[nc] = paths
1243
1256
1244 paths.add(f)
1257 paths.add(f)
1245
1258
1246 for norm, paths in normed.items():
1259 for norm, paths in normed.items():
1247 if len(paths) > 1:
1260 if len(paths) > 1:
1248 for path in paths:
1261 for path in paths:
1249 folded = self._discoverpath(
1262 folded = self._discoverpath(
1250 path, norm, True, None, self._map.dirfoldmap
1263 path, norm, True, None, self._map.dirfoldmap
1251 )
1264 )
1252 if path != folded:
1265 if path != folded:
1253 results[path] = None
1266 results[path] = None
1254
1267
1255 return results, dirsfound, dirsnotfound
1268 return results, dirsfound, dirsnotfound
1256
1269
1257 def walk(self, match, subrepos, unknown, ignored, full=True):
1270 def walk(self, match, subrepos, unknown, ignored, full=True):
1258 """
1271 """
1259 Walk recursively through the directory tree, finding all files
1272 Walk recursively through the directory tree, finding all files
1260 matched by match.
1273 matched by match.
1261
1274
1262 If full is False, maybe skip some known-clean files.
1275 If full is False, maybe skip some known-clean files.
1263
1276
1264 Return a dict mapping filename to stat-like object (either
1277 Return a dict mapping filename to stat-like object (either
1265 mercurial.osutil.stat instance or return value of os.stat()).
1278 mercurial.osutil.stat instance or return value of os.stat()).
1266
1279
1267 """
1280 """
1268 # full is a flag that extensions that hook into walk can use -- this
1281 # full is a flag that extensions that hook into walk can use -- this
1269 # implementation doesn't use it at all. This satisfies the contract
1282 # implementation doesn't use it at all. This satisfies the contract
1270 # because we only guarantee a "maybe".
1283 # because we only guarantee a "maybe".
1271
1284
1272 if ignored:
1285 if ignored:
1273 ignore = util.never
1286 ignore = util.never
1274 dirignore = util.never
1287 dirignore = util.never
1275 elif unknown:
1288 elif unknown:
1276 ignore = self._ignore
1289 ignore = self._ignore
1277 dirignore = self._dirignore
1290 dirignore = self._dirignore
1278 else:
1291 else:
1279 # if not unknown and not ignored, drop dir recursion and step 2
1292 # if not unknown and not ignored, drop dir recursion and step 2
1280 ignore = util.always
1293 ignore = util.always
1281 dirignore = util.always
1294 dirignore = util.always
1282
1295
1283 if self._sparsematchfn is not None:
1296 if self._sparsematchfn is not None:
1284 em = matchmod.exact(match.files())
1297 em = matchmod.exact(match.files())
1285 sm = matchmod.unionmatcher([self._sparsematcher, em])
1298 sm = matchmod.unionmatcher([self._sparsematcher, em])
1286 match = matchmod.intersectmatchers(match, sm)
1299 match = matchmod.intersectmatchers(match, sm)
1287
1300
1288 matchfn = match.matchfn
1301 matchfn = match.matchfn
1289 matchalways = match.always()
1302 matchalways = match.always()
1290 matchtdir = match.traversedir
1303 matchtdir = match.traversedir
1291 dmap = self._map
1304 dmap = self._map
1292 listdir = util.listdir
1305 listdir = util.listdir
1293 lstat = os.lstat
1306 lstat = os.lstat
1294 dirkind = stat.S_IFDIR
1307 dirkind = stat.S_IFDIR
1295 regkind = stat.S_IFREG
1308 regkind = stat.S_IFREG
1296 lnkkind = stat.S_IFLNK
1309 lnkkind = stat.S_IFLNK
1297 join = self._join
1310 join = self._join
1298
1311
1299 exact = skipstep3 = False
1312 exact = skipstep3 = False
1300 if match.isexact(): # match.exact
1313 if match.isexact(): # match.exact
1301 exact = True
1314 exact = True
1302 dirignore = util.always # skip step 2
1315 dirignore = util.always # skip step 2
1303 elif match.prefix(): # match.match, no patterns
1316 elif match.prefix(): # match.match, no patterns
1304 skipstep3 = True
1317 skipstep3 = True
1305
1318
1306 if not exact and self._checkcase:
1319 if not exact and self._checkcase:
1307 normalize = self._normalize
1320 normalize = self._normalize
1308 normalizefile = self._normalizefile
1321 normalizefile = self._normalizefile
1309 skipstep3 = False
1322 skipstep3 = False
1310 else:
1323 else:
1311 normalize = self._normalize
1324 normalize = self._normalize
1312 normalizefile = None
1325 normalizefile = None
1313
1326
1314 # step 1: find all explicit files
1327 # step 1: find all explicit files
1315 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1328 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1316 if matchtdir:
1329 if matchtdir:
1317 for d in work:
1330 for d in work:
1318 matchtdir(d[0])
1331 matchtdir(d[0])
1319 for d in dirsnotfound:
1332 for d in dirsnotfound:
1320 matchtdir(d)
1333 matchtdir(d)
1321
1334
1322 skipstep3 = skipstep3 and not (work or dirsnotfound)
1335 skipstep3 = skipstep3 and not (work or dirsnotfound)
1323 work = [d for d in work if not dirignore(d[0])]
1336 work = [d for d in work if not dirignore(d[0])]
1324
1337
1325 # step 2: visit subdirectories
1338 # step 2: visit subdirectories
1326 def traverse(work, alreadynormed):
1339 def traverse(work, alreadynormed):
1327 wadd = work.append
1340 wadd = work.append
1328 while work:
1341 while work:
1329 tracing.counter('dirstate.walk work', len(work))
1342 tracing.counter('dirstate.walk work', len(work))
1330 nd = work.pop()
1343 nd = work.pop()
1331 visitentries = match.visitchildrenset(nd)
1344 visitentries = match.visitchildrenset(nd)
1332 if not visitentries:
1345 if not visitentries:
1333 continue
1346 continue
1334 if visitentries == b'this' or visitentries == b'all':
1347 if visitentries == b'this' or visitentries == b'all':
1335 visitentries = None
1348 visitentries = None
1336 skip = None
1349 skip = None
1337 if nd != b'':
1350 if nd != b'':
1338 skip = b'.hg'
1351 skip = b'.hg'
1339 try:
1352 try:
1340 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1353 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1341 entries = listdir(join(nd), stat=True, skip=skip)
1354 entries = listdir(join(nd), stat=True, skip=skip)
1342 except (PermissionError, FileNotFoundError) as inst:
1355 except (PermissionError, FileNotFoundError) as inst:
1343 match.bad(
1356 match.bad(
1344 self.pathto(nd), encoding.strtolocal(inst.strerror)
1357 self.pathto(nd), encoding.strtolocal(inst.strerror)
1345 )
1358 )
1346 continue
1359 continue
1347 for f, kind, st in entries:
1360 for f, kind, st in entries:
1348 # Some matchers may return files in the visitentries set,
1361 # Some matchers may return files in the visitentries set,
1349 # instead of 'this', if the matcher explicitly mentions them
1362 # instead of 'this', if the matcher explicitly mentions them
1350 # and is not an exactmatcher. This is acceptable; we do not
1363 # and is not an exactmatcher. This is acceptable; we do not
1351 # make any hard assumptions about file-or-directory below
1364 # make any hard assumptions about file-or-directory below
1352 # based on the presence of `f` in visitentries. If
1365 # based on the presence of `f` in visitentries. If
1353 # visitchildrenset returned a set, we can always skip the
1366 # visitchildrenset returned a set, we can always skip the
1354 # entries *not* in the set it provided regardless of whether
1367 # entries *not* in the set it provided regardless of whether
1355 # they're actually a file or a directory.
1368 # they're actually a file or a directory.
1356 if visitentries and f not in visitentries:
1369 if visitentries and f not in visitentries:
1357 continue
1370 continue
1358 if normalizefile:
1371 if normalizefile:
1359 # even though f might be a directory, we're only
1372 # even though f might be a directory, we're only
1360 # interested in comparing it to files currently in the
1373 # interested in comparing it to files currently in the
1361 # dmap -- therefore normalizefile is enough
1374 # dmap -- therefore normalizefile is enough
1362 nf = normalizefile(
1375 nf = normalizefile(
1363 nd and (nd + b"/" + f) or f, True, True
1376 nd and (nd + b"/" + f) or f, True, True
1364 )
1377 )
1365 else:
1378 else:
1366 nf = nd and (nd + b"/" + f) or f
1379 nf = nd and (nd + b"/" + f) or f
1367 if nf not in results:
1380 if nf not in results:
1368 if kind == dirkind:
1381 if kind == dirkind:
1369 if not ignore(nf):
1382 if not ignore(nf):
1370 if matchtdir:
1383 if matchtdir:
1371 matchtdir(nf)
1384 matchtdir(nf)
1372 wadd(nf)
1385 wadd(nf)
1373 if nf in dmap and (matchalways or matchfn(nf)):
1386 if nf in dmap and (matchalways or matchfn(nf)):
1374 results[nf] = None
1387 results[nf] = None
1375 elif kind == regkind or kind == lnkkind:
1388 elif kind == regkind or kind == lnkkind:
1376 if nf in dmap:
1389 if nf in dmap:
1377 if matchalways or matchfn(nf):
1390 if matchalways or matchfn(nf):
1378 results[nf] = st
1391 results[nf] = st
1379 elif (matchalways or matchfn(nf)) and not ignore(
1392 elif (matchalways or matchfn(nf)) and not ignore(
1380 nf
1393 nf
1381 ):
1394 ):
1382 # unknown file -- normalize if necessary
1395 # unknown file -- normalize if necessary
1383 if not alreadynormed:
1396 if not alreadynormed:
1384 nf = normalize(nf, False, True)
1397 nf = normalize(nf, False, True)
1385 results[nf] = st
1398 results[nf] = st
1386 elif nf in dmap and (matchalways or matchfn(nf)):
1399 elif nf in dmap and (matchalways or matchfn(nf)):
1387 results[nf] = None
1400 results[nf] = None
1388
1401
1389 for nd, d in work:
1402 for nd, d in work:
1390 # alreadynormed means that processwork doesn't have to do any
1403 # alreadynormed means that processwork doesn't have to do any
1391 # expensive directory normalization
1404 # expensive directory normalization
1392 alreadynormed = not normalize or nd == d
1405 alreadynormed = not normalize or nd == d
1393 traverse([d], alreadynormed)
1406 traverse([d], alreadynormed)
1394
1407
1395 for s in subrepos:
1408 for s in subrepos:
1396 del results[s]
1409 del results[s]
1397 del results[b'.hg']
1410 del results[b'.hg']
1398
1411
1399 # step 3: visit remaining files from dmap
1412 # step 3: visit remaining files from dmap
1400 if not skipstep3 and not exact:
1413 if not skipstep3 and not exact:
1401 # If a dmap file is not in results yet, it was either
1414 # If a dmap file is not in results yet, it was either
1402 # a) not matching matchfn b) ignored, c) missing, or d) under a
1415 # a) not matching matchfn b) ignored, c) missing, or d) under a
1403 # symlink directory.
1416 # symlink directory.
1404 if not results and matchalways:
1417 if not results and matchalways:
1405 visit = [f for f in dmap]
1418 visit = [f for f in dmap]
1406 else:
1419 else:
1407 visit = [f for f in dmap if f not in results and matchfn(f)]
1420 visit = [f for f in dmap if f not in results and matchfn(f)]
1408 visit.sort()
1421 visit.sort()
1409
1422
1410 if unknown:
1423 if unknown:
1411 # unknown == True means we walked all dirs under the roots
1424 # unknown == True means we walked all dirs under the roots
1412 # that wasn't ignored, and everything that matched was stat'ed
1425 # that wasn't ignored, and everything that matched was stat'ed
1413 # and is already in results.
1426 # and is already in results.
1414 # The rest must thus be ignored or under a symlink.
1427 # The rest must thus be ignored or under a symlink.
1415 audit_path = pathutil.pathauditor(self._root, cached=True)
1428 audit_path = pathutil.pathauditor(self._root, cached=True)
1416
1429
1417 for nf in iter(visit):
1430 for nf in iter(visit):
1418 # If a stat for the same file was already added with a
1431 # If a stat for the same file was already added with a
1419 # different case, don't add one for this, since that would
1432 # different case, don't add one for this, since that would
1420 # make it appear as if the file exists under both names
1433 # make it appear as if the file exists under both names
1421 # on disk.
1434 # on disk.
1422 if (
1435 if (
1423 normalizefile
1436 normalizefile
1424 and normalizefile(nf, True, True) in results
1437 and normalizefile(nf, True, True) in results
1425 ):
1438 ):
1426 results[nf] = None
1439 results[nf] = None
1427 # Report ignored items in the dmap as long as they are not
1440 # Report ignored items in the dmap as long as they are not
1428 # under a symlink directory.
1441 # under a symlink directory.
1429 elif audit_path.check(nf):
1442 elif audit_path.check(nf):
1430 try:
1443 try:
1431 results[nf] = lstat(join(nf))
1444 results[nf] = lstat(join(nf))
1432 # file was just ignored, no links, and exists
1445 # file was just ignored, no links, and exists
1433 except OSError:
1446 except OSError:
1434 # file doesn't exist
1447 # file doesn't exist
1435 results[nf] = None
1448 results[nf] = None
1436 else:
1449 else:
1437 # It's either missing or under a symlink directory
1450 # It's either missing or under a symlink directory
1438 # which we in this case report as missing
1451 # which we in this case report as missing
1439 results[nf] = None
1452 results[nf] = None
1440 else:
1453 else:
1441 # We may not have walked the full directory tree above,
1454 # We may not have walked the full directory tree above,
1442 # so stat and check everything we missed.
1455 # so stat and check everything we missed.
1443 iv = iter(visit)
1456 iv = iter(visit)
1444 for st in util.statfiles([join(i) for i in visit]):
1457 for st in util.statfiles([join(i) for i in visit]):
1445 results[next(iv)] = st
1458 results[next(iv)] = st
1446 return results
1459 return results
1447
1460
1448 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1461 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1449 if self._sparsematchfn is not None:
1462 if self._sparsematchfn is not None:
1450 em = matchmod.exact(matcher.files())
1463 em = matchmod.exact(matcher.files())
1451 sm = matchmod.unionmatcher([self._sparsematcher, em])
1464 sm = matchmod.unionmatcher([self._sparsematcher, em])
1452 matcher = matchmod.intersectmatchers(matcher, sm)
1465 matcher = matchmod.intersectmatchers(matcher, sm)
1453 # Force Rayon (Rust parallelism library) to respect the number of
1466 # Force Rayon (Rust parallelism library) to respect the number of
1454 # workers. This is a temporary workaround until Rust code knows
1467 # workers. This is a temporary workaround until Rust code knows
1455 # how to read the config file.
1468 # how to read the config file.
1456 numcpus = self._ui.configint(b"worker", b"numcpus")
1469 numcpus = self._ui.configint(b"worker", b"numcpus")
1457 if numcpus is not None:
1470 if numcpus is not None:
1458 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1471 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1459
1472
1460 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1473 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1461 if not workers_enabled:
1474 if not workers_enabled:
1462 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1475 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1463
1476
1464 (
1477 (
1465 lookup,
1478 lookup,
1466 modified,
1479 modified,
1467 added,
1480 added,
1468 removed,
1481 removed,
1469 deleted,
1482 deleted,
1470 clean,
1483 clean,
1471 ignored,
1484 ignored,
1472 unknown,
1485 unknown,
1473 warnings,
1486 warnings,
1474 bad,
1487 bad,
1475 traversed,
1488 traversed,
1476 dirty,
1489 dirty,
1477 ) = rustmod.status(
1490 ) = rustmod.status(
1478 self._map._map,
1491 self._map._map,
1479 matcher,
1492 matcher,
1480 self._rootdir,
1493 self._rootdir,
1481 self._ignorefiles(),
1494 self._ignorefiles(),
1482 self._checkexec,
1495 self._checkexec,
1483 bool(list_clean),
1496 bool(list_clean),
1484 bool(list_ignored),
1497 bool(list_ignored),
1485 bool(list_unknown),
1498 bool(list_unknown),
1486 bool(matcher.traversedir),
1499 bool(matcher.traversedir),
1487 )
1500 )
1488
1501
1489 self._dirty |= dirty
1502 self._dirty |= dirty
1490
1503
1491 if matcher.traversedir:
1504 if matcher.traversedir:
1492 for dir in traversed:
1505 for dir in traversed:
1493 matcher.traversedir(dir)
1506 matcher.traversedir(dir)
1494
1507
1495 if self._ui.warn:
1508 if self._ui.warn:
1496 for item in warnings:
1509 for item in warnings:
1497 if isinstance(item, tuple):
1510 if isinstance(item, tuple):
1498 file_path, syntax = item
1511 file_path, syntax = item
1499 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1512 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1500 file_path,
1513 file_path,
1501 syntax,
1514 syntax,
1502 )
1515 )
1503 self._ui.warn(msg)
1516 self._ui.warn(msg)
1504 else:
1517 else:
1505 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1518 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1506 self._ui.warn(
1519 self._ui.warn(
1507 msg
1520 msg
1508 % (
1521 % (
1509 pathutil.canonpath(
1522 pathutil.canonpath(
1510 self._rootdir, self._rootdir, item
1523 self._rootdir, self._rootdir, item
1511 ),
1524 ),
1512 b"No such file or directory",
1525 b"No such file or directory",
1513 )
1526 )
1514 )
1527 )
1515
1528
1516 for fn, message in bad:
1529 for fn, message in bad:
1517 matcher.bad(fn, encoding.strtolocal(message))
1530 matcher.bad(fn, encoding.strtolocal(message))
1518
1531
1519 status = scmutil.status(
1532 status = scmutil.status(
1520 modified=modified,
1533 modified=modified,
1521 added=added,
1534 added=added,
1522 removed=removed,
1535 removed=removed,
1523 deleted=deleted,
1536 deleted=deleted,
1524 unknown=unknown,
1537 unknown=unknown,
1525 ignored=ignored,
1538 ignored=ignored,
1526 clean=clean,
1539 clean=clean,
1527 )
1540 )
1528 return (lookup, status)
1541 return (lookup, status)
1529
1542
1530 # XXX since this can make the dirstate dirty (through rust), we should
1543 # XXX since this can make the dirstate dirty (through rust), we should
1531 # enforce that it is done withing an appropriate change-context that scope
1544 # enforce that it is done withing an appropriate change-context that scope
1532 # the change and ensure it eventually get written on disk (or rolled back)
1545 # the change and ensure it eventually get written on disk (or rolled back)
1533 def status(self, match, subrepos, ignored, clean, unknown):
1546 def status(self, match, subrepos, ignored, clean, unknown):
1534 """Determine the status of the working copy relative to the
1547 """Determine the status of the working copy relative to the
1535 dirstate and return a pair of (unsure, status), where status is of type
1548 dirstate and return a pair of (unsure, status), where status is of type
1536 scmutil.status and:
1549 scmutil.status and:
1537
1550
1538 unsure:
1551 unsure:
1539 files that might have been modified since the dirstate was
1552 files that might have been modified since the dirstate was
1540 written, but need to be read to be sure (size is the same
1553 written, but need to be read to be sure (size is the same
1541 but mtime differs)
1554 but mtime differs)
1542 status.modified:
1555 status.modified:
1543 files that have definitely been modified since the dirstate
1556 files that have definitely been modified since the dirstate
1544 was written (different size or mode)
1557 was written (different size or mode)
1545 status.clean:
1558 status.clean:
1546 files that have definitely not been modified since the
1559 files that have definitely not been modified since the
1547 dirstate was written
1560 dirstate was written
1548 """
1561 """
1549 listignored, listclean, listunknown = ignored, clean, unknown
1562 listignored, listclean, listunknown = ignored, clean, unknown
1550 lookup, modified, added, unknown, ignored = [], [], [], [], []
1563 lookup, modified, added, unknown, ignored = [], [], [], [], []
1551 removed, deleted, clean = [], [], []
1564 removed, deleted, clean = [], [], []
1552
1565
1553 dmap = self._map
1566 dmap = self._map
1554 dmap.preload()
1567 dmap.preload()
1555
1568
1556 use_rust = True
1569 use_rust = True
1557
1570
1558 allowed_matchers = (
1571 allowed_matchers = (
1559 matchmod.alwaysmatcher,
1572 matchmod.alwaysmatcher,
1560 matchmod.differencematcher,
1573 matchmod.differencematcher,
1561 matchmod.exactmatcher,
1574 matchmod.exactmatcher,
1562 matchmod.includematcher,
1575 matchmod.includematcher,
1563 matchmod.intersectionmatcher,
1576 matchmod.intersectionmatcher,
1564 matchmod.nevermatcher,
1577 matchmod.nevermatcher,
1565 matchmod.unionmatcher,
1578 matchmod.unionmatcher,
1566 )
1579 )
1567
1580
1568 if rustmod is None:
1581 if rustmod is None:
1569 use_rust = False
1582 use_rust = False
1570 elif self._checkcase:
1583 elif self._checkcase:
1571 # Case-insensitive filesystems are not handled yet
1584 # Case-insensitive filesystems are not handled yet
1572 use_rust = False
1585 use_rust = False
1573 elif subrepos:
1586 elif subrepos:
1574 use_rust = False
1587 use_rust = False
1575 elif not isinstance(match, allowed_matchers):
1588 elif not isinstance(match, allowed_matchers):
1576 # Some matchers have yet to be implemented
1589 # Some matchers have yet to be implemented
1577 use_rust = False
1590 use_rust = False
1578
1591
1579 # Get the time from the filesystem so we can disambiguate files that
1592 # Get the time from the filesystem so we can disambiguate files that
1580 # appear modified in the present or future.
1593 # appear modified in the present or future.
1581 try:
1594 try:
1582 mtime_boundary = timestamp.get_fs_now(self._opener)
1595 mtime_boundary = timestamp.get_fs_now(self._opener)
1583 except OSError:
1596 except OSError:
1584 # In largefiles or readonly context
1597 # In largefiles or readonly context
1585 mtime_boundary = None
1598 mtime_boundary = None
1586
1599
1587 if use_rust:
1600 if use_rust:
1588 try:
1601 try:
1589 res = self._rust_status(
1602 res = self._rust_status(
1590 match, listclean, listignored, listunknown
1603 match, listclean, listignored, listunknown
1591 )
1604 )
1592 return res + (mtime_boundary,)
1605 return res + (mtime_boundary,)
1593 except rustmod.FallbackError:
1606 except rustmod.FallbackError:
1594 pass
1607 pass
1595
1608
1596 def noop(f):
1609 def noop(f):
1597 pass
1610 pass
1598
1611
1599 dcontains = dmap.__contains__
1612 dcontains = dmap.__contains__
1600 dget = dmap.__getitem__
1613 dget = dmap.__getitem__
1601 ladd = lookup.append # aka "unsure"
1614 ladd = lookup.append # aka "unsure"
1602 madd = modified.append
1615 madd = modified.append
1603 aadd = added.append
1616 aadd = added.append
1604 uadd = unknown.append if listunknown else noop
1617 uadd = unknown.append if listunknown else noop
1605 iadd = ignored.append if listignored else noop
1618 iadd = ignored.append if listignored else noop
1606 radd = removed.append
1619 radd = removed.append
1607 dadd = deleted.append
1620 dadd = deleted.append
1608 cadd = clean.append if listclean else noop
1621 cadd = clean.append if listclean else noop
1609 mexact = match.exact
1622 mexact = match.exact
1610 dirignore = self._dirignore
1623 dirignore = self._dirignore
1611 checkexec = self._checkexec
1624 checkexec = self._checkexec
1612 checklink = self._checklink
1625 checklink = self._checklink
1613 copymap = self._map.copymap
1626 copymap = self._map.copymap
1614
1627
1615 # We need to do full walks when either
1628 # We need to do full walks when either
1616 # - we're listing all clean files, or
1629 # - we're listing all clean files, or
1617 # - match.traversedir does something, because match.traversedir should
1630 # - match.traversedir does something, because match.traversedir should
1618 # be called for every dir in the working dir
1631 # be called for every dir in the working dir
1619 full = listclean or match.traversedir is not None
1632 full = listclean or match.traversedir is not None
1620 for fn, st in self.walk(
1633 for fn, st in self.walk(
1621 match, subrepos, listunknown, listignored, full=full
1634 match, subrepos, listunknown, listignored, full=full
1622 ).items():
1635 ).items():
1623 if not dcontains(fn):
1636 if not dcontains(fn):
1624 if (listignored or mexact(fn)) and dirignore(fn):
1637 if (listignored or mexact(fn)) and dirignore(fn):
1625 if listignored:
1638 if listignored:
1626 iadd(fn)
1639 iadd(fn)
1627 else:
1640 else:
1628 uadd(fn)
1641 uadd(fn)
1629 continue
1642 continue
1630
1643
1631 t = dget(fn)
1644 t = dget(fn)
1632 mode = t.mode
1645 mode = t.mode
1633 size = t.size
1646 size = t.size
1634
1647
1635 if not st and t.tracked:
1648 if not st and t.tracked:
1636 dadd(fn)
1649 dadd(fn)
1637 elif t.p2_info:
1650 elif t.p2_info:
1638 madd(fn)
1651 madd(fn)
1639 elif t.added:
1652 elif t.added:
1640 aadd(fn)
1653 aadd(fn)
1641 elif t.removed:
1654 elif t.removed:
1642 radd(fn)
1655 radd(fn)
1643 elif t.tracked:
1656 elif t.tracked:
1644 if not checklink and t.has_fallback_symlink:
1657 if not checklink and t.has_fallback_symlink:
1645 # If the file system does not support symlink, the mode
1658 # If the file system does not support symlink, the mode
1646 # might not be correctly stored in the dirstate, so do not
1659 # might not be correctly stored in the dirstate, so do not
1647 # trust it.
1660 # trust it.
1648 ladd(fn)
1661 ladd(fn)
1649 elif not checkexec and t.has_fallback_exec:
1662 elif not checkexec and t.has_fallback_exec:
1650 # If the file system does not support exec bits, the mode
1663 # If the file system does not support exec bits, the mode
1651 # might not be correctly stored in the dirstate, so do not
1664 # might not be correctly stored in the dirstate, so do not
1652 # trust it.
1665 # trust it.
1653 ladd(fn)
1666 ladd(fn)
1654 elif (
1667 elif (
1655 size >= 0
1668 size >= 0
1656 and (
1669 and (
1657 (size != st.st_size and size != st.st_size & _rangemask)
1670 (size != st.st_size and size != st.st_size & _rangemask)
1658 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1671 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1659 )
1672 )
1660 or fn in copymap
1673 or fn in copymap
1661 ):
1674 ):
1662 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1675 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1663 # issue6456: Size returned may be longer due to
1676 # issue6456: Size returned may be longer due to
1664 # encryption on EXT-4 fscrypt, undecided.
1677 # encryption on EXT-4 fscrypt, undecided.
1665 ladd(fn)
1678 ladd(fn)
1666 else:
1679 else:
1667 madd(fn)
1680 madd(fn)
1668 elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
1681 elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
1669 # There might be a change in the future if for example the
1682 # There might be a change in the future if for example the
1670 # internal clock is off, but this is a case where the issues
1683 # internal clock is off, but this is a case where the issues
1671 # the user would face would be a lot worse and there is
1684 # the user would face would be a lot worse and there is
1672 # nothing we can really do.
1685 # nothing we can really do.
1673 ladd(fn)
1686 ladd(fn)
1674 elif listclean:
1687 elif listclean:
1675 cadd(fn)
1688 cadd(fn)
1676 status = scmutil.status(
1689 status = scmutil.status(
1677 modified, added, removed, deleted, unknown, ignored, clean
1690 modified, added, removed, deleted, unknown, ignored, clean
1678 )
1691 )
1679 return (lookup, status, mtime_boundary)
1692 return (lookup, status, mtime_boundary)
1680
1693
1681 def matches(self, match):
1694 def matches(self, match):
1682 """
1695 """
1683 return files in the dirstate (in whatever state) filtered by match
1696 return files in the dirstate (in whatever state) filtered by match
1684 """
1697 """
1685 dmap = self._map
1698 dmap = self._map
1686 if rustmod is not None:
1699 if rustmod is not None:
1687 dmap = self._map._map
1700 dmap = self._map._map
1688
1701
1689 if match.always():
1702 if match.always():
1690 return dmap.keys()
1703 return dmap.keys()
1691 files = match.files()
1704 files = match.files()
1692 if match.isexact():
1705 if match.isexact():
1693 # fast path -- filter the other way around, since typically files is
1706 # fast path -- filter the other way around, since typically files is
1694 # much smaller than dmap
1707 # much smaller than dmap
1695 return [f for f in files if f in dmap]
1708 return [f for f in files if f in dmap]
1696 if match.prefix() and all(fn in dmap for fn in files):
1709 if match.prefix() and all(fn in dmap for fn in files):
1697 # fast path -- all the values are known to be files, so just return
1710 # fast path -- all the values are known to be files, so just return
1698 # that
1711 # that
1699 return list(files)
1712 return list(files)
1700 return [f for f in dmap if match(f)]
1713 return [f for f in dmap if match(f)]
1701
1714
1702 def _actualfilename(self, tr):
1715 def _actualfilename(self, tr):
1703 if tr:
1716 if tr:
1704 return self._pendingfilename
1717 return self._pendingfilename
1705 else:
1718 else:
1706 return self._filename
1719 return self._filename
1707
1720
1708 def all_file_names(self):
1721 def all_file_names(self):
1709 """list all filename currently used by this dirstate
1722 """list all filename currently used by this dirstate
1710
1723
1711 This is only used to do `hg rollback` related backup in the transaction
1724 This is only used to do `hg rollback` related backup in the transaction
1712 """
1725 """
1713 if not self._opener.exists(self._filename):
1726 if not self._opener.exists(self._filename):
1714 # no data every written to disk yet
1727 # no data every written to disk yet
1715 return ()
1728 return ()
1716 elif self._use_dirstate_v2:
1729 elif self._use_dirstate_v2:
1717 return (
1730 return (
1718 self._filename,
1731 self._filename,
1719 self._map.docket.data_filename(),
1732 self._map.docket.data_filename(),
1720 )
1733 )
1721 else:
1734 else:
1722 return (self._filename,)
1735 return (self._filename,)
1723
1736
1724 def verify(self, m1, m2, p1, narrow_matcher=None):
1737 def verify(self, m1, m2, p1, narrow_matcher=None):
1725 """
1738 """
1726 check the dirstate contents against the parent manifest and yield errors
1739 check the dirstate contents against the parent manifest and yield errors
1727 """
1740 """
1728 missing_from_p1 = _(
1741 missing_from_p1 = _(
1729 b"%s marked as tracked in p1 (%s) but not in manifest1\n"
1742 b"%s marked as tracked in p1 (%s) but not in manifest1\n"
1730 )
1743 )
1731 unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
1744 unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
1732 missing_from_ps = _(
1745 missing_from_ps = _(
1733 b"%s marked as modified, but not in either manifest\n"
1746 b"%s marked as modified, but not in either manifest\n"
1734 )
1747 )
1735 missing_from_ds = _(
1748 missing_from_ds = _(
1736 b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
1749 b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
1737 )
1750 )
1738 for f, entry in self.items():
1751 for f, entry in self.items():
1739 if entry.p1_tracked:
1752 if entry.p1_tracked:
1740 if entry.modified and f not in m1 and f not in m2:
1753 if entry.modified and f not in m1 and f not in m2:
1741 yield missing_from_ps % f
1754 yield missing_from_ps % f
1742 elif f not in m1:
1755 elif f not in m1:
1743 yield missing_from_p1 % (f, node.short(p1))
1756 yield missing_from_p1 % (f, node.short(p1))
1744 if entry.added and f in m1:
1757 if entry.added and f in m1:
1745 yield unexpected_in_p1 % f
1758 yield unexpected_in_p1 % f
1746 for f in m1:
1759 for f in m1:
1747 if narrow_matcher is not None and not narrow_matcher(f):
1760 if narrow_matcher is not None and not narrow_matcher(f):
1748 continue
1761 continue
1749 entry = self.get_entry(f)
1762 entry = self.get_entry(f)
1750 if not entry.p1_tracked:
1763 if not entry.p1_tracked:
1751 yield missing_from_ds % (f, node.short(p1))
1764 yield missing_from_ds % (f, node.short(p1))
General Comments 0
You need to be logged in to leave comments. Login now