##// END OF EJS Templates
dirstate: warn if dirty when starting an edition...
marmoute -
r51397:3433723d default
parent child Browse files
Show More
@@ -1,1669 +1,1672 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import collections
9 import collections
10 import contextlib
10 import contextlib
11 import os
11 import os
12 import stat
12 import stat
13 import uuid
13 import uuid
14
14
15 from .i18n import _
15 from .i18n import _
16 from .pycompat import delattr
16 from .pycompat import delattr
17
17
18 from hgdemandimport import tracing
18 from hgdemandimport import tracing
19
19
20 from . import (
20 from . import (
21 dirstatemap,
21 dirstatemap,
22 encoding,
22 encoding,
23 error,
23 error,
24 match as matchmod,
24 match as matchmod,
25 node,
25 node,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 util,
30 util,
31 )
31 )
32
32
33 from .dirstateutils import (
33 from .dirstateutils import (
34 timestamp,
34 timestamp,
35 )
35 )
36
36
37 from .interfaces import (
37 from .interfaces import (
38 dirstate as intdirstate,
38 dirstate as intdirstate,
39 util as interfaceutil,
39 util as interfaceutil,
40 )
40 )
41
41
42 parsers = policy.importmod('parsers')
42 parsers = policy.importmod('parsers')
43 rustmod = policy.importrust('dirstate')
43 rustmod = policy.importrust('dirstate')
44
44
45 HAS_FAST_DIRSTATE_V2 = rustmod is not None
45 HAS_FAST_DIRSTATE_V2 = rustmod is not None
46
46
47 propertycache = util.propertycache
47 propertycache = util.propertycache
48 filecache = scmutil.filecache
48 filecache = scmutil.filecache
49 _rangemask = dirstatemap.rangemask
49 _rangemask = dirstatemap.rangemask
50
50
51 DirstateItem = dirstatemap.DirstateItem
51 DirstateItem = dirstatemap.DirstateItem
52
52
53
53
54 class repocache(filecache):
54 class repocache(filecache):
55 """filecache for files in .hg/"""
55 """filecache for files in .hg/"""
56
56
57 def join(self, obj, fname):
57 def join(self, obj, fname):
58 return obj._opener.join(fname)
58 return obj._opener.join(fname)
59
59
60
60
61 class rootcache(filecache):
61 class rootcache(filecache):
62 """filecache for files in the repository root"""
62 """filecache for files in the repository root"""
63
63
64 def join(self, obj, fname):
64 def join(self, obj, fname):
65 return obj._join(fname)
65 return obj._join(fname)
66
66
67
67
68 def requires_changing_parents(func):
68 def requires_changing_parents(func):
69 def wrap(self, *args, **kwargs):
69 def wrap(self, *args, **kwargs):
70 if not self.is_changing_parents:
70 if not self.is_changing_parents:
71 msg = 'calling `%s` outside of a changing_parents context'
71 msg = 'calling `%s` outside of a changing_parents context'
72 msg %= func.__name__
72 msg %= func.__name__
73 raise error.ProgrammingError(msg)
73 raise error.ProgrammingError(msg)
74 if self._invalidated_context:
74 if self._invalidated_context:
75 msg = 'calling `%s` after the dirstate was invalidated'
75 msg = 'calling `%s` after the dirstate was invalidated'
76 raise error.ProgrammingError(msg)
76 raise error.ProgrammingError(msg)
77 return func(self, *args, **kwargs)
77 return func(self, *args, **kwargs)
78
78
79 return wrap
79 return wrap
80
80
81
81
82 def requires_changing_files(func):
82 def requires_changing_files(func):
83 def wrap(self, *args, **kwargs):
83 def wrap(self, *args, **kwargs):
84 if not self.is_changing_files:
84 if not self.is_changing_files:
85 msg = 'calling `%s` outside of a `changing_files`'
85 msg = 'calling `%s` outside of a `changing_files`'
86 msg %= func.__name__
86 msg %= func.__name__
87 raise error.ProgrammingError(msg)
87 raise error.ProgrammingError(msg)
88 return func(self, *args, **kwargs)
88 return func(self, *args, **kwargs)
89
89
90 return wrap
90 return wrap
91
91
92
92
93 def requires_changing_any(func):
93 def requires_changing_any(func):
94 def wrap(self, *args, **kwargs):
94 def wrap(self, *args, **kwargs):
95 if not self.is_changing_any:
95 if not self.is_changing_any:
96 msg = 'calling `%s` outside of a changing context'
96 msg = 'calling `%s` outside of a changing context'
97 msg %= func.__name__
97 msg %= func.__name__
98 raise error.ProgrammingError(msg)
98 raise error.ProgrammingError(msg)
99 if self._invalidated_context:
99 if self._invalidated_context:
100 msg = 'calling `%s` after the dirstate was invalidated'
100 msg = 'calling `%s` after the dirstate was invalidated'
101 raise error.ProgrammingError(msg)
101 raise error.ProgrammingError(msg)
102 return func(self, *args, **kwargs)
102 return func(self, *args, **kwargs)
103
103
104 return wrap
104 return wrap
105
105
106
106
107 def requires_not_changing_parents(func):
107 def requires_not_changing_parents(func):
108 def wrap(self, *args, **kwargs):
108 def wrap(self, *args, **kwargs):
109 if self.is_changing_parents:
109 if self.is_changing_parents:
110 msg = 'calling `%s` inside of a changing_parents context'
110 msg = 'calling `%s` inside of a changing_parents context'
111 msg %= func.__name__
111 msg %= func.__name__
112 raise error.ProgrammingError(msg)
112 raise error.ProgrammingError(msg)
113 return func(self, *args, **kwargs)
113 return func(self, *args, **kwargs)
114
114
115 return wrap
115 return wrap
116
116
117
117
118 CHANGE_TYPE_PARENTS = "parents"
118 CHANGE_TYPE_PARENTS = "parents"
119 CHANGE_TYPE_FILES = "files"
119 CHANGE_TYPE_FILES = "files"
120
120
121
121
122 @interfaceutil.implementer(intdirstate.idirstate)
122 @interfaceutil.implementer(intdirstate.idirstate)
123 class dirstate:
123 class dirstate:
124 def __init__(
124 def __init__(
125 self,
125 self,
126 opener,
126 opener,
127 ui,
127 ui,
128 root,
128 root,
129 validate,
129 validate,
130 sparsematchfn,
130 sparsematchfn,
131 nodeconstants,
131 nodeconstants,
132 use_dirstate_v2,
132 use_dirstate_v2,
133 use_tracked_hint=False,
133 use_tracked_hint=False,
134 ):
134 ):
135 """Create a new dirstate object.
135 """Create a new dirstate object.
136
136
137 opener is an open()-like callable that can be used to open the
137 opener is an open()-like callable that can be used to open the
138 dirstate file; root is the root of the directory tracked by
138 dirstate file; root is the root of the directory tracked by
139 the dirstate.
139 the dirstate.
140 """
140 """
141 self._use_dirstate_v2 = use_dirstate_v2
141 self._use_dirstate_v2 = use_dirstate_v2
142 self._use_tracked_hint = use_tracked_hint
142 self._use_tracked_hint = use_tracked_hint
143 self._nodeconstants = nodeconstants
143 self._nodeconstants = nodeconstants
144 self._opener = opener
144 self._opener = opener
145 self._validate = validate
145 self._validate = validate
146 self._root = root
146 self._root = root
147 # Either build a sparse-matcher or None if sparse is disabled
147 # Either build a sparse-matcher or None if sparse is disabled
148 self._sparsematchfn = sparsematchfn
148 self._sparsematchfn = sparsematchfn
149 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
149 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
150 # UNC path pointing to root share (issue4557)
150 # UNC path pointing to root share (issue4557)
151 self._rootdir = pathutil.normasprefix(root)
151 self._rootdir = pathutil.normasprefix(root)
152 # True is any internal state may be different
152 # True is any internal state may be different
153 self._dirty = False
153 self._dirty = False
154 # True if the set of tracked file may be different
154 # True if the set of tracked file may be different
155 self._dirty_tracked_set = False
155 self._dirty_tracked_set = False
156 self._ui = ui
156 self._ui = ui
157 self._filecache = {}
157 self._filecache = {}
158 # nesting level of `changing_parents` context
158 # nesting level of `changing_parents` context
159 self._changing_level = 0
159 self._changing_level = 0
160 # the change currently underway
160 # the change currently underway
161 self._change_type = None
161 self._change_type = None
162 # True if the current dirstate changing operations have been
162 # True if the current dirstate changing operations have been
163 # invalidated (used to make sure all nested contexts have been exited)
163 # invalidated (used to make sure all nested contexts have been exited)
164 self._invalidated_context = False
164 self._invalidated_context = False
165 self._filename = b'dirstate'
165 self._filename = b'dirstate'
166 self._filename_th = b'dirstate-tracked-hint'
166 self._filename_th = b'dirstate-tracked-hint'
167 self._pendingfilename = b'%s.pending' % self._filename
167 self._pendingfilename = b'%s.pending' % self._filename
168 self._plchangecallbacks = {}
168 self._plchangecallbacks = {}
169 self._origpl = None
169 self._origpl = None
170 self._mapcls = dirstatemap.dirstatemap
170 self._mapcls = dirstatemap.dirstatemap
171 # Access and cache cwd early, so we don't access it for the first time
171 # Access and cache cwd early, so we don't access it for the first time
172 # after a working-copy update caused it to not exist (accessing it then
172 # after a working-copy update caused it to not exist (accessing it then
173 # raises an exception).
173 # raises an exception).
174 self._cwd
174 self._cwd
175
175
176 def prefetch_parents(self):
176 def prefetch_parents(self):
177 """make sure the parents are loaded
177 """make sure the parents are loaded
178
178
179 Used to avoid a race condition.
179 Used to avoid a race condition.
180 """
180 """
181 self._pl
181 self._pl
182
182
183 @contextlib.contextmanager
183 @contextlib.contextmanager
184 def _changing(self, repo, change_type):
184 def _changing(self, repo, change_type):
185 if repo.currentwlock() is None:
185 if repo.currentwlock() is None:
186 msg = b"trying to change the dirstate without holding the wlock"
186 msg = b"trying to change the dirstate without holding the wlock"
187 raise error.ProgrammingError(msg)
187 raise error.ProgrammingError(msg)
188 if self._invalidated_context:
188 if self._invalidated_context:
189 msg = "trying to use an invalidated dirstate before it has reset"
189 msg = "trying to use an invalidated dirstate before it has reset"
190 raise error.ProgrammingError(msg)
190 raise error.ProgrammingError(msg)
191
191
192 has_tr = repo.currenttransaction() is not None
192 has_tr = repo.currenttransaction() is not None
193 if not has_tr and self._changing_level == 0 and self._dirty:
194 msg = "entering a changing context, but dirstate is already dirty"
195 raise error.ProgrammingError(msg)
193
196
194 # different type of change are mutually exclusive
197 # different type of change are mutually exclusive
195 if self._change_type is None:
198 if self._change_type is None:
196 assert self._changing_level == 0
199 assert self._changing_level == 0
197 self._change_type = change_type
200 self._change_type = change_type
198 elif self._change_type != change_type:
201 elif self._change_type != change_type:
199 msg = (
202 msg = (
200 'trying to open "%s" dirstate-changing context while a "%s" is'
203 'trying to open "%s" dirstate-changing context while a "%s" is'
201 ' already open'
204 ' already open'
202 )
205 )
203 msg %= (change_type, self._change_type)
206 msg %= (change_type, self._change_type)
204 raise error.ProgrammingError(msg)
207 raise error.ProgrammingError(msg)
205 self._changing_level += 1
208 self._changing_level += 1
206 try:
209 try:
207 yield
210 yield
208 except: # re-raises
211 except: # re-raises
209 self.invalidate()
212 self.invalidate()
210 raise
213 raise
211 finally:
214 finally:
212 tr = repo.currenttransaction()
215 tr = repo.currenttransaction()
213 if self._changing_level > 0:
216 if self._changing_level > 0:
214 if self._invalidated_context:
217 if self._invalidated_context:
215 # make sure we invalidate anything an upper context might
218 # make sure we invalidate anything an upper context might
216 # have changed.
219 # have changed.
217 self.invalidate()
220 self.invalidate()
218 self._changing_level -= 1
221 self._changing_level -= 1
219 # The invalidation is complete once we exit the final context
222 # The invalidation is complete once we exit the final context
220 # manager
223 # manager
221 if self._changing_level <= 0:
224 if self._changing_level <= 0:
222 self._change_type = None
225 self._change_type = None
223 assert self._changing_level == 0
226 assert self._changing_level == 0
224 if self._invalidated_context:
227 if self._invalidated_context:
225 self._invalidated_context = False
228 self._invalidated_context = False
226 else:
229 else:
227 # When an exception occured, `_invalidated_context`
230 # When an exception occured, `_invalidated_context`
228 # would have been set to True by the `invalidate`
231 # would have been set to True by the `invalidate`
229 # call earlier.
232 # call earlier.
230 #
233 #
231 # We don't have more straightforward code, because the
234 # We don't have more straightforward code, because the
232 # Exception catching (and the associated `invalidate`
235 # Exception catching (and the associated `invalidate`
233 # calling) might have been called by a nested context
236 # calling) might have been called by a nested context
234 # instead of the top level one.
237 # instead of the top level one.
235 self.write(tr)
238 self.write(tr)
236 if has_tr != (tr is not None):
239 if has_tr != (tr is not None):
237 if has_tr:
240 if has_tr:
238 m = "transaction vanished while changing dirstate"
241 m = "transaction vanished while changing dirstate"
239 else:
242 else:
240 m = "transaction appeared while changing dirstate"
243 m = "transaction appeared while changing dirstate"
241 raise error.ProgrammingError(m)
244 raise error.ProgrammingError(m)
242
245
243 @contextlib.contextmanager
246 @contextlib.contextmanager
244 def changing_parents(self, repo):
247 def changing_parents(self, repo):
245 with self._changing(repo, CHANGE_TYPE_PARENTS) as c:
248 with self._changing(repo, CHANGE_TYPE_PARENTS) as c:
246 yield c
249 yield c
247
250
248 @contextlib.contextmanager
251 @contextlib.contextmanager
249 def changing_files(self, repo):
252 def changing_files(self, repo):
250 with self._changing(repo, CHANGE_TYPE_FILES) as c:
253 with self._changing(repo, CHANGE_TYPE_FILES) as c:
251 yield c
254 yield c
252
255
253 # here to help migration to the new code
256 # here to help migration to the new code
254 def parentchange(self):
257 def parentchange(self):
255 msg = (
258 msg = (
256 "Mercurial 6.4 and later requires call to "
259 "Mercurial 6.4 and later requires call to "
257 "`dirstate.changing_parents(repo)`"
260 "`dirstate.changing_parents(repo)`"
258 )
261 )
259 raise error.ProgrammingError(msg)
262 raise error.ProgrammingError(msg)
260
263
261 @property
264 @property
262 def is_changing_any(self):
265 def is_changing_any(self):
263 """Returns true if the dirstate is in the middle of a set of changes.
266 """Returns true if the dirstate is in the middle of a set of changes.
264
267
265 This returns True for any kind of change.
268 This returns True for any kind of change.
266 """
269 """
267 return self._changing_level > 0
270 return self._changing_level > 0
268
271
269 def pendingparentchange(self):
272 def pendingparentchange(self):
270 return self.is_changing_parent()
273 return self.is_changing_parent()
271
274
272 def is_changing_parent(self):
275 def is_changing_parent(self):
273 """Returns true if the dirstate is in the middle of a set of changes
276 """Returns true if the dirstate is in the middle of a set of changes
274 that modify the dirstate parent.
277 that modify the dirstate parent.
275 """
278 """
276 self._ui.deprecwarn(b"dirstate.is_changing_parents", b"6.5")
279 self._ui.deprecwarn(b"dirstate.is_changing_parents", b"6.5")
277 return self.is_changing_parents
280 return self.is_changing_parents
278
281
279 @property
282 @property
280 def is_changing_parents(self):
283 def is_changing_parents(self):
281 """Returns true if the dirstate is in the middle of a set of changes
284 """Returns true if the dirstate is in the middle of a set of changes
282 that modify the dirstate parent.
285 that modify the dirstate parent.
283 """
286 """
284 if self._changing_level <= 0:
287 if self._changing_level <= 0:
285 return False
288 return False
286 return self._change_type == CHANGE_TYPE_PARENTS
289 return self._change_type == CHANGE_TYPE_PARENTS
287
290
288 @property
291 @property
289 def is_changing_files(self):
292 def is_changing_files(self):
290 """Returns true if the dirstate is in the middle of a set of changes
293 """Returns true if the dirstate is in the middle of a set of changes
291 that modify the files tracked or their sources.
294 that modify the files tracked or their sources.
292 """
295 """
293 if self._changing_level <= 0:
296 if self._changing_level <= 0:
294 return False
297 return False
295 return self._change_type == CHANGE_TYPE_FILES
298 return self._change_type == CHANGE_TYPE_FILES
296
299
297 @propertycache
300 @propertycache
298 def _map(self):
301 def _map(self):
299 """Return the dirstate contents (see documentation for dirstatemap)."""
302 """Return the dirstate contents (see documentation for dirstatemap)."""
300 self._map = self._mapcls(
303 self._map = self._mapcls(
301 self._ui,
304 self._ui,
302 self._opener,
305 self._opener,
303 self._root,
306 self._root,
304 self._nodeconstants,
307 self._nodeconstants,
305 self._use_dirstate_v2,
308 self._use_dirstate_v2,
306 )
309 )
307 return self._map
310 return self._map
308
311
309 @property
312 @property
310 def _sparsematcher(self):
313 def _sparsematcher(self):
311 """The matcher for the sparse checkout.
314 """The matcher for the sparse checkout.
312
315
313 The working directory may not include every file from a manifest. The
316 The working directory may not include every file from a manifest. The
314 matcher obtained by this property will match a path if it is to be
317 matcher obtained by this property will match a path if it is to be
315 included in the working directory.
318 included in the working directory.
316
319
317 When sparse if disabled, return None.
320 When sparse if disabled, return None.
318 """
321 """
319 if self._sparsematchfn is None:
322 if self._sparsematchfn is None:
320 return None
323 return None
321 # TODO there is potential to cache this property. For now, the matcher
324 # TODO there is potential to cache this property. For now, the matcher
322 # is resolved on every access. (But the called function does use a
325 # is resolved on every access. (But the called function does use a
323 # cache to keep the lookup fast.)
326 # cache to keep the lookup fast.)
324 return self._sparsematchfn()
327 return self._sparsematchfn()
325
328
326 @repocache(b'branch')
329 @repocache(b'branch')
327 def _branch(self):
330 def _branch(self):
328 try:
331 try:
329 return self._opener.read(b"branch").strip() or b"default"
332 return self._opener.read(b"branch").strip() or b"default"
330 except FileNotFoundError:
333 except FileNotFoundError:
331 return b"default"
334 return b"default"
332
335
333 @property
336 @property
334 def _pl(self):
337 def _pl(self):
335 return self._map.parents()
338 return self._map.parents()
336
339
337 def hasdir(self, d):
340 def hasdir(self, d):
338 return self._map.hastrackeddir(d)
341 return self._map.hastrackeddir(d)
339
342
340 @rootcache(b'.hgignore')
343 @rootcache(b'.hgignore')
341 def _ignore(self):
344 def _ignore(self):
342 files = self._ignorefiles()
345 files = self._ignorefiles()
343 if not files:
346 if not files:
344 return matchmod.never()
347 return matchmod.never()
345
348
346 pats = [b'include:%s' % f for f in files]
349 pats = [b'include:%s' % f for f in files]
347 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
350 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
348
351
349 @propertycache
352 @propertycache
350 def _slash(self):
353 def _slash(self):
351 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
354 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
352
355
353 @propertycache
356 @propertycache
354 def _checklink(self):
357 def _checklink(self):
355 return util.checklink(self._root)
358 return util.checklink(self._root)
356
359
357 @propertycache
360 @propertycache
358 def _checkexec(self):
361 def _checkexec(self):
359 return bool(util.checkexec(self._root))
362 return bool(util.checkexec(self._root))
360
363
361 @propertycache
364 @propertycache
362 def _checkcase(self):
365 def _checkcase(self):
363 return not util.fscasesensitive(self._join(b'.hg'))
366 return not util.fscasesensitive(self._join(b'.hg'))
364
367
365 def _join(self, f):
368 def _join(self, f):
366 # much faster than os.path.join()
369 # much faster than os.path.join()
367 # it's safe because f is always a relative path
370 # it's safe because f is always a relative path
368 return self._rootdir + f
371 return self._rootdir + f
369
372
370 def flagfunc(self, buildfallback):
373 def flagfunc(self, buildfallback):
371 """build a callable that returns flags associated with a filename
374 """build a callable that returns flags associated with a filename
372
375
373 The information is extracted from three possible layers:
376 The information is extracted from three possible layers:
374 1. the file system if it supports the information
377 1. the file system if it supports the information
375 2. the "fallback" information stored in the dirstate if any
378 2. the "fallback" information stored in the dirstate if any
376 3. a more expensive mechanism inferring the flags from the parents.
379 3. a more expensive mechanism inferring the flags from the parents.
377 """
380 """
378
381
379 # small hack to cache the result of buildfallback()
382 # small hack to cache the result of buildfallback()
380 fallback_func = []
383 fallback_func = []
381
384
382 def get_flags(x):
385 def get_flags(x):
383 entry = None
386 entry = None
384 fallback_value = None
387 fallback_value = None
385 try:
388 try:
386 st = os.lstat(self._join(x))
389 st = os.lstat(self._join(x))
387 except OSError:
390 except OSError:
388 return b''
391 return b''
389
392
390 if self._checklink:
393 if self._checklink:
391 if util.statislink(st):
394 if util.statislink(st):
392 return b'l'
395 return b'l'
393 else:
396 else:
394 entry = self.get_entry(x)
397 entry = self.get_entry(x)
395 if entry.has_fallback_symlink:
398 if entry.has_fallback_symlink:
396 if entry.fallback_symlink:
399 if entry.fallback_symlink:
397 return b'l'
400 return b'l'
398 else:
401 else:
399 if not fallback_func:
402 if not fallback_func:
400 fallback_func.append(buildfallback())
403 fallback_func.append(buildfallback())
401 fallback_value = fallback_func[0](x)
404 fallback_value = fallback_func[0](x)
402 if b'l' in fallback_value:
405 if b'l' in fallback_value:
403 return b'l'
406 return b'l'
404
407
405 if self._checkexec:
408 if self._checkexec:
406 if util.statisexec(st):
409 if util.statisexec(st):
407 return b'x'
410 return b'x'
408 else:
411 else:
409 if entry is None:
412 if entry is None:
410 entry = self.get_entry(x)
413 entry = self.get_entry(x)
411 if entry.has_fallback_exec:
414 if entry.has_fallback_exec:
412 if entry.fallback_exec:
415 if entry.fallback_exec:
413 return b'x'
416 return b'x'
414 else:
417 else:
415 if fallback_value is None:
418 if fallback_value is None:
416 if not fallback_func:
419 if not fallback_func:
417 fallback_func.append(buildfallback())
420 fallback_func.append(buildfallback())
418 fallback_value = fallback_func[0](x)
421 fallback_value = fallback_func[0](x)
419 if b'x' in fallback_value:
422 if b'x' in fallback_value:
420 return b'x'
423 return b'x'
421 return b''
424 return b''
422
425
423 return get_flags
426 return get_flags
424
427
425 @propertycache
428 @propertycache
426 def _cwd(self):
429 def _cwd(self):
427 # internal config: ui.forcecwd
430 # internal config: ui.forcecwd
428 forcecwd = self._ui.config(b'ui', b'forcecwd')
431 forcecwd = self._ui.config(b'ui', b'forcecwd')
429 if forcecwd:
432 if forcecwd:
430 return forcecwd
433 return forcecwd
431 return encoding.getcwd()
434 return encoding.getcwd()
432
435
433 def getcwd(self):
436 def getcwd(self):
434 """Return the path from which a canonical path is calculated.
437 """Return the path from which a canonical path is calculated.
435
438
436 This path should be used to resolve file patterns or to convert
439 This path should be used to resolve file patterns or to convert
437 canonical paths back to file paths for display. It shouldn't be
440 canonical paths back to file paths for display. It shouldn't be
438 used to get real file paths. Use vfs functions instead.
441 used to get real file paths. Use vfs functions instead.
439 """
442 """
440 cwd = self._cwd
443 cwd = self._cwd
441 if cwd == self._root:
444 if cwd == self._root:
442 return b''
445 return b''
443 # self._root ends with a path separator if self._root is '/' or 'C:\'
446 # self._root ends with a path separator if self._root is '/' or 'C:\'
444 rootsep = self._root
447 rootsep = self._root
445 if not util.endswithsep(rootsep):
448 if not util.endswithsep(rootsep):
446 rootsep += pycompat.ossep
449 rootsep += pycompat.ossep
447 if cwd.startswith(rootsep):
450 if cwd.startswith(rootsep):
448 return cwd[len(rootsep) :]
451 return cwd[len(rootsep) :]
449 else:
452 else:
450 # we're outside the repo. return an absolute path.
453 # we're outside the repo. return an absolute path.
451 return cwd
454 return cwd
452
455
453 def pathto(self, f, cwd=None):
456 def pathto(self, f, cwd=None):
454 if cwd is None:
457 if cwd is None:
455 cwd = self.getcwd()
458 cwd = self.getcwd()
456 path = util.pathto(self._root, cwd, f)
459 path = util.pathto(self._root, cwd, f)
457 if self._slash:
460 if self._slash:
458 return util.pconvert(path)
461 return util.pconvert(path)
459 return path
462 return path
460
463
461 def get_entry(self, path):
464 def get_entry(self, path):
462 """return a DirstateItem for the associated path"""
465 """return a DirstateItem for the associated path"""
463 entry = self._map.get(path)
466 entry = self._map.get(path)
464 if entry is None:
467 if entry is None:
465 return DirstateItem()
468 return DirstateItem()
466 return entry
469 return entry
467
470
468 def __contains__(self, key):
471 def __contains__(self, key):
469 return key in self._map
472 return key in self._map
470
473
471 def __iter__(self):
474 def __iter__(self):
472 return iter(sorted(self._map))
475 return iter(sorted(self._map))
473
476
474 def items(self):
477 def items(self):
475 return self._map.items()
478 return self._map.items()
476
479
477 iteritems = items
480 iteritems = items
478
481
479 def parents(self):
482 def parents(self):
480 return [self._validate(p) for p in self._pl]
483 return [self._validate(p) for p in self._pl]
481
484
482 def p1(self):
485 def p1(self):
483 return self._validate(self._pl[0])
486 return self._validate(self._pl[0])
484
487
485 def p2(self):
488 def p2(self):
486 return self._validate(self._pl[1])
489 return self._validate(self._pl[1])
487
490
488 @property
491 @property
489 def in_merge(self):
492 def in_merge(self):
490 """True if a merge is in progress"""
493 """True if a merge is in progress"""
491 return self._pl[1] != self._nodeconstants.nullid
494 return self._pl[1] != self._nodeconstants.nullid
492
495
493 def branch(self):
496 def branch(self):
494 return encoding.tolocal(self._branch)
497 return encoding.tolocal(self._branch)
495
498
496 @requires_changing_parents
499 @requires_changing_parents
497 def setparents(self, p1, p2=None):
500 def setparents(self, p1, p2=None):
498 """Set dirstate parents to p1 and p2.
501 """Set dirstate parents to p1 and p2.
499
502
500 When moving from two parents to one, "merged" entries a
503 When moving from two parents to one, "merged" entries a
501 adjusted to normal and previous copy records discarded and
504 adjusted to normal and previous copy records discarded and
502 returned by the call.
505 returned by the call.
503
506
504 See localrepo.setparents()
507 See localrepo.setparents()
505 """
508 """
506 if p2 is None:
509 if p2 is None:
507 p2 = self._nodeconstants.nullid
510 p2 = self._nodeconstants.nullid
508 if self._changing_level == 0:
511 if self._changing_level == 0:
509 raise ValueError(
512 raise ValueError(
510 b"cannot set dirstate parent outside of "
513 b"cannot set dirstate parent outside of "
511 b"dirstate.changing_parents context manager"
514 b"dirstate.changing_parents context manager"
512 )
515 )
513
516
514 self._dirty = True
517 self._dirty = True
515 oldp2 = self._pl[1]
518 oldp2 = self._pl[1]
516 if self._origpl is None:
519 if self._origpl is None:
517 self._origpl = self._pl
520 self._origpl = self._pl
518 nullid = self._nodeconstants.nullid
521 nullid = self._nodeconstants.nullid
519 # True if we need to fold p2 related state back to a linear case
522 # True if we need to fold p2 related state back to a linear case
520 fold_p2 = oldp2 != nullid and p2 == nullid
523 fold_p2 = oldp2 != nullid and p2 == nullid
521 return self._map.setparents(p1, p2, fold_p2=fold_p2)
524 return self._map.setparents(p1, p2, fold_p2=fold_p2)
522
525
523 def setbranch(self, branch):
526 def setbranch(self, branch):
524 self.__class__._branch.set(self, encoding.fromlocal(branch))
527 self.__class__._branch.set(self, encoding.fromlocal(branch))
525 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
528 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
526 try:
529 try:
527 f.write(self._branch + b'\n')
530 f.write(self._branch + b'\n')
528 f.close()
531 f.close()
529
532
530 # make sure filecache has the correct stat info for _branch after
533 # make sure filecache has the correct stat info for _branch after
531 # replacing the underlying file
534 # replacing the underlying file
532 ce = self._filecache[b'_branch']
535 ce = self._filecache[b'_branch']
533 if ce:
536 if ce:
534 ce.refresh()
537 ce.refresh()
535 except: # re-raises
538 except: # re-raises
536 f.discard()
539 f.discard()
537 raise
540 raise
538
541
539 def invalidate(self):
542 def invalidate(self):
540 """Causes the next access to reread the dirstate.
543 """Causes the next access to reread the dirstate.
541
544
542 This is different from localrepo.invalidatedirstate() because it always
545 This is different from localrepo.invalidatedirstate() because it always
543 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
546 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
544 check whether the dirstate has changed before rereading it."""
547 check whether the dirstate has changed before rereading it."""
545
548
546 for a in ("_map", "_branch", "_ignore"):
549 for a in ("_map", "_branch", "_ignore"):
547 if a in self.__dict__:
550 if a in self.__dict__:
548 delattr(self, a)
551 delattr(self, a)
549 self._dirty = False
552 self._dirty = False
550 self._dirty_tracked_set = False
553 self._dirty_tracked_set = False
551 self._invalidated_context = self._changing_level > 0
554 self._invalidated_context = self._changing_level > 0
552 self._origpl = None
555 self._origpl = None
553
556
554 @requires_changing_any
557 @requires_changing_any
555 def copy(self, source, dest):
558 def copy(self, source, dest):
556 """Mark dest as a copy of source. Unmark dest if source is None."""
559 """Mark dest as a copy of source. Unmark dest if source is None."""
557 if source == dest:
560 if source == dest:
558 return
561 return
559 self._dirty = True
562 self._dirty = True
560 if source is not None:
563 if source is not None:
561 self._check_sparse(source)
564 self._check_sparse(source)
562 self._map.copymap[dest] = source
565 self._map.copymap[dest] = source
563 else:
566 else:
564 self._map.copymap.pop(dest, None)
567 self._map.copymap.pop(dest, None)
565
568
566 def copied(self, file):
569 def copied(self, file):
567 return self._map.copymap.get(file, None)
570 return self._map.copymap.get(file, None)
568
571
569 def copies(self):
572 def copies(self):
570 return self._map.copymap
573 return self._map.copymap
571
574
572 @requires_changing_files
575 @requires_changing_files
573 def set_tracked(self, filename, reset_copy=False):
576 def set_tracked(self, filename, reset_copy=False):
574 """a "public" method for generic code to mark a file as tracked
577 """a "public" method for generic code to mark a file as tracked
575
578
576 This function is to be called outside of "update/merge" case. For
579 This function is to be called outside of "update/merge" case. For
577 example by a command like `hg add X`.
580 example by a command like `hg add X`.
578
581
579 if reset_copy is set, any existing copy information will be dropped.
582 if reset_copy is set, any existing copy information will be dropped.
580
583
581 return True the file was previously untracked, False otherwise.
584 return True the file was previously untracked, False otherwise.
582 """
585 """
583 self._dirty = True
586 self._dirty = True
584 entry = self._map.get(filename)
587 entry = self._map.get(filename)
585 if entry is None or not entry.tracked:
588 if entry is None or not entry.tracked:
586 self._check_new_tracked_filename(filename)
589 self._check_new_tracked_filename(filename)
587 pre_tracked = self._map.set_tracked(filename)
590 pre_tracked = self._map.set_tracked(filename)
588 if reset_copy:
591 if reset_copy:
589 self._map.copymap.pop(filename, None)
592 self._map.copymap.pop(filename, None)
590 if pre_tracked:
593 if pre_tracked:
591 self._dirty_tracked_set = True
594 self._dirty_tracked_set = True
592 return pre_tracked
595 return pre_tracked
593
596
594 @requires_changing_files
597 @requires_changing_files
595 def set_untracked(self, filename):
598 def set_untracked(self, filename):
596 """a "public" method for generic code to mark a file as untracked
599 """a "public" method for generic code to mark a file as untracked
597
600
598 This function is to be called outside of "update/merge" case. For
601 This function is to be called outside of "update/merge" case. For
599 example by a command like `hg remove X`.
602 example by a command like `hg remove X`.
600
603
601 return True the file was previously tracked, False otherwise.
604 return True the file was previously tracked, False otherwise.
602 """
605 """
603 ret = self._map.set_untracked(filename)
606 ret = self._map.set_untracked(filename)
604 if ret:
607 if ret:
605 self._dirty = True
608 self._dirty = True
606 self._dirty_tracked_set = True
609 self._dirty_tracked_set = True
607 return ret
610 return ret
608
611
609 @requires_not_changing_parents
612 @requires_not_changing_parents
610 def set_clean(self, filename, parentfiledata):
613 def set_clean(self, filename, parentfiledata):
611 """record that the current state of the file on disk is known to be clean"""
614 """record that the current state of the file on disk is known to be clean"""
612 self._dirty = True
615 self._dirty = True
613 if not self._map[filename].tracked:
616 if not self._map[filename].tracked:
614 self._check_new_tracked_filename(filename)
617 self._check_new_tracked_filename(filename)
615 (mode, size, mtime) = parentfiledata
618 (mode, size, mtime) = parentfiledata
616 self._map.set_clean(filename, mode, size, mtime)
619 self._map.set_clean(filename, mode, size, mtime)
617
620
618 @requires_not_changing_parents
621 @requires_not_changing_parents
619 def set_possibly_dirty(self, filename):
622 def set_possibly_dirty(self, filename):
620 """record that the current state of the file on disk is unknown"""
623 """record that the current state of the file on disk is unknown"""
621 self._dirty = True
624 self._dirty = True
622 self._map.set_possibly_dirty(filename)
625 self._map.set_possibly_dirty(filename)
623
626
624 @requires_changing_parents
627 @requires_changing_parents
625 def update_file_p1(
628 def update_file_p1(
626 self,
629 self,
627 filename,
630 filename,
628 p1_tracked,
631 p1_tracked,
629 ):
632 ):
630 """Set a file as tracked in the parent (or not)
633 """Set a file as tracked in the parent (or not)
631
634
632 This is to be called when adjust the dirstate to a new parent after an history
635 This is to be called when adjust the dirstate to a new parent after an history
633 rewriting operation.
636 rewriting operation.
634
637
635 It should not be called during a merge (p2 != nullid) and only within
638 It should not be called during a merge (p2 != nullid) and only within
636 a `with dirstate.changing_parents(repo):` context.
639 a `with dirstate.changing_parents(repo):` context.
637 """
640 """
638 if self.in_merge:
641 if self.in_merge:
639 msg = b'update_file_reference should not be called when merging'
642 msg = b'update_file_reference should not be called when merging'
640 raise error.ProgrammingError(msg)
643 raise error.ProgrammingError(msg)
641 entry = self._map.get(filename)
644 entry = self._map.get(filename)
642 if entry is None:
645 if entry is None:
643 wc_tracked = False
646 wc_tracked = False
644 else:
647 else:
645 wc_tracked = entry.tracked
648 wc_tracked = entry.tracked
646 if not (p1_tracked or wc_tracked):
649 if not (p1_tracked or wc_tracked):
647 # the file is no longer relevant to anyone
650 # the file is no longer relevant to anyone
648 if self._map.get(filename) is not None:
651 if self._map.get(filename) is not None:
649 self._map.reset_state(filename)
652 self._map.reset_state(filename)
650 self._dirty = True
653 self._dirty = True
651 elif (not p1_tracked) and wc_tracked:
654 elif (not p1_tracked) and wc_tracked:
652 if entry is not None and entry.added:
655 if entry is not None and entry.added:
653 return # avoid dropping copy information (maybe?)
656 return # avoid dropping copy information (maybe?)
654
657
655 self._map.reset_state(
658 self._map.reset_state(
656 filename,
659 filename,
657 wc_tracked,
660 wc_tracked,
658 p1_tracked,
661 p1_tracked,
659 # the underlying reference might have changed, we will have to
662 # the underlying reference might have changed, we will have to
660 # check it.
663 # check it.
661 has_meaningful_mtime=False,
664 has_meaningful_mtime=False,
662 )
665 )
663
666
664 @requires_changing_parents
667 @requires_changing_parents
665 def update_file(
668 def update_file(
666 self,
669 self,
667 filename,
670 filename,
668 wc_tracked,
671 wc_tracked,
669 p1_tracked,
672 p1_tracked,
670 p2_info=False,
673 p2_info=False,
671 possibly_dirty=False,
674 possibly_dirty=False,
672 parentfiledata=None,
675 parentfiledata=None,
673 ):
676 ):
674 """update the information about a file in the dirstate
677 """update the information about a file in the dirstate
675
678
676 This is to be called when the direstates parent changes to keep track
679 This is to be called when the direstates parent changes to keep track
677 of what is the file situation in regards to the working copy and its parent.
680 of what is the file situation in regards to the working copy and its parent.
678
681
679 This function must be called within a `dirstate.changing_parents` context.
682 This function must be called within a `dirstate.changing_parents` context.
680
683
681 note: the API is at an early stage and we might need to adjust it
684 note: the API is at an early stage and we might need to adjust it
682 depending of what information ends up being relevant and useful to
685 depending of what information ends up being relevant and useful to
683 other processing.
686 other processing.
684 """
687 """
685 self._update_file(
688 self._update_file(
686 filename=filename,
689 filename=filename,
687 wc_tracked=wc_tracked,
690 wc_tracked=wc_tracked,
688 p1_tracked=p1_tracked,
691 p1_tracked=p1_tracked,
689 p2_info=p2_info,
692 p2_info=p2_info,
690 possibly_dirty=possibly_dirty,
693 possibly_dirty=possibly_dirty,
691 parentfiledata=parentfiledata,
694 parentfiledata=parentfiledata,
692 )
695 )
693
696
694 # XXX since this make the dirstate dirty, we should enforce that it is done
697 # XXX since this make the dirstate dirty, we should enforce that it is done
695 # withing an appropriate change-context that scope the change and ensure it
698 # withing an appropriate change-context that scope the change and ensure it
696 # eventually get written on disk (or rolled back)
699 # eventually get written on disk (or rolled back)
697 def hacky_extension_update_file(self, *args, **kwargs):
700 def hacky_extension_update_file(self, *args, **kwargs):
698 """NEVER USE THIS, YOU DO NOT NEED IT
701 """NEVER USE THIS, YOU DO NOT NEED IT
699
702
700 This function is a variant of "update_file" to be called by a small set
703 This function is a variant of "update_file" to be called by a small set
701 of extensions, it also adjust the internal state of file, but can be
704 of extensions, it also adjust the internal state of file, but can be
702 called outside an `changing_parents` context.
705 called outside an `changing_parents` context.
703
706
704 A very small number of extension meddle with the working copy content
707 A very small number of extension meddle with the working copy content
705 in a way that requires to adjust the dirstate accordingly. At the time
708 in a way that requires to adjust the dirstate accordingly. At the time
706 this command is written they are :
709 this command is written they are :
707 - keyword,
710 - keyword,
708 - largefile,
711 - largefile,
709 PLEASE DO NOT GROW THIS LIST ANY FURTHER.
712 PLEASE DO NOT GROW THIS LIST ANY FURTHER.
710
713
711 This function could probably be replaced by more semantic one (like
714 This function could probably be replaced by more semantic one (like
712 "adjust expected size" or "always revalidate file content", etc)
715 "adjust expected size" or "always revalidate file content", etc)
713 however at the time where this is writen, this is too much of a detour
716 however at the time where this is writen, this is too much of a detour
714 to be considered.
717 to be considered.
715 """
718 """
716 self._update_file(
719 self._update_file(
717 *args,
720 *args,
718 **kwargs,
721 **kwargs,
719 )
722 )
720
723
721 def _update_file(
724 def _update_file(
722 self,
725 self,
723 filename,
726 filename,
724 wc_tracked,
727 wc_tracked,
725 p1_tracked,
728 p1_tracked,
726 p2_info=False,
729 p2_info=False,
727 possibly_dirty=False,
730 possibly_dirty=False,
728 parentfiledata=None,
731 parentfiledata=None,
729 ):
732 ):
730
733
731 # note: I do not think we need to double check name clash here since we
734 # note: I do not think we need to double check name clash here since we
732 # are in a update/merge case that should already have taken care of
735 # are in a update/merge case that should already have taken care of
733 # this. The test agrees
736 # this. The test agrees
734
737
735 self._dirty = True
738 self._dirty = True
736 old_entry = self._map.get(filename)
739 old_entry = self._map.get(filename)
737 if old_entry is None:
740 if old_entry is None:
738 prev_tracked = False
741 prev_tracked = False
739 else:
742 else:
740 prev_tracked = old_entry.tracked
743 prev_tracked = old_entry.tracked
741 if prev_tracked != wc_tracked:
744 if prev_tracked != wc_tracked:
742 self._dirty_tracked_set = True
745 self._dirty_tracked_set = True
743
746
744 self._map.reset_state(
747 self._map.reset_state(
745 filename,
748 filename,
746 wc_tracked,
749 wc_tracked,
747 p1_tracked,
750 p1_tracked,
748 p2_info=p2_info,
751 p2_info=p2_info,
749 has_meaningful_mtime=not possibly_dirty,
752 has_meaningful_mtime=not possibly_dirty,
750 parentfiledata=parentfiledata,
753 parentfiledata=parentfiledata,
751 )
754 )
752
755
753 def _check_new_tracked_filename(self, filename):
756 def _check_new_tracked_filename(self, filename):
754 scmutil.checkfilename(filename)
757 scmutil.checkfilename(filename)
755 if self._map.hastrackeddir(filename):
758 if self._map.hastrackeddir(filename):
756 msg = _(b'directory %r already in dirstate')
759 msg = _(b'directory %r already in dirstate')
757 msg %= pycompat.bytestr(filename)
760 msg %= pycompat.bytestr(filename)
758 raise error.Abort(msg)
761 raise error.Abort(msg)
759 # shadows
762 # shadows
760 for d in pathutil.finddirs(filename):
763 for d in pathutil.finddirs(filename):
761 if self._map.hastrackeddir(d):
764 if self._map.hastrackeddir(d):
762 break
765 break
763 entry = self._map.get(d)
766 entry = self._map.get(d)
764 if entry is not None and not entry.removed:
767 if entry is not None and not entry.removed:
765 msg = _(b'file %r in dirstate clashes with %r')
768 msg = _(b'file %r in dirstate clashes with %r')
766 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
769 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
767 raise error.Abort(msg)
770 raise error.Abort(msg)
768 self._check_sparse(filename)
771 self._check_sparse(filename)
769
772
770 def _check_sparse(self, filename):
773 def _check_sparse(self, filename):
771 """Check that a filename is inside the sparse profile"""
774 """Check that a filename is inside the sparse profile"""
772 sparsematch = self._sparsematcher
775 sparsematch = self._sparsematcher
773 if sparsematch is not None and not sparsematch.always():
776 if sparsematch is not None and not sparsematch.always():
774 if not sparsematch(filename):
777 if not sparsematch(filename):
775 msg = _(b"cannot add '%s' - it is outside the sparse checkout")
778 msg = _(b"cannot add '%s' - it is outside the sparse checkout")
776 hint = _(
779 hint = _(
777 b'include file with `hg debugsparse --include <pattern>` or use '
780 b'include file with `hg debugsparse --include <pattern>` or use '
778 b'`hg add -s <file>` to include file directory while adding'
781 b'`hg add -s <file>` to include file directory while adding'
779 )
782 )
780 raise error.Abort(msg % filename, hint=hint)
783 raise error.Abort(msg % filename, hint=hint)
781
784
    def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
        # Resolve the on-disk (case-preserving) spelling of `path` and cache
        # the result in `storemap`, a normcase(path) -> folded-path mapping.
        # `normed` is the already-normcased form of `path`; `exists` may be
        # passed by the caller to skip the lstat, or None to compute it here.
        if exists is None:
            exists = os.path.lexists(os.path.join(self._root, path))
        if not exists:
            # Maybe a path component exists
            if not ignoremissing and b'/' in path:
                d, f = path.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, None)
                folded = d + b"/" + f
            else:
                # No path components, preserve original case
                folded = path
        else:
            # recursively normalize leading directory components
            # against dirstate
            if b'/' in normed:
                d, f = normed.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, True)
                r = self._root + b"/" + d
                folded = d + b"/" + util.fspath(f, r)
            else:
                folded = util.fspath(normed, self._root)
            # only cache results backed by an actual filesystem lookup
            storemap[normed] = folded

        return folded
807
810
808 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
811 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
809 normed = util.normcase(path)
812 normed = util.normcase(path)
810 folded = self._map.filefoldmap.get(normed, None)
813 folded = self._map.filefoldmap.get(normed, None)
811 if folded is None:
814 if folded is None:
812 if isknown:
815 if isknown:
813 folded = path
816 folded = path
814 else:
817 else:
815 folded = self._discoverpath(
818 folded = self._discoverpath(
816 path, normed, ignoremissing, exists, self._map.filefoldmap
819 path, normed, ignoremissing, exists, self._map.filefoldmap
817 )
820 )
818 return folded
821 return folded
819
822
820 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
823 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
821 normed = util.normcase(path)
824 normed = util.normcase(path)
822 folded = self._map.filefoldmap.get(normed, None)
825 folded = self._map.filefoldmap.get(normed, None)
823 if folded is None:
826 if folded is None:
824 folded = self._map.dirfoldmap.get(normed, None)
827 folded = self._map.dirfoldmap.get(normed, None)
825 if folded is None:
828 if folded is None:
826 if isknown:
829 if isknown:
827 folded = path
830 folded = path
828 else:
831 else:
829 # store discovered result in dirfoldmap so that future
832 # store discovered result in dirfoldmap so that future
830 # normalizefile calls don't start matching directories
833 # normalizefile calls don't start matching directories
831 folded = self._discoverpath(
834 folded = self._discoverpath(
832 path, normed, ignoremissing, exists, self._map.dirfoldmap
835 path, normed, ignoremissing, exists, self._map.dirfoldmap
833 )
836 )
834 return folded
837 return folded
835
838
836 def normalize(self, path, isknown=False, ignoremissing=False):
839 def normalize(self, path, isknown=False, ignoremissing=False):
837 """
840 """
838 normalize the case of a pathname when on a casefolding filesystem
841 normalize the case of a pathname when on a casefolding filesystem
839
842
840 isknown specifies whether the filename came from walking the
843 isknown specifies whether the filename came from walking the
841 disk, to avoid extra filesystem access.
844 disk, to avoid extra filesystem access.
842
845
843 If ignoremissing is True, missing path are returned
846 If ignoremissing is True, missing path are returned
844 unchanged. Otherwise, we try harder to normalize possibly
847 unchanged. Otherwise, we try harder to normalize possibly
845 existing path components.
848 existing path components.
846
849
847 The normalized case is determined based on the following precedence:
850 The normalized case is determined based on the following precedence:
848
851
849 - version of name already stored in the dirstate
852 - version of name already stored in the dirstate
850 - version of name stored on disk
853 - version of name stored on disk
851 - version provided via command arguments
854 - version provided via command arguments
852 """
855 """
853
856
854 if self._checkcase:
857 if self._checkcase:
855 return self._normalize(path, isknown, ignoremissing)
858 return self._normalize(path, isknown, ignoremissing)
856 return path
859 return path
857
860
858 # XXX this method is barely used, as a result:
861 # XXX this method is barely used, as a result:
859 # - its semantic is unclear
862 # - its semantic is unclear
860 # - do we really needs it ?
863 # - do we really needs it ?
861 @requires_changing_parents
864 @requires_changing_parents
862 def clear(self):
865 def clear(self):
863 self._map.clear()
866 self._map.clear()
864 self._dirty = True
867 self._dirty = True
865
868
    @requires_changing_parents
    def rebuild(self, parent, allfiles, changedfiles=None):
        # Reset the dirstate to describe `parent` for `allfiles`.  When
        # `changedfiles` is given, only those entries are touched; otherwise
        # the whole dirstate is rebuilt from scratch.
        matcher = self._sparsematcher
        if matcher is not None and not matcher.always():
            # should not add non-matching files
            allfiles = [f for f in allfiles if matcher(f)]
            if changedfiles:
                changedfiles = [f for f in changedfiles if matcher(f)]

            if changedfiles is not None:
                # these files will be deleted from the dirstate when they are
                # not found to be in allfiles
                dirstatefilestoremove = {f for f in self if not matcher(f)}
                changedfiles = dirstatefilestoremove.union(changedfiles)

        if changedfiles is None:
            # Rebuild entire dirstate
            to_lookup = allfiles
            to_drop = []
            self.clear()
        elif len(changedfiles) < 10:
            # Avoid turning allfiles into a set, which can be expensive if it's
            # large.
            to_lookup = []
            to_drop = []
            for f in changedfiles:
                if f in allfiles:
                    to_lookup.append(f)
                else:
                    to_drop.append(f)
        else:
            changedfilesset = set(changedfiles)
            to_lookup = changedfilesset & set(allfiles)
            to_drop = changedfilesset - to_lookup

        # remember the pre-rebuild parents so parent-change callbacks fire
        if self._origpl is None:
            self._origpl = self._pl
        self._map.setparents(parent, self._nodeconstants.nullid)

        for f in to_lookup:
            if self.in_merge:
                self.set_tracked(f)
            else:
                self._map.reset_state(
                    f,
                    wc_tracked=True,
                    p1_tracked=True,
                )
        for f in to_drop:
            self._map.reset_state(f)

        self._dirty = True
918
921
919 def identity(self):
922 def identity(self):
920 """Return identity of dirstate itself to detect changing in storage
923 """Return identity of dirstate itself to detect changing in storage
921
924
922 If identity of previous dirstate is equal to this, writing
925 If identity of previous dirstate is equal to this, writing
923 changes based on the former dirstate out can keep consistency.
926 changes based on the former dirstate out can keep consistency.
924 """
927 """
925 return self._map.identity
928 return self._map.identity
926
929
927 def write(self, tr):
930 def write(self, tr):
928 if not self._dirty:
931 if not self._dirty:
929 return
932 return
930
933
931 write_key = self._use_tracked_hint and self._dirty_tracked_set
934 write_key = self._use_tracked_hint and self._dirty_tracked_set
932 if tr:
935 if tr:
933 # make sure we invalidate the current change on abort
936 # make sure we invalidate the current change on abort
934 if tr is not None:
937 if tr is not None:
935 tr.addabort(
938 tr.addabort(
936 b'dirstate-invalidate',
939 b'dirstate-invalidate',
937 lambda tr: self.invalidate(),
940 lambda tr: self.invalidate(),
938 )
941 )
939 # delay writing in-memory changes out
942 # delay writing in-memory changes out
940 tr.addfilegenerator(
943 tr.addfilegenerator(
941 b'dirstate-1-main',
944 b'dirstate-1-main',
942 (self._filename,),
945 (self._filename,),
943 lambda f: self._writedirstate(tr, f),
946 lambda f: self._writedirstate(tr, f),
944 location=b'plain',
947 location=b'plain',
945 post_finalize=True,
948 post_finalize=True,
946 )
949 )
947 if write_key:
950 if write_key:
948 tr.addfilegenerator(
951 tr.addfilegenerator(
949 b'dirstate-2-key-post',
952 b'dirstate-2-key-post',
950 (self._filename_th,),
953 (self._filename_th,),
951 lambda f: self._write_tracked_hint(tr, f),
954 lambda f: self._write_tracked_hint(tr, f),
952 location=b'plain',
955 location=b'plain',
953 post_finalize=True,
956 post_finalize=True,
954 )
957 )
955 return
958 return
956
959
957 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
960 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
958 with file(self._filename) as f:
961 with file(self._filename) as f:
959 self._writedirstate(tr, f)
962 self._writedirstate(tr, f)
960 if write_key:
963 if write_key:
961 # we update the key-file after writing to make sure reader have a
964 # we update the key-file after writing to make sure reader have a
962 # key that match the newly written content
965 # key that match the newly written content
963 with file(self._filename_th) as f:
966 with file(self._filename_th) as f:
964 self._write_tracked_hint(tr, f)
967 self._write_tracked_hint(tr, f)
965
968
966 def delete_tracked_hint(self):
969 def delete_tracked_hint(self):
967 """remove the tracked_hint file
970 """remove the tracked_hint file
968
971
969 To be used by format downgrades operation"""
972 To be used by format downgrades operation"""
970 self._opener.unlink(self._filename_th)
973 self._opener.unlink(self._filename_th)
971 self._use_tracked_hint = False
974 self._use_tracked_hint = False
972
975
973 def addparentchangecallback(self, category, callback):
976 def addparentchangecallback(self, category, callback):
974 """add a callback to be called when the wd parents are changed
977 """add a callback to be called when the wd parents are changed
975
978
976 Callback will be called with the following arguments:
979 Callback will be called with the following arguments:
977 dirstate, (oldp1, oldp2), (newp1, newp2)
980 dirstate, (oldp1, oldp2), (newp1, newp2)
978
981
979 Category is a unique identifier to allow overwriting an old callback
982 Category is a unique identifier to allow overwriting an old callback
980 with a newer callback.
983 with a newer callback.
981 """
984 """
982 self._plchangecallbacks[category] = callback
985 self._plchangecallbacks[category] = callback
983
986
984 def _writedirstate(self, tr, st):
987 def _writedirstate(self, tr, st):
985 # notify callbacks about parents change
988 # notify callbacks about parents change
986 if self._origpl is not None and self._origpl != self._pl:
989 if self._origpl is not None and self._origpl != self._pl:
987 for c, callback in sorted(self._plchangecallbacks.items()):
990 for c, callback in sorted(self._plchangecallbacks.items()):
988 callback(self, self._origpl, self._pl)
991 callback(self, self._origpl, self._pl)
989 self._origpl = None
992 self._origpl = None
990 self._map.write(tr, st)
993 self._map.write(tr, st)
991 self._dirty = False
994 self._dirty = False
992 self._dirty_tracked_set = False
995 self._dirty_tracked_set = False
993
996
994 def _write_tracked_hint(self, tr, f):
997 def _write_tracked_hint(self, tr, f):
995 key = node.hex(uuid.uuid4().bytes)
998 key = node.hex(uuid.uuid4().bytes)
996 f.write(b"1\n%s\n" % key) # 1 is the format version
999 f.write(b"1\n%s\n" % key) # 1 is the format version
997
1000
998 def _dirignore(self, f):
1001 def _dirignore(self, f):
999 if self._ignore(f):
1002 if self._ignore(f):
1000 return True
1003 return True
1001 for p in pathutil.finddirs(f):
1004 for p in pathutil.finddirs(f):
1002 if self._ignore(p):
1005 if self._ignore(p):
1003 return True
1006 return True
1004 return False
1007 return False
1005
1008
1006 def _ignorefiles(self):
1009 def _ignorefiles(self):
1007 files = []
1010 files = []
1008 if os.path.exists(self._join(b'.hgignore')):
1011 if os.path.exists(self._join(b'.hgignore')):
1009 files.append(self._join(b'.hgignore'))
1012 files.append(self._join(b'.hgignore'))
1010 for name, path in self._ui.configitems(b"ui"):
1013 for name, path in self._ui.configitems(b"ui"):
1011 if name == b'ignore' or name.startswith(b'ignore.'):
1014 if name == b'ignore' or name.startswith(b'ignore.'):
1012 # we need to use os.path.join here rather than self._join
1015 # we need to use os.path.join here rather than self._join
1013 # because path is arbitrary and user-specified
1016 # because path is arbitrary and user-specified
1014 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1017 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1015 return files
1018 return files
1016
1019
    def _ignorefileandline(self, f):
        # Find the ignore rule matching `f` and return a (patternfile,
        # lineno, line) triple, or (None, -1, b"") when nothing matches.
        # Pattern files referenced via "subinclude" are traversed
        # breadth-first; `visited` guards against re-reading (and cycles).
        files = collections.deque(self._ignorefiles())
        visited = set()
        while files:
            i = files.popleft()
            patterns = matchmod.readpatternfile(
                i, self._ui.warn, sourceinfo=True
            )
            for pattern, lineno, line in patterns:
                kind, p = matchmod._patsplit(pattern, b'glob')
                if kind == b"subinclude":
                    # queue the nested pattern file instead of matching it
                    if p not in visited:
                        files.append(p)
                    continue
                m = matchmod.match(
                    self._root, b'', [], [pattern], warn=self._ui.warn
                )
                if m(f):
                    return (i, lineno, line)
            visited.add(i)
        return (None, -1, b"")
1038
1041
1039 def _walkexplicit(self, match, subrepos):
1042 def _walkexplicit(self, match, subrepos):
1040 """Get stat data about the files explicitly specified by match.
1043 """Get stat data about the files explicitly specified by match.
1041
1044
1042 Return a triple (results, dirsfound, dirsnotfound).
1045 Return a triple (results, dirsfound, dirsnotfound).
1043 - results is a mapping from filename to stat result. It also contains
1046 - results is a mapping from filename to stat result. It also contains
1044 listings mapping subrepos and .hg to None.
1047 listings mapping subrepos and .hg to None.
1045 - dirsfound is a list of files found to be directories.
1048 - dirsfound is a list of files found to be directories.
1046 - dirsnotfound is a list of files that the dirstate thinks are
1049 - dirsnotfound is a list of files that the dirstate thinks are
1047 directories and that were not found."""
1050 directories and that were not found."""
1048
1051
1049 def badtype(mode):
1052 def badtype(mode):
1050 kind = _(b'unknown')
1053 kind = _(b'unknown')
1051 if stat.S_ISCHR(mode):
1054 if stat.S_ISCHR(mode):
1052 kind = _(b'character device')
1055 kind = _(b'character device')
1053 elif stat.S_ISBLK(mode):
1056 elif stat.S_ISBLK(mode):
1054 kind = _(b'block device')
1057 kind = _(b'block device')
1055 elif stat.S_ISFIFO(mode):
1058 elif stat.S_ISFIFO(mode):
1056 kind = _(b'fifo')
1059 kind = _(b'fifo')
1057 elif stat.S_ISSOCK(mode):
1060 elif stat.S_ISSOCK(mode):
1058 kind = _(b'socket')
1061 kind = _(b'socket')
1059 elif stat.S_ISDIR(mode):
1062 elif stat.S_ISDIR(mode):
1060 kind = _(b'directory')
1063 kind = _(b'directory')
1061 return _(b'unsupported file type (type is %s)') % kind
1064 return _(b'unsupported file type (type is %s)') % kind
1062
1065
1063 badfn = match.bad
1066 badfn = match.bad
1064 dmap = self._map
1067 dmap = self._map
1065 lstat = os.lstat
1068 lstat = os.lstat
1066 getkind = stat.S_IFMT
1069 getkind = stat.S_IFMT
1067 dirkind = stat.S_IFDIR
1070 dirkind = stat.S_IFDIR
1068 regkind = stat.S_IFREG
1071 regkind = stat.S_IFREG
1069 lnkkind = stat.S_IFLNK
1072 lnkkind = stat.S_IFLNK
1070 join = self._join
1073 join = self._join
1071 dirsfound = []
1074 dirsfound = []
1072 foundadd = dirsfound.append
1075 foundadd = dirsfound.append
1073 dirsnotfound = []
1076 dirsnotfound = []
1074 notfoundadd = dirsnotfound.append
1077 notfoundadd = dirsnotfound.append
1075
1078
1076 if not match.isexact() and self._checkcase:
1079 if not match.isexact() and self._checkcase:
1077 normalize = self._normalize
1080 normalize = self._normalize
1078 else:
1081 else:
1079 normalize = None
1082 normalize = None
1080
1083
1081 files = sorted(match.files())
1084 files = sorted(match.files())
1082 subrepos.sort()
1085 subrepos.sort()
1083 i, j = 0, 0
1086 i, j = 0, 0
1084 while i < len(files) and j < len(subrepos):
1087 while i < len(files) and j < len(subrepos):
1085 subpath = subrepos[j] + b"/"
1088 subpath = subrepos[j] + b"/"
1086 if files[i] < subpath:
1089 if files[i] < subpath:
1087 i += 1
1090 i += 1
1088 continue
1091 continue
1089 while i < len(files) and files[i].startswith(subpath):
1092 while i < len(files) and files[i].startswith(subpath):
1090 del files[i]
1093 del files[i]
1091 j += 1
1094 j += 1
1092
1095
1093 if not files or b'' in files:
1096 if not files or b'' in files:
1094 files = [b'']
1097 files = [b'']
1095 # constructing the foldmap is expensive, so don't do it for the
1098 # constructing the foldmap is expensive, so don't do it for the
1096 # common case where files is ['']
1099 # common case where files is ['']
1097 normalize = None
1100 normalize = None
1098 results = dict.fromkeys(subrepos)
1101 results = dict.fromkeys(subrepos)
1099 results[b'.hg'] = None
1102 results[b'.hg'] = None
1100
1103
1101 for ff in files:
1104 for ff in files:
1102 if normalize:
1105 if normalize:
1103 nf = normalize(ff, False, True)
1106 nf = normalize(ff, False, True)
1104 else:
1107 else:
1105 nf = ff
1108 nf = ff
1106 if nf in results:
1109 if nf in results:
1107 continue
1110 continue
1108
1111
1109 try:
1112 try:
1110 st = lstat(join(nf))
1113 st = lstat(join(nf))
1111 kind = getkind(st.st_mode)
1114 kind = getkind(st.st_mode)
1112 if kind == dirkind:
1115 if kind == dirkind:
1113 if nf in dmap:
1116 if nf in dmap:
1114 # file replaced by dir on disk but still in dirstate
1117 # file replaced by dir on disk but still in dirstate
1115 results[nf] = None
1118 results[nf] = None
1116 foundadd((nf, ff))
1119 foundadd((nf, ff))
1117 elif kind == regkind or kind == lnkkind:
1120 elif kind == regkind or kind == lnkkind:
1118 results[nf] = st
1121 results[nf] = st
1119 else:
1122 else:
1120 badfn(ff, badtype(kind))
1123 badfn(ff, badtype(kind))
1121 if nf in dmap:
1124 if nf in dmap:
1122 results[nf] = None
1125 results[nf] = None
1123 except (OSError) as inst:
1126 except (OSError) as inst:
1124 # nf not found on disk - it is dirstate only
1127 # nf not found on disk - it is dirstate only
1125 if nf in dmap: # does it exactly match a missing file?
1128 if nf in dmap: # does it exactly match a missing file?
1126 results[nf] = None
1129 results[nf] = None
1127 else: # does it match a missing directory?
1130 else: # does it match a missing directory?
1128 if self._map.hasdir(nf):
1131 if self._map.hasdir(nf):
1129 notfoundadd(nf)
1132 notfoundadd(nf)
1130 else:
1133 else:
1131 badfn(ff, encoding.strtolocal(inst.strerror))
1134 badfn(ff, encoding.strtolocal(inst.strerror))
1132
1135
1133 # match.files() may contain explicitly-specified paths that shouldn't
1136 # match.files() may contain explicitly-specified paths that shouldn't
1134 # be taken; drop them from the list of files found. dirsfound/notfound
1137 # be taken; drop them from the list of files found. dirsfound/notfound
1135 # aren't filtered here because they will be tested later.
1138 # aren't filtered here because they will be tested later.
1136 if match.anypats():
1139 if match.anypats():
1137 for f in list(results):
1140 for f in list(results):
1138 if f == b'.hg' or f in subrepos:
1141 if f == b'.hg' or f in subrepos:
1139 # keep sentinel to disable further out-of-repo walks
1142 # keep sentinel to disable further out-of-repo walks
1140 continue
1143 continue
1141 if not match(f):
1144 if not match(f):
1142 del results[f]
1145 del results[f]
1143
1146
1144 # Case insensitive filesystems cannot rely on lstat() failing to detect
1147 # Case insensitive filesystems cannot rely on lstat() failing to detect
1145 # a case-only rename. Prune the stat object for any file that does not
1148 # a case-only rename. Prune the stat object for any file that does not
1146 # match the case in the filesystem, if there are multiple files that
1149 # match the case in the filesystem, if there are multiple files that
1147 # normalize to the same path.
1150 # normalize to the same path.
1148 if match.isexact() and self._checkcase:
1151 if match.isexact() and self._checkcase:
1149 normed = {}
1152 normed = {}
1150
1153
1151 for f, st in results.items():
1154 for f, st in results.items():
1152 if st is None:
1155 if st is None:
1153 continue
1156 continue
1154
1157
1155 nc = util.normcase(f)
1158 nc = util.normcase(f)
1156 paths = normed.get(nc)
1159 paths = normed.get(nc)
1157
1160
1158 if paths is None:
1161 if paths is None:
1159 paths = set()
1162 paths = set()
1160 normed[nc] = paths
1163 normed[nc] = paths
1161
1164
1162 paths.add(f)
1165 paths.add(f)
1163
1166
1164 for norm, paths in normed.items():
1167 for norm, paths in normed.items():
1165 if len(paths) > 1:
1168 if len(paths) > 1:
1166 for path in paths:
1169 for path in paths:
1167 folded = self._discoverpath(
1170 folded = self._discoverpath(
1168 path, norm, True, None, self._map.dirfoldmap
1171 path, norm, True, None, self._map.dirfoldmap
1169 )
1172 )
1170 if path != folded:
1173 if path != folded:
1171 results[path] = None
1174 results[path] = None
1172
1175
1173 return results, dirsfound, dirsnotfound
1176 return results, dirsfound, dirsnotfound
1174
1177
    def walk(self, match, subrepos, unknown, ignored, full=True):
        """
        Walk recursively through the directory tree, finding all files
        matched by match.

        If full is False, maybe skip some known-clean files.

        Return a dict mapping filename to stat-like object (either
        mercurial.osutil.stat instance or return value of os.stat()).

        """
        # full is a flag that extensions that hook into walk can use -- this
        # implementation doesn't use it at all. This satisfies the contract
        # because we only guarantee a "maybe".

        # Select the ignore predicates according to which file states the
        # caller asked to see.
        if ignored:
            ignore = util.never
            dirignore = util.never
        elif unknown:
            ignore = self._ignore
            dirignore = self._dirignore
        else:
            # if not unknown and not ignored, drop dir recursion and step 2
            ignore = util.always
            dirignore = util.always

        if self._sparsematchfn is not None:
            # In a sparse checkout, restrict the walk to the sparse set plus
            # anything the caller named explicitly.
            em = matchmod.exact(match.files())
            sm = matchmod.unionmatcher([self._sparsematcher, em])
            match = matchmod.intersectmatchers(match, sm)

        # Bind hot attributes to locals for the traversal loops below.
        matchfn = match.matchfn
        matchalways = match.always()
        matchtdir = match.traversedir
        dmap = self._map
        listdir = util.listdir
        lstat = os.lstat
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join

        exact = skipstep3 = False
        if match.isexact():  # match.exact
            exact = True
            dirignore = util.always  # skip step 2
        elif match.prefix():  # match.match, no patterns
            skipstep3 = True

        if not exact and self._checkcase:
            normalize = self._normalize
            normalizefile = self._normalizefile
            skipstep3 = False
        else:
            normalize = self._normalize
            normalizefile = None

        # step 1: find all explicit files
        results, work, dirsnotfound = self._walkexplicit(match, subrepos)
        if matchtdir:
            for d in work:
                matchtdir(d[0])
            for d in dirsnotfound:
                matchtdir(d)

        skipstep3 = skipstep3 and not (work or dirsnotfound)
        work = [d for d in work if not dirignore(d[0])]

        # step 2: visit subdirectories
        def traverse(work, alreadynormed):
            # Depth-first traversal over the pending directory list `work`.
            wadd = work.append
            while work:
                tracing.counter('dirstate.walk work', len(work))
                nd = work.pop()
                visitentries = match.visitchildrenset(nd)
                if not visitentries:
                    continue
                if visitentries == b'this' or visitentries == b'all':
                    visitentries = None
                skip = None
                if nd != b'':
                    skip = b'.hg'
                try:
                    with tracing.log('dirstate.walk.traverse listdir %s', nd):
                        entries = listdir(join(nd), stat=True, skip=skip)
                except (PermissionError, FileNotFoundError) as inst:
                    match.bad(
                        self.pathto(nd), encoding.strtolocal(inst.strerror)
                    )
                    continue
                for f, kind, st in entries:
                    # Some matchers may return files in the visitentries set,
                    # instead of 'this', if the matcher explicitly mentions them
                    # and is not an exactmatcher. This is acceptable; we do not
                    # make any hard assumptions about file-or-directory below
                    # based on the presence of `f` in visitentries. If
                    # visitchildrenset returned a set, we can always skip the
                    # entries *not* in the set it provided regardless of whether
                    # they're actually a file or a directory.
                    if visitentries and f not in visitentries:
                        continue
                    if normalizefile:
                        # even though f might be a directory, we're only
                        # interested in comparing it to files currently in the
                        # dmap -- therefore normalizefile is enough
                        nf = normalizefile(
                            nd and (nd + b"/" + f) or f, True, True
                        )
                    else:
                        nf = nd and (nd + b"/" + f) or f
                    if nf not in results:
                        if kind == dirkind:
                            if not ignore(nf):
                                if matchtdir:
                                    matchtdir(nf)
                                wadd(nf)
                            if nf in dmap and (matchalways or matchfn(nf)):
                                results[nf] = None
                        elif kind == regkind or kind == lnkkind:
                            if nf in dmap:
                                if matchalways or matchfn(nf):
                                    results[nf] = st
                            elif (matchalways or matchfn(nf)) and not ignore(
                                nf
                            ):
                                # unknown file -- normalize if necessary
                                if not alreadynormed:
                                    nf = normalize(nf, False, True)
                                results[nf] = st
                        elif nf in dmap and (matchalways or matchfn(nf)):
                            results[nf] = None

        for nd, d in work:
            # alreadynormed means that processwork doesn't have to do any
            # expensive directory normalization
            alreadynormed = not normalize or nd == d
            traverse([d], alreadynormed)

        # Drop the sentinel entries inserted by _walkexplicit.
        for s in subrepos:
            del results[s]
        del results[b'.hg']

        # step 3: visit remaining files from dmap
        if not skipstep3 and not exact:
            # If a dmap file is not in results yet, it was either
            # a) not matching matchfn b) ignored, c) missing, or d) under a
            # symlink directory.
            if not results and matchalways:
                visit = [f for f in dmap]
            else:
                visit = [f for f in dmap if f not in results and matchfn(f)]
            visit.sort()

            if unknown:
                # unknown == True means we walked all dirs under the roots
                # that wasn't ignored, and everything that matched was stat'ed
                # and is already in results.
                # The rest must thus be ignored or under a symlink.
                audit_path = pathutil.pathauditor(self._root, cached=True)

                for nf in iter(visit):
                    # If a stat for the same file was already added with a
                    # different case, don't add one for this, since that would
                    # make it appear as if the file exists under both names
                    # on disk.
                    if (
                        normalizefile
                        and normalizefile(nf, True, True) in results
                    ):
                        results[nf] = None
                    # Report ignored items in the dmap as long as they are not
                    # under a symlink directory.
                    elif audit_path.check(nf):
                        try:
                            results[nf] = lstat(join(nf))
                            # file was just ignored, no links, and exists
                        except OSError:
                            # file doesn't exist
                            results[nf] = None
                    else:
                        # It's either missing or under a symlink directory
                        # which we in this case report as missing
                        results[nf] = None
            else:
                # We may not have walked the full directory tree above,
                # so stat and check everything we missed.
                iv = iter(visit)
                for st in util.statfiles([join(i) for i in visit]):
                    results[next(iv)] = st
        return results
1365
1368
    def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
        """Run the Rust status implementation and return (lookup, status).

        `lookup` is the list of files whose state could not be decided from
        metadata alone; `status` is a scmutil.status instance. May mark the
        dirstate dirty as a side effect (see the `dirty` flag returned by
        rustmod.status).
        """
        if self._sparsematchfn is not None:
            # In a sparse checkout, restrict the status to the sparse set
            # plus anything the caller named explicitly.
            em = matchmod.exact(matcher.files())
            sm = matchmod.unionmatcher([self._sparsematcher, em])
            matcher = matchmod.intersectmatchers(matcher, sm)
        # Force Rayon (Rust parallelism library) to respect the number of
        # workers. This is a temporary workaround until Rust code knows
        # how to read the config file.
        numcpus = self._ui.configint(b"worker", b"numcpus")
        if numcpus is not None:
            encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)

        workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
        if not workers_enabled:
            encoding.environ[b"RAYON_NUM_THREADS"] = b"1"

        # NOTE: the unpacking order below must match the tuple produced by
        # rustmod.status exactly.
        (
            lookup,
            modified,
            added,
            removed,
            deleted,
            clean,
            ignored,
            unknown,
            warnings,
            bad,
            traversed,
            dirty,
        ) = rustmod.status(
            self._map._map,
            matcher,
            self._rootdir,
            self._ignorefiles(),
            self._checkexec,
            bool(list_clean),
            bool(list_ignored),
            bool(list_unknown),
            bool(matcher.traversedir),
        )

        self._dirty |= dirty

        if matcher.traversedir:
            for dir in traversed:
                matcher.traversedir(dir)

        if self._ui.warn:
            for item in warnings:
                # A tuple is an (ignore-file path, bad syntax) pair; a bare
                # item is an unreadable pattern file path.
                if isinstance(item, tuple):
                    file_path, syntax = item
                    msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
                        file_path,
                        syntax,
                    )
                    self._ui.warn(msg)
                else:
                    msg = _(b"skipping unreadable pattern file '%s': %s\n")
                    self._ui.warn(
                        msg
                        % (
                            pathutil.canonpath(
                                self._rootdir, self._rootdir, item
                            ),
                            b"No such file or directory",
                        )
                    )

        for fn, message in bad:
            matcher.bad(fn, encoding.strtolocal(message))

        status = scmutil.status(
            modified=modified,
            added=added,
            removed=removed,
            deleted=deleted,
            unknown=unknown,
            ignored=ignored,
            clean=clean,
        )
        return (lookup, status)
1447
1450
    # XXX since this can make the dirstate dirty (through rust), we should
    # enforce that it is done withing an appropriate change-context that scope
    # the change and ensure it eventually get written on disk (or rolled back)
    def status(self, match, subrepos, ignored, clean, unknown):
        """Determine the status of the working copy relative to the
        dirstate and return a pair of (unsure, status), where status is of type
        scmutil.status and:

        unsure:
          files that might have been modified since the dirstate was
          written, but need to be read to be sure (size is the same
          but mtime differs)
        status.modified:
          files that have definitely been modified since the dirstate
          was written (different size or mode)
        status.clean:
          files that have definitely not been modified since the
          dirstate was written
        """
        listignored, listclean, listunknown = ignored, clean, unknown
        lookup, modified, added, unknown, ignored = [], [], [], [], []
        removed, deleted, clean = [], [], []

        dmap = self._map
        dmap.preload()

        use_rust = True

        allowed_matchers = (
            matchmod.alwaysmatcher,
            matchmod.differencematcher,
            matchmod.exactmatcher,
            matchmod.includematcher,
            matchmod.intersectionmatcher,
            matchmod.nevermatcher,
            matchmod.unionmatcher,
        )

        if rustmod is None:
            use_rust = False
        elif self._checkcase:
            # Case-insensitive filesystems are not handled yet
            use_rust = False
        elif subrepos:
            use_rust = False
        elif not isinstance(match, allowed_matchers):
            # Some matchers have yet to be implemented
            use_rust = False

        # Get the time from the filesystem so we can disambiguate files that
        # appear modified in the present or future.
        try:
            mtime_boundary = timestamp.get_fs_now(self._opener)
        except OSError:
            # In largefiles or readonly context
            mtime_boundary = None

        if use_rust:
            try:
                res = self._rust_status(
                    match, listclean, listignored, listunknown
                )
                return res + (mtime_boundary,)
            except rustmod.FallbackError:
                # The Rust implementation could not handle this request;
                # fall through to the pure-Python path below.
                pass

        def noop(f):
            pass

        # Bind hot operations to locals; the `noop` sinks skip list-building
        # for categories the caller did not ask for.
        dcontains = dmap.__contains__
        dget = dmap.__getitem__
        ladd = lookup.append  # aka "unsure"
        madd = modified.append
        aadd = added.append
        uadd = unknown.append if listunknown else noop
        iadd = ignored.append if listignored else noop
        radd = removed.append
        dadd = deleted.append
        cadd = clean.append if listclean else noop
        mexact = match.exact
        dirignore = self._dirignore
        checkexec = self._checkexec
        checklink = self._checklink
        copymap = self._map.copymap

        # We need to do full walks when either
        # - we're listing all clean files, or
        # - match.traversedir does something, because match.traversedir should
        #   be called for every dir in the working dir
        full = listclean or match.traversedir is not None
        for fn, st in self.walk(
            match, subrepos, listunknown, listignored, full=full
        ).items():
            if not dcontains(fn):
                # Not tracked: classify as ignored or unknown.
                if (listignored or mexact(fn)) and dirignore(fn):
                    if listignored:
                        iadd(fn)
                else:
                    uadd(fn)
                continue

            t = dget(fn)
            mode = t.mode
            size = t.size

            if not st and t.tracked:
                dadd(fn)
            elif t.p2_info:
                madd(fn)
            elif t.added:
                aadd(fn)
            elif t.removed:
                radd(fn)
            elif t.tracked:
                if not checklink and t.has_fallback_symlink:
                    # If the file system does not support symlink, the mode
                    # might not be correctly stored in the dirstate, so do not
                    # trust it.
                    ladd(fn)
                elif not checkexec and t.has_fallback_exec:
                    # If the file system does not support exec bits, the mode
                    # might not be correctly stored in the dirstate, so do not
                    # trust it.
                    ladd(fn)
                elif (
                    size >= 0
                    and (
                        (size != st.st_size and size != st.st_size & _rangemask)
                        or ((mode ^ st.st_mode) & 0o100 and checkexec)
                    )
                    or fn in copymap
                ):
                    if stat.S_ISLNK(st.st_mode) and size != st.st_size:
                        # issue6456: Size returned may be longer due to
                        # encryption on EXT-4 fscrypt, undecided.
                        ladd(fn)
                    else:
                        madd(fn)
                elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
                    # There might be a change in the future if for example the
                    # internal clock is off, but this is a case where the issues
                    # the user would face would be a lot worse and there is
                    # nothing we can really do.
                    ladd(fn)
                elif listclean:
                    cadd(fn)
        status = scmutil.status(
            modified, added, removed, deleted, unknown, ignored, clean
        )
        return (lookup, status, mtime_boundary)
1598
1601
1599 def matches(self, match):
1602 def matches(self, match):
1600 """
1603 """
1601 return files in the dirstate (in whatever state) filtered by match
1604 return files in the dirstate (in whatever state) filtered by match
1602 """
1605 """
1603 dmap = self._map
1606 dmap = self._map
1604 if rustmod is not None:
1607 if rustmod is not None:
1605 dmap = self._map._map
1608 dmap = self._map._map
1606
1609
1607 if match.always():
1610 if match.always():
1608 return dmap.keys()
1611 return dmap.keys()
1609 files = match.files()
1612 files = match.files()
1610 if match.isexact():
1613 if match.isexact():
1611 # fast path -- filter the other way around, since typically files is
1614 # fast path -- filter the other way around, since typically files is
1612 # much smaller than dmap
1615 # much smaller than dmap
1613 return [f for f in files if f in dmap]
1616 return [f for f in files if f in dmap]
1614 if match.prefix() and all(fn in dmap for fn in files):
1617 if match.prefix() and all(fn in dmap for fn in files):
1615 # fast path -- all the values are known to be files, so just return
1618 # fast path -- all the values are known to be files, so just return
1616 # that
1619 # that
1617 return list(files)
1620 return list(files)
1618 return [f for f in dmap if match(f)]
1621 return [f for f in dmap if match(f)]
1619
1622
1620 def _actualfilename(self, tr):
1623 def _actualfilename(self, tr):
1621 if tr:
1624 if tr:
1622 return self._pendingfilename
1625 return self._pendingfilename
1623 else:
1626 else:
1624 return self._filename
1627 return self._filename
1625
1628
1626 def all_file_names(self):
1629 def all_file_names(self):
1627 """list all filename currently used by this dirstate
1630 """list all filename currently used by this dirstate
1628
1631
1629 This is only used to do `hg rollback` related backup in the transaction
1632 This is only used to do `hg rollback` related backup in the transaction
1630 """
1633 """
1631 if not self._opener.exists(self._filename):
1634 if not self._opener.exists(self._filename):
1632 # no data every written to disk yet
1635 # no data every written to disk yet
1633 return ()
1636 return ()
1634 elif self._use_dirstate_v2:
1637 elif self._use_dirstate_v2:
1635 return (
1638 return (
1636 self._filename,
1639 self._filename,
1637 self._map.docket.data_filename(),
1640 self._map.docket.data_filename(),
1638 )
1641 )
1639 else:
1642 else:
1640 return (self._filename,)
1643 return (self._filename,)
1641
1644
1642 def verify(self, m1, m2, p1, narrow_matcher=None):
1645 def verify(self, m1, m2, p1, narrow_matcher=None):
1643 """
1646 """
1644 check the dirstate contents against the parent manifest and yield errors
1647 check the dirstate contents against the parent manifest and yield errors
1645 """
1648 """
1646 missing_from_p1 = _(
1649 missing_from_p1 = _(
1647 b"%s marked as tracked in p1 (%s) but not in manifest1\n"
1650 b"%s marked as tracked in p1 (%s) but not in manifest1\n"
1648 )
1651 )
1649 unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
1652 unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
1650 missing_from_ps = _(
1653 missing_from_ps = _(
1651 b"%s marked as modified, but not in either manifest\n"
1654 b"%s marked as modified, but not in either manifest\n"
1652 )
1655 )
1653 missing_from_ds = _(
1656 missing_from_ds = _(
1654 b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
1657 b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
1655 )
1658 )
1656 for f, entry in self.items():
1659 for f, entry in self.items():
1657 if entry.p1_tracked:
1660 if entry.p1_tracked:
1658 if entry.modified and f not in m1 and f not in m2:
1661 if entry.modified and f not in m1 and f not in m2:
1659 yield missing_from_ps % f
1662 yield missing_from_ps % f
1660 elif f not in m1:
1663 elif f not in m1:
1661 yield missing_from_p1 % (f, node.short(p1))
1664 yield missing_from_p1 % (f, node.short(p1))
1662 if entry.added and f in m1:
1665 if entry.added and f in m1:
1663 yield unexpected_in_p1 % f
1666 yield unexpected_in_p1 % f
1664 for f in m1:
1667 for f in m1:
1665 if narrow_matcher is not None and not narrow_matcher(f):
1668 if narrow_matcher is not None and not narrow_matcher(f):
1666 continue
1669 continue
1667 entry = self.get_entry(f)
1670 entry = self.get_entry(f)
1668 if not entry.p1_tracked:
1671 if not entry.p1_tracked:
1669 yield missing_from_ds % (f, node.short(p1))
1672 yield missing_from_ds % (f, node.short(p1))
General Comments 0
You need to be logged in to leave comments. Login now