##// END OF EJS Templates
dirstate: detect potential fishy transaction patterns while changing...
marmoute -
r50973:605f0ccf default
parent child Browse files
Show More
@@ -1,1754 +1,1762 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import collections
9 import collections
10 import contextlib
10 import contextlib
11 import os
11 import os
12 import stat
12 import stat
13 import uuid
13 import uuid
14
14
15 from .i18n import _
15 from .i18n import _
16 from .pycompat import delattr
16 from .pycompat import delattr
17
17
18 from hgdemandimport import tracing
18 from hgdemandimport import tracing
19
19
20 from . import (
20 from . import (
21 dirstatemap,
21 dirstatemap,
22 encoding,
22 encoding,
23 error,
23 error,
24 match as matchmod,
24 match as matchmod,
25 node,
25 node,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 util,
30 util,
31 )
31 )
32
32
33 from .dirstateutils import (
33 from .dirstateutils import (
34 docket as docketmod,
34 docket as docketmod,
35 timestamp,
35 timestamp,
36 )
36 )
37
37
38 from .interfaces import (
38 from .interfaces import (
39 dirstate as intdirstate,
39 dirstate as intdirstate,
40 util as interfaceutil,
40 util as interfaceutil,
41 )
41 )
42
42
43 parsers = policy.importmod('parsers')
43 parsers = policy.importmod('parsers')
44 rustmod = policy.importrust('dirstate')
44 rustmod = policy.importrust('dirstate')
45
45
46 HAS_FAST_DIRSTATE_V2 = rustmod is not None
46 HAS_FAST_DIRSTATE_V2 = rustmod is not None
47
47
48 propertycache = util.propertycache
48 propertycache = util.propertycache
49 filecache = scmutil.filecache
49 filecache = scmutil.filecache
50 _rangemask = dirstatemap.rangemask
50 _rangemask = dirstatemap.rangemask
51
51
52 DirstateItem = dirstatemap.DirstateItem
52 DirstateItem = dirstatemap.DirstateItem
53
53
54
54
class repocache(filecache):
    """A `filecache` specialization for files living under `.hg/`."""

    def join(self, obj, fname):
        # Resolve fname relative to the repository's `.hg` opener.
        joined = obj._opener.join(fname)
        return joined
60
60
61
61
class rootcache(filecache):
    """A `filecache` specialization for files in the repository root."""

    def join(self, obj, fname):
        # Resolve fname relative to the working-directory root.
        joined = obj._join(fname)
        return joined
67
67
68
68
def requires_changing_parents(func):
    """Decorator enforcing that `func` runs inside a `changing_parents` context.

    Raises `error.ProgrammingError` when called outside such a context, or
    after the dirstate has been invalidated (all nested contexts must be
    exited before the dirstate may be used again).
    """

    def wrap(self, *args, **kwargs):
        if not self.is_changing_parents:
            msg = 'calling `%s` outside of a changing_parents context'
            msg %= func.__name__
            raise error.ProgrammingError(msg)
        if self._invalidated_context:
            msg = 'calling `%s` after the dirstate was invalidated'
            # BUG FIX: the placeholder was previously never interpolated,
            # so the raised message contained a literal `%s` instead of
            # the offending function's name.
            msg %= func.__name__
            raise error.ProgrammingError(msg)
        return func(self, *args, **kwargs)

    return wrap
81
81
82
82
def requires_changing_files(func):
    """Decorator enforcing that `func` runs inside a `changing_files` context."""

    def wrap(self, *args, **kwargs):
        # Guard-clause form: happy path first.
        if self.is_changing_files:
            return func(self, *args, **kwargs)
        msg = 'calling `%s` outside of a `changing_files`'
        msg %= func.__name__
        raise error.ProgrammingError(msg)

    return wrap
92
92
93
93
def requires_not_changing_parents(func):
    """Decorator refusing to run `func` inside a `changing_parents` context."""

    def wrap(self, *args, **kwargs):
        # Guard-clause form: happy path first.
        if not self.is_changing_parents:
            return func(self, *args, **kwargs)
        msg = 'calling `%s` inside of a changing_parents context'
        msg %= func.__name__
        raise error.ProgrammingError(msg)

    return wrap
103
103
104
104
105 CHANGE_TYPE_PARENTS = "parents"
105 CHANGE_TYPE_PARENTS = "parents"
106 CHANGE_TYPE_FILES = "files"
106 CHANGE_TYPE_FILES = "files"
107
107
108
108
109 @interfaceutil.implementer(intdirstate.idirstate)
109 @interfaceutil.implementer(intdirstate.idirstate)
110 class dirstate:
110 class dirstate:
111 def __init__(
111 def __init__(
112 self,
112 self,
113 opener,
113 opener,
114 ui,
114 ui,
115 root,
115 root,
116 validate,
116 validate,
117 sparsematchfn,
117 sparsematchfn,
118 nodeconstants,
118 nodeconstants,
119 use_dirstate_v2,
119 use_dirstate_v2,
120 use_tracked_hint=False,
120 use_tracked_hint=False,
121 ):
121 ):
122 """Create a new dirstate object.
122 """Create a new dirstate object.
123
123
124 opener is an open()-like callable that can be used to open the
124 opener is an open()-like callable that can be used to open the
125 dirstate file; root is the root of the directory tracked by
125 dirstate file; root is the root of the directory tracked by
126 the dirstate.
126 the dirstate.
127 """
127 """
128 self._use_dirstate_v2 = use_dirstate_v2
128 self._use_dirstate_v2 = use_dirstate_v2
129 self._use_tracked_hint = use_tracked_hint
129 self._use_tracked_hint = use_tracked_hint
130 self._nodeconstants = nodeconstants
130 self._nodeconstants = nodeconstants
131 self._opener = opener
131 self._opener = opener
132 self._validate = validate
132 self._validate = validate
133 self._root = root
133 self._root = root
134 # Either build a sparse-matcher or None if sparse is disabled
134 # Either build a sparse-matcher or None if sparse is disabled
135 self._sparsematchfn = sparsematchfn
135 self._sparsematchfn = sparsematchfn
136 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
136 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
137 # UNC path pointing to root share (issue4557)
137 # UNC path pointing to root share (issue4557)
138 self._rootdir = pathutil.normasprefix(root)
138 self._rootdir = pathutil.normasprefix(root)
139 # True is any internal state may be different
139 # True is any internal state may be different
140 self._dirty = False
140 self._dirty = False
141 # True if the set of tracked file may be different
141 # True if the set of tracked file may be different
142 self._dirty_tracked_set = False
142 self._dirty_tracked_set = False
143 self._ui = ui
143 self._ui = ui
144 self._filecache = {}
144 self._filecache = {}
145 # nesting level of `changing_parents` context
145 # nesting level of `changing_parents` context
146 self._changing_level = 0
146 self._changing_level = 0
147 # the change currently underway
147 # the change currently underway
148 self._change_type = None
148 self._change_type = None
149 # True if the current dirstate changing operations have been
149 # True if the current dirstate changing operations have been
150 # invalidated (used to make sure all nested contexts have been exited)
150 # invalidated (used to make sure all nested contexts have been exited)
151 self._invalidated_context = False
151 self._invalidated_context = False
152 self._filename = b'dirstate'
152 self._filename = b'dirstate'
153 self._filename_th = b'dirstate-tracked-hint'
153 self._filename_th = b'dirstate-tracked-hint'
154 self._pendingfilename = b'%s.pending' % self._filename
154 self._pendingfilename = b'%s.pending' % self._filename
155 self._plchangecallbacks = {}
155 self._plchangecallbacks = {}
156 self._origpl = None
156 self._origpl = None
157 self._mapcls = dirstatemap.dirstatemap
157 self._mapcls = dirstatemap.dirstatemap
158 # Access and cache cwd early, so we don't access it for the first time
158 # Access and cache cwd early, so we don't access it for the first time
159 # after a working-copy update caused it to not exist (accessing it then
159 # after a working-copy update caused it to not exist (accessing it then
160 # raises an exception).
160 # raises an exception).
161 self._cwd
161 self._cwd
162
162
163 def prefetch_parents(self):
163 def prefetch_parents(self):
164 """make sure the parents are loaded
164 """make sure the parents are loaded
165
165
166 Used to avoid a race condition.
166 Used to avoid a race condition.
167 """
167 """
168 self._pl
168 self._pl
169
169
170 @contextlib.contextmanager
170 @contextlib.contextmanager
171 def _changing(self, repo, change_type):
171 def _changing(self, repo, change_type):
172 if repo.currentwlock() is None:
172 if repo.currentwlock() is None:
173 msg = b"trying to change the dirstate without holding the wlock"
173 msg = b"trying to change the dirstate without holding the wlock"
174 raise error.ProgrammingError(msg)
174 raise error.ProgrammingError(msg)
175 if self._invalidated_context:
175 if self._invalidated_context:
176 msg = "trying to use an invalidated dirstate before it has reset"
176 msg = "trying to use an invalidated dirstate before it has reset"
177 raise error.ProgrammingError(msg)
177 raise error.ProgrammingError(msg)
178
178
179 has_tr = repo.currenttransaction() is not None
180
179 # different type of change are mutually exclusive
181 # different type of change are mutually exclusive
180 if self._change_type is None:
182 if self._change_type is None:
181 assert self._changing_level == 0
183 assert self._changing_level == 0
182 self._change_type = change_type
184 self._change_type = change_type
183 elif self._change_type != change_type:
185 elif self._change_type != change_type:
184 msg = (
186 msg = (
185 'trying to open "%s" dirstate-changing context while a "%s" is'
187 'trying to open "%s" dirstate-changing context while a "%s" is'
186 ' already open'
188 ' already open'
187 )
189 )
188 msg %= (change_type, self._change_type)
190 msg %= (change_type, self._change_type)
189 raise error.ProgrammingError(msg)
191 raise error.ProgrammingError(msg)
190 self._changing_level += 1
192 self._changing_level += 1
191 try:
193 try:
192 yield
194 yield
193 except Exception:
195 except Exception:
194 self.invalidate()
196 self.invalidate()
195 raise
197 raise
196 finally:
198 finally:
199 tr = repo.currenttransaction()
197 if self._changing_level > 0:
200 if self._changing_level > 0:
198 if self._invalidated_context:
201 if self._invalidated_context:
199 # make sure we invalidate anything an upper context might
202 # make sure we invalidate anything an upper context might
200 # have changed.
203 # have changed.
201 self.invalidate()
204 self.invalidate()
202 self._changing_level -= 1
205 self._changing_level -= 1
203 # The invalidation is complete once we exit the final context
206 # The invalidation is complete once we exit the final context
204 # manager
207 # manager
205 if self._changing_level <= 0:
208 if self._changing_level <= 0:
206 self._change_type = None
209 self._change_type = None
207 assert self._changing_level == 0
210 assert self._changing_level == 0
208 if self._invalidated_context:
211 if self._invalidated_context:
209 self._invalidated_context = False
212 self._invalidated_context = False
210 else:
213 else:
211 # When an exception occured, `_invalidated_context`
214 # When an exception occured, `_invalidated_context`
212 # would have been set to True by the `invalidate`
215 # would have been set to True by the `invalidate`
213 # call earlier.
216 # call earlier.
214 #
217 #
215 # We don't have more straightforward code, because the
218 # We don't have more straightforward code, because the
216 # Exception catching (and the associated `invalidate`
219 # Exception catching (and the associated `invalidate`
217 # calling) might have been called by a nested context
220 # calling) might have been called by a nested context
218 # instead of the top level one.
221 # instead of the top level one.
219 tr = repo.currenttransaction()
220 self.write(tr)
222 self.write(tr)
223 if has_tr != (tr is not None):
224 if has_tr:
225 m = "transaction vanished while changing dirstate"
226 else:
227 m = "transaction appeared while changing dirstate"
228 raise error.ProgrammingError(m)
221
229
222 @contextlib.contextmanager
230 @contextlib.contextmanager
223 def changing_parents(self, repo):
231 def changing_parents(self, repo):
224 with self._changing(repo, CHANGE_TYPE_PARENTS) as c:
232 with self._changing(repo, CHANGE_TYPE_PARENTS) as c:
225 yield c
233 yield c
226
234
227 @contextlib.contextmanager
235 @contextlib.contextmanager
228 def changing_files(self, repo):
236 def changing_files(self, repo):
229 with self._changing(repo, CHANGE_TYPE_FILES) as c:
237 with self._changing(repo, CHANGE_TYPE_FILES) as c:
230 yield c
238 yield c
231
239
232 # here to help migration to the new code
240 # here to help migration to the new code
233 def parentchange(self):
241 def parentchange(self):
234 msg = (
242 msg = (
235 "Mercurial 6.4 and later requires call to "
243 "Mercurial 6.4 and later requires call to "
236 "`dirstate.changing_parents(repo)`"
244 "`dirstate.changing_parents(repo)`"
237 )
245 )
238 raise error.ProgrammingError(msg)
246 raise error.ProgrammingError(msg)
239
247
240 @property
248 @property
241 def is_changing_any(self):
249 def is_changing_any(self):
242 """Returns true if the dirstate is in the middle of a set of changes.
250 """Returns true if the dirstate is in the middle of a set of changes.
243
251
244 This returns True for any kind of change.
252 This returns True for any kind of change.
245 """
253 """
246 return self._changing_level > 0
254 return self._changing_level > 0
247
255
248 def pendingparentchange(self):
256 def pendingparentchange(self):
249 return self.is_changing_parent()
257 return self.is_changing_parent()
250
258
251 def is_changing_parent(self):
259 def is_changing_parent(self):
252 """Returns true if the dirstate is in the middle of a set of changes
260 """Returns true if the dirstate is in the middle of a set of changes
253 that modify the dirstate parent.
261 that modify the dirstate parent.
254 """
262 """
255 self._ui.deprecwarn(b"dirstate.is_changing_parents", b"6.5")
263 self._ui.deprecwarn(b"dirstate.is_changing_parents", b"6.5")
256 return self.is_changing_parents
264 return self.is_changing_parents
257
265
258 @property
266 @property
259 def is_changing_parents(self):
267 def is_changing_parents(self):
260 """Returns true if the dirstate is in the middle of a set of changes
268 """Returns true if the dirstate is in the middle of a set of changes
261 that modify the dirstate parent.
269 that modify the dirstate parent.
262 """
270 """
263 if self._changing_level <= 0:
271 if self._changing_level <= 0:
264 return False
272 return False
265 return self._change_type == CHANGE_TYPE_PARENTS
273 return self._change_type == CHANGE_TYPE_PARENTS
266
274
267 @property
275 @property
268 def is_changing_files(self):
276 def is_changing_files(self):
269 """Returns true if the dirstate is in the middle of a set of changes
277 """Returns true if the dirstate is in the middle of a set of changes
270 that modify the files tracked or their sources.
278 that modify the files tracked or their sources.
271 """
279 """
272 if self._changing_level <= 0:
280 if self._changing_level <= 0:
273 return False
281 return False
274 return self._change_type == CHANGE_TYPE_FILES
282 return self._change_type == CHANGE_TYPE_FILES
275
283
276 @propertycache
284 @propertycache
277 def _map(self):
285 def _map(self):
278 """Return the dirstate contents (see documentation for dirstatemap)."""
286 """Return the dirstate contents (see documentation for dirstatemap)."""
279 self._map = self._mapcls(
287 self._map = self._mapcls(
280 self._ui,
288 self._ui,
281 self._opener,
289 self._opener,
282 self._root,
290 self._root,
283 self._nodeconstants,
291 self._nodeconstants,
284 self._use_dirstate_v2,
292 self._use_dirstate_v2,
285 )
293 )
286 return self._map
294 return self._map
287
295
288 @property
296 @property
289 def _sparsematcher(self):
297 def _sparsematcher(self):
290 """The matcher for the sparse checkout.
298 """The matcher for the sparse checkout.
291
299
292 The working directory may not include every file from a manifest. The
300 The working directory may not include every file from a manifest. The
293 matcher obtained by this property will match a path if it is to be
301 matcher obtained by this property will match a path if it is to be
294 included in the working directory.
302 included in the working directory.
295
303
296 When sparse if disabled, return None.
304 When sparse if disabled, return None.
297 """
305 """
298 if self._sparsematchfn is None:
306 if self._sparsematchfn is None:
299 return None
307 return None
300 # TODO there is potential to cache this property. For now, the matcher
308 # TODO there is potential to cache this property. For now, the matcher
301 # is resolved on every access. (But the called function does use a
309 # is resolved on every access. (But the called function does use a
302 # cache to keep the lookup fast.)
310 # cache to keep the lookup fast.)
303 return self._sparsematchfn()
311 return self._sparsematchfn()
304
312
305 @repocache(b'branch')
313 @repocache(b'branch')
306 def _branch(self):
314 def _branch(self):
307 try:
315 try:
308 return self._opener.read(b"branch").strip() or b"default"
316 return self._opener.read(b"branch").strip() or b"default"
309 except FileNotFoundError:
317 except FileNotFoundError:
310 return b"default"
318 return b"default"
311
319
312 @property
320 @property
313 def _pl(self):
321 def _pl(self):
314 return self._map.parents()
322 return self._map.parents()
315
323
316 def hasdir(self, d):
324 def hasdir(self, d):
317 return self._map.hastrackeddir(d)
325 return self._map.hastrackeddir(d)
318
326
319 @rootcache(b'.hgignore')
327 @rootcache(b'.hgignore')
320 def _ignore(self):
328 def _ignore(self):
321 files = self._ignorefiles()
329 files = self._ignorefiles()
322 if not files:
330 if not files:
323 return matchmod.never()
331 return matchmod.never()
324
332
325 pats = [b'include:%s' % f for f in files]
333 pats = [b'include:%s' % f for f in files]
326 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
334 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
327
335
328 @propertycache
336 @propertycache
329 def _slash(self):
337 def _slash(self):
330 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
338 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
331
339
332 @propertycache
340 @propertycache
333 def _checklink(self):
341 def _checklink(self):
334 return util.checklink(self._root)
342 return util.checklink(self._root)
335
343
336 @propertycache
344 @propertycache
337 def _checkexec(self):
345 def _checkexec(self):
338 return bool(util.checkexec(self._root))
346 return bool(util.checkexec(self._root))
339
347
340 @propertycache
348 @propertycache
341 def _checkcase(self):
349 def _checkcase(self):
342 return not util.fscasesensitive(self._join(b'.hg'))
350 return not util.fscasesensitive(self._join(b'.hg'))
343
351
344 def _join(self, f):
352 def _join(self, f):
345 # much faster than os.path.join()
353 # much faster than os.path.join()
346 # it's safe because f is always a relative path
354 # it's safe because f is always a relative path
347 return self._rootdir + f
355 return self._rootdir + f
348
356
349 def flagfunc(self, buildfallback):
357 def flagfunc(self, buildfallback):
350 """build a callable that returns flags associated with a filename
358 """build a callable that returns flags associated with a filename
351
359
352 The information is extracted from three possible layers:
360 The information is extracted from three possible layers:
353 1. the file system if it supports the information
361 1. the file system if it supports the information
354 2. the "fallback" information stored in the dirstate if any
362 2. the "fallback" information stored in the dirstate if any
355 3. a more expensive mechanism inferring the flags from the parents.
363 3. a more expensive mechanism inferring the flags from the parents.
356 """
364 """
357
365
358 # small hack to cache the result of buildfallback()
366 # small hack to cache the result of buildfallback()
359 fallback_func = []
367 fallback_func = []
360
368
361 def get_flags(x):
369 def get_flags(x):
362 entry = None
370 entry = None
363 fallback_value = None
371 fallback_value = None
364 try:
372 try:
365 st = os.lstat(self._join(x))
373 st = os.lstat(self._join(x))
366 except OSError:
374 except OSError:
367 return b''
375 return b''
368
376
369 if self._checklink:
377 if self._checklink:
370 if util.statislink(st):
378 if util.statislink(st):
371 return b'l'
379 return b'l'
372 else:
380 else:
373 entry = self.get_entry(x)
381 entry = self.get_entry(x)
374 if entry.has_fallback_symlink:
382 if entry.has_fallback_symlink:
375 if entry.fallback_symlink:
383 if entry.fallback_symlink:
376 return b'l'
384 return b'l'
377 else:
385 else:
378 if not fallback_func:
386 if not fallback_func:
379 fallback_func.append(buildfallback())
387 fallback_func.append(buildfallback())
380 fallback_value = fallback_func[0](x)
388 fallback_value = fallback_func[0](x)
381 if b'l' in fallback_value:
389 if b'l' in fallback_value:
382 return b'l'
390 return b'l'
383
391
384 if self._checkexec:
392 if self._checkexec:
385 if util.statisexec(st):
393 if util.statisexec(st):
386 return b'x'
394 return b'x'
387 else:
395 else:
388 if entry is None:
396 if entry is None:
389 entry = self.get_entry(x)
397 entry = self.get_entry(x)
390 if entry.has_fallback_exec:
398 if entry.has_fallback_exec:
391 if entry.fallback_exec:
399 if entry.fallback_exec:
392 return b'x'
400 return b'x'
393 else:
401 else:
394 if fallback_value is None:
402 if fallback_value is None:
395 if not fallback_func:
403 if not fallback_func:
396 fallback_func.append(buildfallback())
404 fallback_func.append(buildfallback())
397 fallback_value = fallback_func[0](x)
405 fallback_value = fallback_func[0](x)
398 if b'x' in fallback_value:
406 if b'x' in fallback_value:
399 return b'x'
407 return b'x'
400 return b''
408 return b''
401
409
402 return get_flags
410 return get_flags
403
411
404 @propertycache
412 @propertycache
405 def _cwd(self):
413 def _cwd(self):
406 # internal config: ui.forcecwd
414 # internal config: ui.forcecwd
407 forcecwd = self._ui.config(b'ui', b'forcecwd')
415 forcecwd = self._ui.config(b'ui', b'forcecwd')
408 if forcecwd:
416 if forcecwd:
409 return forcecwd
417 return forcecwd
410 return encoding.getcwd()
418 return encoding.getcwd()
411
419
412 def getcwd(self):
420 def getcwd(self):
413 """Return the path from which a canonical path is calculated.
421 """Return the path from which a canonical path is calculated.
414
422
415 This path should be used to resolve file patterns or to convert
423 This path should be used to resolve file patterns or to convert
416 canonical paths back to file paths for display. It shouldn't be
424 canonical paths back to file paths for display. It shouldn't be
417 used to get real file paths. Use vfs functions instead.
425 used to get real file paths. Use vfs functions instead.
418 """
426 """
419 cwd = self._cwd
427 cwd = self._cwd
420 if cwd == self._root:
428 if cwd == self._root:
421 return b''
429 return b''
422 # self._root ends with a path separator if self._root is '/' or 'C:\'
430 # self._root ends with a path separator if self._root is '/' or 'C:\'
423 rootsep = self._root
431 rootsep = self._root
424 if not util.endswithsep(rootsep):
432 if not util.endswithsep(rootsep):
425 rootsep += pycompat.ossep
433 rootsep += pycompat.ossep
426 if cwd.startswith(rootsep):
434 if cwd.startswith(rootsep):
427 return cwd[len(rootsep) :]
435 return cwd[len(rootsep) :]
428 else:
436 else:
429 # we're outside the repo. return an absolute path.
437 # we're outside the repo. return an absolute path.
430 return cwd
438 return cwd
431
439
432 def pathto(self, f, cwd=None):
440 def pathto(self, f, cwd=None):
433 if cwd is None:
441 if cwd is None:
434 cwd = self.getcwd()
442 cwd = self.getcwd()
435 path = util.pathto(self._root, cwd, f)
443 path = util.pathto(self._root, cwd, f)
436 if self._slash:
444 if self._slash:
437 return util.pconvert(path)
445 return util.pconvert(path)
438 return path
446 return path
439
447
440 def get_entry(self, path):
448 def get_entry(self, path):
441 """return a DirstateItem for the associated path"""
449 """return a DirstateItem for the associated path"""
442 entry = self._map.get(path)
450 entry = self._map.get(path)
443 if entry is None:
451 if entry is None:
444 return DirstateItem()
452 return DirstateItem()
445 return entry
453 return entry
446
454
447 def __contains__(self, key):
455 def __contains__(self, key):
448 return key in self._map
456 return key in self._map
449
457
450 def __iter__(self):
458 def __iter__(self):
451 return iter(sorted(self._map))
459 return iter(sorted(self._map))
452
460
453 def items(self):
461 def items(self):
454 return self._map.items()
462 return self._map.items()
455
463
456 iteritems = items
464 iteritems = items
457
465
458 def parents(self):
466 def parents(self):
459 return [self._validate(p) for p in self._pl]
467 return [self._validate(p) for p in self._pl]
460
468
461 def p1(self):
469 def p1(self):
462 return self._validate(self._pl[0])
470 return self._validate(self._pl[0])
463
471
464 def p2(self):
472 def p2(self):
465 return self._validate(self._pl[1])
473 return self._validate(self._pl[1])
466
474
467 @property
475 @property
468 def in_merge(self):
476 def in_merge(self):
469 """True if a merge is in progress"""
477 """True if a merge is in progress"""
470 return self._pl[1] != self._nodeconstants.nullid
478 return self._pl[1] != self._nodeconstants.nullid
471
479
472 def branch(self):
480 def branch(self):
473 return encoding.tolocal(self._branch)
481 return encoding.tolocal(self._branch)
474
482
475 # XXX since this make the dirstate dirty, we should enforce that it is done
483 # XXX since this make the dirstate dirty, we should enforce that it is done
476 # withing an appropriate change-context that scope the change and ensure it
484 # withing an appropriate change-context that scope the change and ensure it
477 # eventually get written on disk (or rolled back)
485 # eventually get written on disk (or rolled back)
478 def setparents(self, p1, p2=None):
486 def setparents(self, p1, p2=None):
479 """Set dirstate parents to p1 and p2.
487 """Set dirstate parents to p1 and p2.
480
488
481 When moving from two parents to one, "merged" entries a
489 When moving from two parents to one, "merged" entries a
482 adjusted to normal and previous copy records discarded and
490 adjusted to normal and previous copy records discarded and
483 returned by the call.
491 returned by the call.
484
492
485 See localrepo.setparents()
493 See localrepo.setparents()
486 """
494 """
487 if p2 is None:
495 if p2 is None:
488 p2 = self._nodeconstants.nullid
496 p2 = self._nodeconstants.nullid
489 if self._changing_level == 0:
497 if self._changing_level == 0:
490 raise ValueError(
498 raise ValueError(
491 b"cannot set dirstate parent outside of "
499 b"cannot set dirstate parent outside of "
492 b"dirstate.changing_parents context manager"
500 b"dirstate.changing_parents context manager"
493 )
501 )
494
502
495 self._dirty = True
503 self._dirty = True
496 oldp2 = self._pl[1]
504 oldp2 = self._pl[1]
497 if self._origpl is None:
505 if self._origpl is None:
498 self._origpl = self._pl
506 self._origpl = self._pl
499 nullid = self._nodeconstants.nullid
507 nullid = self._nodeconstants.nullid
500 # True if we need to fold p2 related state back to a linear case
508 # True if we need to fold p2 related state back to a linear case
501 fold_p2 = oldp2 != nullid and p2 == nullid
509 fold_p2 = oldp2 != nullid and p2 == nullid
502 return self._map.setparents(p1, p2, fold_p2=fold_p2)
510 return self._map.setparents(p1, p2, fold_p2=fold_p2)
503
511
504 def setbranch(self, branch):
512 def setbranch(self, branch):
505 self.__class__._branch.set(self, encoding.fromlocal(branch))
513 self.__class__._branch.set(self, encoding.fromlocal(branch))
506 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
514 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
507 try:
515 try:
508 f.write(self._branch + b'\n')
516 f.write(self._branch + b'\n')
509 f.close()
517 f.close()
510
518
511 # make sure filecache has the correct stat info for _branch after
519 # make sure filecache has the correct stat info for _branch after
512 # replacing the underlying file
520 # replacing the underlying file
513 ce = self._filecache[b'_branch']
521 ce = self._filecache[b'_branch']
514 if ce:
522 if ce:
515 ce.refresh()
523 ce.refresh()
516 except: # re-raises
524 except: # re-raises
517 f.discard()
525 f.discard()
518 raise
526 raise
519
527
520 def invalidate(self):
528 def invalidate(self):
521 """Causes the next access to reread the dirstate.
529 """Causes the next access to reread the dirstate.
522
530
523 This is different from localrepo.invalidatedirstate() because it always
531 This is different from localrepo.invalidatedirstate() because it always
524 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
532 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
525 check whether the dirstate has changed before rereading it."""
533 check whether the dirstate has changed before rereading it."""
526
534
527 for a in ("_map", "_branch", "_ignore"):
535 for a in ("_map", "_branch", "_ignore"):
528 if a in self.__dict__:
536 if a in self.__dict__:
529 delattr(self, a)
537 delattr(self, a)
530 self._dirty = False
538 self._dirty = False
531 self._dirty_tracked_set = False
539 self._dirty_tracked_set = False
532 self._invalidated_context = self._changing_level > 0
540 self._invalidated_context = self._changing_level > 0
533 self._origpl = None
541 self._origpl = None
534
542
535 # XXX since this make the dirstate dirty, we should enforce that it is done
543 # XXX since this make the dirstate dirty, we should enforce that it is done
536 # withing an appropriate change-context that scope the change and ensure it
544 # withing an appropriate change-context that scope the change and ensure it
537 # eventually get written on disk (or rolled back)
545 # eventually get written on disk (or rolled back)
538 def copy(self, source, dest):
546 def copy(self, source, dest):
539 """Mark dest as a copy of source. Unmark dest if source is None."""
547 """Mark dest as a copy of source. Unmark dest if source is None."""
540 if source == dest:
548 if source == dest:
541 return
549 return
542 self._dirty = True
550 self._dirty = True
543 if source is not None:
551 if source is not None:
544 self._check_sparse(source)
552 self._check_sparse(source)
545 self._map.copymap[dest] = source
553 self._map.copymap[dest] = source
546 else:
554 else:
547 self._map.copymap.pop(dest, None)
555 self._map.copymap.pop(dest, None)
548
556
549 def copied(self, file):
557 def copied(self, file):
550 return self._map.copymap.get(file, None)
558 return self._map.copymap.get(file, None)
551
559
552 def copies(self):
560 def copies(self):
553 return self._map.copymap
561 return self._map.copymap
554
562
555 @requires_changing_files
563 @requires_changing_files
556 def set_tracked(self, filename, reset_copy=False):
564 def set_tracked(self, filename, reset_copy=False):
557 """a "public" method for generic code to mark a file as tracked
565 """a "public" method for generic code to mark a file as tracked
558
566
559 This function is to be called outside of "update/merge" case. For
567 This function is to be called outside of "update/merge" case. For
560 example by a command like `hg add X`.
568 example by a command like `hg add X`.
561
569
562 if reset_copy is set, any existing copy information will be dropped.
570 if reset_copy is set, any existing copy information will be dropped.
563
571
564 return True the file was previously untracked, False otherwise.
572 return True the file was previously untracked, False otherwise.
565 """
573 """
566 self._dirty = True
574 self._dirty = True
567 entry = self._map.get(filename)
575 entry = self._map.get(filename)
568 if entry is None or not entry.tracked:
576 if entry is None or not entry.tracked:
569 self._check_new_tracked_filename(filename)
577 self._check_new_tracked_filename(filename)
570 pre_tracked = self._map.set_tracked(filename)
578 pre_tracked = self._map.set_tracked(filename)
571 if reset_copy:
579 if reset_copy:
572 self._map.copymap.pop(filename, None)
580 self._map.copymap.pop(filename, None)
573 if pre_tracked:
581 if pre_tracked:
574 self._dirty_tracked_set = True
582 self._dirty_tracked_set = True
575 return pre_tracked
583 return pre_tracked
576
584
577 @requires_changing_files
585 @requires_changing_files
578 def set_untracked(self, filename):
586 def set_untracked(self, filename):
579 """a "public" method for generic code to mark a file as untracked
587 """a "public" method for generic code to mark a file as untracked
580
588
581 This function is to be called outside of "update/merge" case. For
589 This function is to be called outside of "update/merge" case. For
582 example by a command like `hg remove X`.
590 example by a command like `hg remove X`.
583
591
584 return True the file was previously tracked, False otherwise.
592 return True the file was previously tracked, False otherwise.
585 """
593 """
586 ret = self._map.set_untracked(filename)
594 ret = self._map.set_untracked(filename)
587 if ret:
595 if ret:
588 self._dirty = True
596 self._dirty = True
589 self._dirty_tracked_set = True
597 self._dirty_tracked_set = True
590 return ret
598 return ret
591
599
592 @requires_not_changing_parents
600 @requires_not_changing_parents
593 def set_clean(self, filename, parentfiledata):
601 def set_clean(self, filename, parentfiledata):
594 """record that the current state of the file on disk is known to be clean"""
602 """record that the current state of the file on disk is known to be clean"""
595 self._dirty = True
603 self._dirty = True
596 if not self._map[filename].tracked:
604 if not self._map[filename].tracked:
597 self._check_new_tracked_filename(filename)
605 self._check_new_tracked_filename(filename)
598 (mode, size, mtime) = parentfiledata
606 (mode, size, mtime) = parentfiledata
599 self._map.set_clean(filename, mode, size, mtime)
607 self._map.set_clean(filename, mode, size, mtime)
600
608
    @requires_not_changing_parents
    def set_possibly_dirty(self, filename):
        """record that the current state of the file on disk is unknown"""
        # the file will need its on-disk state re-checked on next status
        self._dirty = True
        self._map.set_possibly_dirty(filename)
606
614
607 @requires_changing_parents
615 @requires_changing_parents
608 def update_file_p1(
616 def update_file_p1(
609 self,
617 self,
610 filename,
618 filename,
611 p1_tracked,
619 p1_tracked,
612 ):
620 ):
613 """Set a file as tracked in the parent (or not)
621 """Set a file as tracked in the parent (or not)
614
622
615 This is to be called when adjust the dirstate to a new parent after an history
623 This is to be called when adjust the dirstate to a new parent after an history
616 rewriting operation.
624 rewriting operation.
617
625
618 It should not be called during a merge (p2 != nullid) and only within
626 It should not be called during a merge (p2 != nullid) and only within
619 a `with dirstate.changing_parents(repo):` context.
627 a `with dirstate.changing_parents(repo):` context.
620 """
628 """
621 if self.in_merge:
629 if self.in_merge:
622 msg = b'update_file_reference should not be called when merging'
630 msg = b'update_file_reference should not be called when merging'
623 raise error.ProgrammingError(msg)
631 raise error.ProgrammingError(msg)
624 entry = self._map.get(filename)
632 entry = self._map.get(filename)
625 if entry is None:
633 if entry is None:
626 wc_tracked = False
634 wc_tracked = False
627 else:
635 else:
628 wc_tracked = entry.tracked
636 wc_tracked = entry.tracked
629 if not (p1_tracked or wc_tracked):
637 if not (p1_tracked or wc_tracked):
630 # the file is no longer relevant to anyone
638 # the file is no longer relevant to anyone
631 if self._map.get(filename) is not None:
639 if self._map.get(filename) is not None:
632 self._map.reset_state(filename)
640 self._map.reset_state(filename)
633 self._dirty = True
641 self._dirty = True
634 elif (not p1_tracked) and wc_tracked:
642 elif (not p1_tracked) and wc_tracked:
635 if entry is not None and entry.added:
643 if entry is not None and entry.added:
636 return # avoid dropping copy information (maybe?)
644 return # avoid dropping copy information (maybe?)
637
645
638 self._map.reset_state(
646 self._map.reset_state(
639 filename,
647 filename,
640 wc_tracked,
648 wc_tracked,
641 p1_tracked,
649 p1_tracked,
642 # the underlying reference might have changed, we will have to
650 # the underlying reference might have changed, we will have to
643 # check it.
651 # check it.
644 has_meaningful_mtime=False,
652 has_meaningful_mtime=False,
645 )
653 )
646
654
    @requires_changing_parents
    def update_file(
        self,
        filename,
        wc_tracked,
        p1_tracked,
        p2_info=False,
        possibly_dirty=False,
        parentfiledata=None,
    ):
        """update the information about a file in the dirstate

        This is to be called when the dirstate's parent changes to keep track
        of what is the file situation in regards to the working copy and its
        parent.

        This function must be called within a `dirstate.changing_parents`
        context.

        note: the API is at an early stage and we might need to adjust it
        depending of what information ends up being relevant and useful to
        other processing.
        """
        # all the heavy lifting is delegated to the shared helper so that
        # hacky_extension_update_file can reuse the exact same logic
        self._update_file(
            filename=filename,
            wc_tracked=wc_tracked,
            p1_tracked=p1_tracked,
            p2_info=p2_info,
            possibly_dirty=possibly_dirty,
            parentfiledata=parentfiledata,
        )
676
684
    # XXX since this make the dirstate dirty, we should enforce that it is done
    # within an appropriate change-context that scopes the change and ensures it
    # eventually gets written on disk (or rolled back)
    def hacky_extension_update_file(self, *args, **kwargs):
        """NEVER USE THIS, YOU DO NOT NEED IT

        This function is a variant of "update_file" to be called by a small
        set of extensions, it also adjusts the internal state of a file, but
        can be called outside a `changing_parents` context.

        A very small number of extensions meddle with the working copy content
        in a way that requires adjusting the dirstate accordingly. At the time
        this command is written they are :
        - keyword,
        - largefile,
        PLEASE DO NOT GROW THIS LIST ANY FURTHER.

        This function could probably be replaced by a more semantic one (like
        "adjust expected size" or "always revalidate file content", etc)
        however at the time where this is written, this is too much of a
        detour to be considered.
        """
        # plain delegation; the only difference with update_file is the
        # absence of the @requires_changing_parents guard
        self._update_file(
            *args,
            **kwargs,
        )
703
711
704 def _update_file(
712 def _update_file(
705 self,
713 self,
706 filename,
714 filename,
707 wc_tracked,
715 wc_tracked,
708 p1_tracked,
716 p1_tracked,
709 p2_info=False,
717 p2_info=False,
710 possibly_dirty=False,
718 possibly_dirty=False,
711 parentfiledata=None,
719 parentfiledata=None,
712 ):
720 ):
713
721
714 # note: I do not think we need to double check name clash here since we
722 # note: I do not think we need to double check name clash here since we
715 # are in a update/merge case that should already have taken care of
723 # are in a update/merge case that should already have taken care of
716 # this. The test agrees
724 # this. The test agrees
717
725
718 self._dirty = True
726 self._dirty = True
719 old_entry = self._map.get(filename)
727 old_entry = self._map.get(filename)
720 if old_entry is None:
728 if old_entry is None:
721 prev_tracked = False
729 prev_tracked = False
722 else:
730 else:
723 prev_tracked = old_entry.tracked
731 prev_tracked = old_entry.tracked
724 if prev_tracked != wc_tracked:
732 if prev_tracked != wc_tracked:
725 self._dirty_tracked_set = True
733 self._dirty_tracked_set = True
726
734
727 self._map.reset_state(
735 self._map.reset_state(
728 filename,
736 filename,
729 wc_tracked,
737 wc_tracked,
730 p1_tracked,
738 p1_tracked,
731 p2_info=p2_info,
739 p2_info=p2_info,
732 has_meaningful_mtime=not possibly_dirty,
740 has_meaningful_mtime=not possibly_dirty,
733 parentfiledata=parentfiledata,
741 parentfiledata=parentfiledata,
734 )
742 )
735
743
    def _check_new_tracked_filename(self, filename):
        """Abort if ``filename`` cannot become a tracked file.

        Rejects invalid names, names already tracked as a directory, names
        shadowed by a tracked file on one of their parent paths, and names
        outside the sparse checkout.
        """
        scmutil.checkfilename(filename)
        if self._map.hastrackeddir(filename):
            msg = _(b'directory %r already in dirstate')
            msg %= pycompat.bytestr(filename)
            raise error.Abort(msg)
        # shadows
        for d in pathutil.finddirs(filename):
            if self._map.hastrackeddir(d):
                # NOTE(review): the scan stops at the first ancestor that is
                # itself a tracked directory
                break
            entry = self._map.get(d)
            if entry is not None and not entry.removed:
                # a non-removed file entry exists where a directory is needed
                msg = _(b'file %r in dirstate clashes with %r')
                msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
                raise error.Abort(msg)
        self._check_sparse(filename)
752
760
753 def _check_sparse(self, filename):
761 def _check_sparse(self, filename):
754 """Check that a filename is inside the sparse profile"""
762 """Check that a filename is inside the sparse profile"""
755 sparsematch = self._sparsematcher
763 sparsematch = self._sparsematcher
756 if sparsematch is not None and not sparsematch.always():
764 if sparsematch is not None and not sparsematch.always():
757 if not sparsematch(filename):
765 if not sparsematch(filename):
758 msg = _(b"cannot add '%s' - it is outside the sparse checkout")
766 msg = _(b"cannot add '%s' - it is outside the sparse checkout")
759 hint = _(
767 hint = _(
760 b'include file with `hg debugsparse --include <pattern>` or use '
768 b'include file with `hg debugsparse --include <pattern>` or use '
761 b'`hg add -s <file>` to include file directory while adding'
769 b'`hg add -s <file>` to include file directory while adding'
762 )
770 )
763 raise error.Abort(msg % filename, hint=hint)
771 raise error.Abort(msg % filename, hint=hint)
764
772
    def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
        """Discover the on-disk case of ``path`` and cache it in ``storemap``.

        ``normed`` is the case-normalized form of ``path``; ``exists`` may be
        supplied by the caller to save a filesystem check. Returns the
        case-folded path. Note the result is only cached when the path
        exists on disk.
        """
        if exists is None:
            exists = os.path.lexists(os.path.join(self._root, path))
        if not exists:
            # Maybe a path component exists
            if not ignoremissing and b'/' in path:
                d, f = path.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, None)
                folded = d + b"/" + f
            else:
                # No path components, preserve original case
                folded = path
        else:
            # recursively normalize leading directory components
            # against dirstate
            if b'/' in normed:
                d, f = normed.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, True)
                r = self._root + b"/" + d
                folded = d + b"/" + util.fspath(f, r)
            else:
                folded = util.fspath(normed, self._root)
            # cache for future lookups (existing paths only)
            storemap[normed] = folded

        return folded
790
798
791 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
799 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
792 normed = util.normcase(path)
800 normed = util.normcase(path)
793 folded = self._map.filefoldmap.get(normed, None)
801 folded = self._map.filefoldmap.get(normed, None)
794 if folded is None:
802 if folded is None:
795 if isknown:
803 if isknown:
796 folded = path
804 folded = path
797 else:
805 else:
798 folded = self._discoverpath(
806 folded = self._discoverpath(
799 path, normed, ignoremissing, exists, self._map.filefoldmap
807 path, normed, ignoremissing, exists, self._map.filefoldmap
800 )
808 )
801 return folded
809 return folded
802
810
    def _normalize(self, path, isknown, ignoremissing=False, exists=None):
        """Return the dirstate-cased spelling of ``path`` (file or directory).

        Consults the file fold map first, then the directory fold map, and
        only then falls back to on-disk discovery (caching the result in
        ``dirfoldmap``).
        """
        normed = util.normcase(path)
        folded = self._map.filefoldmap.get(normed, None)
        if folded is None:
            folded = self._map.dirfoldmap.get(normed, None)
            if folded is None:
                if isknown:
                    folded = path
                else:
                    # store discovered result in dirfoldmap so that future
                    # normalizefile calls don't start matching directories
                    folded = self._discoverpath(
                        path, normed, ignoremissing, exists, self._map.dirfoldmap
                    )
        return folded
818
826
819 def normalize(self, path, isknown=False, ignoremissing=False):
827 def normalize(self, path, isknown=False, ignoremissing=False):
820 """
828 """
821 normalize the case of a pathname when on a casefolding filesystem
829 normalize the case of a pathname when on a casefolding filesystem
822
830
823 isknown specifies whether the filename came from walking the
831 isknown specifies whether the filename came from walking the
824 disk, to avoid extra filesystem access.
832 disk, to avoid extra filesystem access.
825
833
826 If ignoremissing is True, missing path are returned
834 If ignoremissing is True, missing path are returned
827 unchanged. Otherwise, we try harder to normalize possibly
835 unchanged. Otherwise, we try harder to normalize possibly
828 existing path components.
836 existing path components.
829
837
830 The normalized case is determined based on the following precedence:
838 The normalized case is determined based on the following precedence:
831
839
832 - version of name already stored in the dirstate
840 - version of name already stored in the dirstate
833 - version of name stored on disk
841 - version of name stored on disk
834 - version provided via command arguments
842 - version provided via command arguments
835 """
843 """
836
844
837 if self._checkcase:
845 if self._checkcase:
838 return self._normalize(path, isknown, ignoremissing)
846 return self._normalize(path, isknown, ignoremissing)
839 return path
847 return path
840
848
    # XXX since this make the dirstate dirty, we should enforce that it is done
    # within an appropriate change-context that scopes the change and ensures it
    # eventually gets written on disk (or rolled back)
    def clear(self):
        """Forget every entry in the dirstate map and mark the dirstate dirty."""
        self._map.clear()
        self._dirty = True
847
855
    # XXX since this make the dirstate dirty, we should enforce that it is done
    # within an appropriate change-context that scopes the change and ensures it
    # eventually gets written on disk (or rolled back)
    def rebuild(self, parent, allfiles, changedfiles=None):
        """Rebuild dirstate entries against revision ``parent``.

        ``allfiles`` lists the files of ``parent``. When ``changedfiles`` is
        None the whole dirstate is rebuilt; otherwise only the listed files
        are refreshed (re-looked-up when still in ``allfiles``, dropped
        otherwise).
        """
        matcher = self._sparsematcher
        if matcher is not None and not matcher.always():
            # should not add non-matching files
            allfiles = [f for f in allfiles if matcher(f)]
            if changedfiles:
                changedfiles = [f for f in changedfiles if matcher(f)]

            if changedfiles is not None:
                # these files will be deleted from the dirstate when they are
                # not found to be in allfiles
                dirstatefilestoremove = {f for f in self if not matcher(f)}
                changedfiles = dirstatefilestoremove.union(changedfiles)

        if changedfiles is None:
            # Rebuild entire dirstate
            to_lookup = allfiles
            to_drop = []
            self.clear()
        elif len(changedfiles) < 10:
            # Avoid turning allfiles into a set, which can be expensive if it's
            # large.
            to_lookup = []
            to_drop = []
            for f in changedfiles:
                if f in allfiles:
                    to_lookup.append(f)
                else:
                    to_drop.append(f)
        else:
            changedfilesset = set(changedfiles)
            to_lookup = changedfilesset & set(allfiles)
            to_drop = changedfilesset - to_lookup

        if self._origpl is None:
            # remember the previous parents for the parent-change callbacks
            self._origpl = self._pl
        self._map.setparents(parent, self._nodeconstants.nullid)

        for f in to_lookup:
            if self.in_merge:
                self.set_tracked(f)
            else:
                self._map.reset_state(
                    f,
                    wc_tracked=True,
                    p1_tracked=True,
                )
        for f in to_drop:
            self._map.reset_state(f)

        self._dirty = True
902
910
    def identity(self):
        """Return identity of dirstate itself to detect changing in storage

        If identity of previous dirstate is equal to this, writing
        changes based on the former dirstate out can keep consistency.
        """
        # NOTE(review): the identity value appears to be captured by the map
        # when the dirstate file is read — confirm in dirstatemap
        return self._map.identity
910
918
911 def write(self, tr):
919 def write(self, tr):
912 if not self._dirty:
920 if not self._dirty:
913 return
921 return
914
922
915 write_key = self._use_tracked_hint and self._dirty_tracked_set
923 write_key = self._use_tracked_hint and self._dirty_tracked_set
916 if tr:
924 if tr:
917 # make sure we invalidate the current change on abort
925 # make sure we invalidate the current change on abort
918 if tr is not None:
926 if tr is not None:
919 tr.addabort(
927 tr.addabort(
920 b'dirstate-invalidate',
928 b'dirstate-invalidate',
921 lambda tr: self.invalidate(),
929 lambda tr: self.invalidate(),
922 )
930 )
923 # delay writing in-memory changes out
931 # delay writing in-memory changes out
924 tr.addfilegenerator(
932 tr.addfilegenerator(
925 b'dirstate-1-main',
933 b'dirstate-1-main',
926 (self._filename,),
934 (self._filename,),
927 lambda f: self._writedirstate(tr, f),
935 lambda f: self._writedirstate(tr, f),
928 location=b'plain',
936 location=b'plain',
929 post_finalize=True,
937 post_finalize=True,
930 )
938 )
931 if write_key:
939 if write_key:
932 tr.addfilegenerator(
940 tr.addfilegenerator(
933 b'dirstate-2-key-post',
941 b'dirstate-2-key-post',
934 (self._filename_th,),
942 (self._filename_th,),
935 lambda f: self._write_tracked_hint(tr, f),
943 lambda f: self._write_tracked_hint(tr, f),
936 location=b'plain',
944 location=b'plain',
937 post_finalize=True,
945 post_finalize=True,
938 )
946 )
939 return
947 return
940
948
941 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
949 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
942 with file(self._filename) as f:
950 with file(self._filename) as f:
943 self._writedirstate(tr, f)
951 self._writedirstate(tr, f)
944 if write_key:
952 if write_key:
945 # we update the key-file after writing to make sure reader have a
953 # we update the key-file after writing to make sure reader have a
946 # key that match the newly written content
954 # key that match the newly written content
947 with file(self._filename_th) as f:
955 with file(self._filename_th) as f:
948 self._write_tracked_hint(tr, f)
956 self._write_tracked_hint(tr, f)
949
957
    def delete_tracked_hint(self):
        """remove the tracked_hint file

        To be used by format downgrades operation"""
        self._opener.unlink(self._filename_th)
        # stop maintaining the hint file from now on
        self._use_tracked_hint = False
956
964
    def addparentchangecallback(self, category, callback):
        """add a callback to be called when the wd parents are changed

        Callback will be called with the following arguments:
            dirstate, (oldp1, oldp2), (newp1, newp2)

        Category is a unique identifier to allow overwriting an old callback
        with a newer callback.
        """
        # a later registration under the same category silently replaces the
        # earlier one; callbacks are run sorted by category (see
        # _writedirstate)
        self._plchangecallbacks[category] = callback
967
975
968 def _writedirstate(self, tr, st):
976 def _writedirstate(self, tr, st):
969 # notify callbacks about parents change
977 # notify callbacks about parents change
970 if self._origpl is not None and self._origpl != self._pl:
978 if self._origpl is not None and self._origpl != self._pl:
971 for c, callback in sorted(self._plchangecallbacks.items()):
979 for c, callback in sorted(self._plchangecallbacks.items()):
972 callback(self, self._origpl, self._pl)
980 callback(self, self._origpl, self._pl)
973 self._origpl = None
981 self._origpl = None
974 self._map.write(tr, st)
982 self._map.write(tr, st)
975 self._dirty = False
983 self._dirty = False
976 self._dirty_tracked_set = False
984 self._dirty_tracked_set = False
977
985
    def _write_tracked_hint(self, tr, f):
        """Write a fresh random key to the tracked-hint file ``f``.

        The key changes every time the hint file is rewritten, letting
        external readers cheaply detect that the tracked set may have
        changed. ``tr`` is unused here but kept for symmetry with the other
        write helpers.
        """
        key = node.hex(uuid.uuid4().bytes)
        f.write(b"1\n%s\n" % key)  # 1 is the format version
981
989
982 def _dirignore(self, f):
990 def _dirignore(self, f):
983 if self._ignore(f):
991 if self._ignore(f):
984 return True
992 return True
985 for p in pathutil.finddirs(f):
993 for p in pathutil.finddirs(f):
986 if self._ignore(p):
994 if self._ignore(p):
987 return True
995 return True
988 return False
996 return False
989
997
    def _ignorefiles(self):
        """Return the list of ignore-file paths applying to this repository.

        Includes the repository's own ``.hgignore`` (when present) plus any
        files configured via ``ui.ignore`` / ``ui.ignore.*``.
        """
        files = []
        if os.path.exists(self._join(b'.hgignore')):
            files.append(self._join(b'.hgignore'))
        for name, path in self._ui.configitems(b"ui"):
            if name == b'ignore' or name.startswith(b'ignore.'):
                # we need to use os.path.join here rather than self._join
                # because path is arbitrary and user-specified
                files.append(os.path.join(self._rootdir, util.expandpath(path)))
        return files
1000
1008
    def _ignorefileandline(self, f):
        """Return ``(file, lineno, line)`` of the ignore rule matching ``f``.

        Walks all ignore files breadth-first, following ``subinclude``
        patterns. Returns ``(None, -1, b"")`` when no pattern matches ``f``.
        """
        files = collections.deque(self._ignorefiles())
        visited = set()
        while files:
            i = files.popleft()
            patterns = matchmod.readpatternfile(
                i, self._ui.warn, sourceinfo=True
            )
            for pattern, lineno, line in patterns:
                kind, p = matchmod._patsplit(pattern, b'glob')
                if kind == b"subinclude":
                    # NOTE(review): a subinclude referenced by two files can
                    # be queued twice before its first visit (visited is only
                    # updated after a file is fully processed) — extra work,
                    # but results are unaffected
                    if p not in visited:
                        files.append(p)
                    continue
                m = matchmod.match(
                    self._root, b'', [], [pattern], warn=self._ui.warn
                )
                if m(f):
                    return (i, lineno, line)
            visited.add(i)
        return (None, -1, b"")
1022
1030
    def _walkexplicit(self, match, subrepos):
        """Get stat data about the files explicitly specified by match.

        Return a triple (results, dirsfound, dirsnotfound).
        - results is a mapping from filename to stat result. It also contains
          listings mapping subrepos and .hg to None.
        - dirsfound is a list of files found to be directories.
        - dirsnotfound is a list of files that the dirstate thinks are
          directories and that were not found."""

        def badtype(mode):
            # Human-readable description for a file whose type we cannot
            # track (device node, fifo, socket, directory, ...).
            kind = _(b'unknown')
            if stat.S_ISCHR(mode):
                kind = _(b'character device')
            elif stat.S_ISBLK(mode):
                kind = _(b'block device')
            elif stat.S_ISFIFO(mode):
                kind = _(b'fifo')
            elif stat.S_ISSOCK(mode):
                kind = _(b'socket')
            elif stat.S_ISDIR(mode):
                kind = _(b'directory')
            return _(b'unsupported file type (type is %s)') % kind

        # Bind frequently-used attributes/functions to locals: this method
        # can run once per explicitly-named path, so lookups are hoisted.
        badfn = match.bad
        dmap = self._map
        lstat = os.lstat
        getkind = stat.S_IFMT
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join
        dirsfound = []
        foundadd = dirsfound.append
        dirsnotfound = []
        notfoundadd = dirsnotfound.append

        if not match.isexact() and self._checkcase:
            normalize = self._normalize
        else:
            normalize = None

        # Drop any explicit files that live inside a subrepo: they are the
        # subrepo's responsibility, not ours. Both lists are sorted so a
        # two-pointer sweep suffices.
        files = sorted(match.files())
        subrepos.sort()
        i, j = 0, 0
        while i < len(files) and j < len(subrepos):
            subpath = subrepos[j] + b"/"
            if files[i] < subpath:
                i += 1
                continue
            while i < len(files) and files[i].startswith(subpath):
                del files[i]
            j += 1

        if not files or b'' in files:
            files = [b'']
            # constructing the foldmap is expensive, so don't do it for the
            # common case where files is ['']
            normalize = None
        # Seed results with sentinels so walks never descend into subrepos
        # or the .hg metadata directory.
        results = dict.fromkeys(subrepos)
        results[b'.hg'] = None

        for ff in files:
            if normalize:
                nf = normalize(ff, False, True)
            else:
                nf = ff
            if nf in results:
                continue

            try:
                st = lstat(join(nf))
                kind = getkind(st.st_mode)
                if kind == dirkind:
                    if nf in dmap:
                        # file replaced by dir on disk but still in dirstate
                        results[nf] = None
                    foundadd((nf, ff))
                elif kind == regkind or kind == lnkkind:
                    results[nf] = st
                else:
                    badfn(ff, badtype(kind))
                    if nf in dmap:
                        results[nf] = None
            except (OSError) as inst:
                # nf not found on disk - it is dirstate only
                if nf in dmap:  # does it exactly match a missing file?
                    results[nf] = None
                else:  # does it match a missing directory?
                    if self._map.hasdir(nf):
                        notfoundadd(nf)
                    else:
                        badfn(ff, encoding.strtolocal(inst.strerror))

        # match.files() may contain explicitly-specified paths that shouldn't
        # be taken; drop them from the list of files found. dirsfound/notfound
        # aren't filtered here because they will be tested later.
        if match.anypats():
            for f in list(results):
                if f == b'.hg' or f in subrepos:
                    # keep sentinel to disable further out-of-repo walks
                    continue
                if not match(f):
                    del results[f]

        # Case insensitive filesystems cannot rely on lstat() failing to detect
        # a case-only rename. Prune the stat object for any file that does not
        # match the case in the filesystem, if there are multiple files that
        # normalize to the same path.
        if match.isexact() and self._checkcase:
            normed = {}

            # Group the stat'ed paths by their case-normalized form.
            for f, st in results.items():
                if st is None:
                    continue

                nc = util.normcase(f)
                paths = normed.get(nc)

                if paths is None:
                    paths = set()
                    normed[nc] = paths

                paths.add(f)

            # For each group with more than one spelling, keep only the
            # spelling that matches the filesystem's own case.
            for norm, paths in normed.items():
                if len(paths) > 1:
                    for path in paths:
                        folded = self._discoverpath(
                            path, norm, True, None, self._map.dirfoldmap
                        )
                        if path != folded:
                            results[path] = None

        return results, dirsfound, dirsnotfound
    def walk(self, match, subrepos, unknown, ignored, full=True):
        """
        Walk recursively through the directory tree, finding all files
        matched by match.

        If full is False, maybe skip some known-clean files.

        Return a dict mapping filename to stat-like object (either
        mercurial.osutil.stat instance or return value of os.stat()).

        """
        # full is a flag that extensions that hook into walk can use -- this
        # implementation doesn't use it at all. This satisfies the contract
        # because we only guarantee a "maybe".

        # Select the ignore predicates: listing ignored files disables
        # ignoring entirely; listing neither unknown nor ignored means no
        # directory needs to be explored beyond the explicit files.
        if ignored:
            ignore = util.never
            dirignore = util.never
        elif unknown:
            ignore = self._ignore
            dirignore = self._dirignore
        else:
            # if not unknown and not ignored, drop dir recursion and step 2
            ignore = util.always
            dirignore = util.always

        if self._sparsematchfn is not None:
            # Restrict the walk to the sparse checkout, while still allowing
            # the explicitly-requested files through.
            em = matchmod.exact(match.files())
            sm = matchmod.unionmatcher([self._sparsematcher, em])
            match = matchmod.intersectmatchers(match, sm)

        # Hoist attribute lookups out of the (potentially huge) walk loops.
        matchfn = match.matchfn
        matchalways = match.always()
        matchtdir = match.traversedir
        dmap = self._map
        listdir = util.listdir
        lstat = os.lstat
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join

        exact = skipstep3 = False
        if match.isexact():  # match.exact
            exact = True
            dirignore = util.always  # skip step 2
        elif match.prefix():  # match.match, no patterns
            skipstep3 = True

        if not exact and self._checkcase:
            normalize = self._normalize
            normalizefile = self._normalizefile
            skipstep3 = False
        else:
            normalize = self._normalize
            normalizefile = None

        # step 1: find all explicit files
        results, work, dirsnotfound = self._walkexplicit(match, subrepos)
        if matchtdir:
            for d in work:
                matchtdir(d[0])
            for d in dirsnotfound:
                matchtdir(d)

        skipstep3 = skipstep3 and not (work or dirsnotfound)
        work = [d for d in work if not dirignore(d[0])]

        # step 2: visit subdirectories
        def traverse(work, alreadynormed):
            # Iterative depth-first traversal; `work` is used as a stack.
            wadd = work.append
            while work:
                tracing.counter('dirstate.walk work', len(work))
                nd = work.pop()
                visitentries = match.visitchildrenset(nd)
                if not visitentries:
                    continue
                if visitentries == b'this' or visitentries == b'all':
                    visitentries = None
                skip = None
                if nd != b'':
                    skip = b'.hg'
                try:
                    with tracing.log('dirstate.walk.traverse listdir %s', nd):
                        entries = listdir(join(nd), stat=True, skip=skip)
                except (PermissionError, FileNotFoundError) as inst:
                    match.bad(
                        self.pathto(nd), encoding.strtolocal(inst.strerror)
                    )
                    continue
                for f, kind, st in entries:
                    # Some matchers may return files in the visitentries set,
                    # instead of 'this', if the matcher explicitly mentions them
                    # and is not an exactmatcher. This is acceptable; we do not
                    # make any hard assumptions about file-or-directory below
                    # based on the presence of `f` in visitentries. If
                    # visitchildrenset returned a set, we can always skip the
                    # entries *not* in the set it provided regardless of whether
                    # they're actually a file or a directory.
                    if visitentries and f not in visitentries:
                        continue
                    if normalizefile:
                        # even though f might be a directory, we're only
                        # interested in comparing it to files currently in the
                        # dmap -- therefore normalizefile is enough
                        nf = normalizefile(
                            nd and (nd + b"/" + f) or f, True, True
                        )
                    else:
                        nf = nd and (nd + b"/" + f) or f
                    if nf not in results:
                        if kind == dirkind:
                            if not ignore(nf):
                                if matchtdir:
                                    matchtdir(nf)
                                wadd(nf)
                            if nf in dmap and (matchalways or matchfn(nf)):
                                results[nf] = None
                        elif kind == regkind or kind == lnkkind:
                            if nf in dmap:
                                if matchalways or matchfn(nf):
                                    results[nf] = st
                            elif (matchalways or matchfn(nf)) and not ignore(
                                nf
                            ):
                                # unknown file -- normalize if necessary
                                if not alreadynormed:
                                    nf = normalize(nf, False, True)
                                results[nf] = st
                        elif nf in dmap and (matchalways or matchfn(nf)):
                            results[nf] = None

        for nd, d in work:
            # alreadynormed means that processwork doesn't have to do any
            # expensive directory normalization
            alreadynormed = not normalize or nd == d
            traverse([d], alreadynormed)

        # Drop the sentinels that were only there to block traversal.
        for s in subrepos:
            del results[s]
        del results[b'.hg']

        # step 3: visit remaining files from dmap
        if not skipstep3 and not exact:
            # If a dmap file is not in results yet, it was either
            # a) not matching matchfn b) ignored, c) missing, or d) under a
            # symlink directory.
            if not results and matchalways:
                visit = [f for f in dmap]
            else:
                visit = [f for f in dmap if f not in results and matchfn(f)]
            visit.sort()

            if unknown:
                # unknown == True means we walked all dirs under the roots
                # that wasn't ignored, and everything that matched was stat'ed
                # and is already in results.
                # The rest must thus be ignored or under a symlink.
                audit_path = pathutil.pathauditor(self._root, cached=True)

                for nf in iter(visit):
                    # If a stat for the same file was already added with a
                    # different case, don't add one for this, since that would
                    # make it appear as if the file exists under both names
                    # on disk.
                    if (
                        normalizefile
                        and normalizefile(nf, True, True) in results
                    ):
                        results[nf] = None
                    # Report ignored items in the dmap as long as they are not
                    # under a symlink directory.
                    elif audit_path.check(nf):
                        try:
                            results[nf] = lstat(join(nf))
                            # file was just ignored, no links, and exists
                        except OSError:
                            # file doesn't exist
                            results[nf] = None
                    else:
                        # It's either missing or under a symlink directory
                        # which we in this case report as missing
                        results[nf] = None
            else:
                # We may not have walked the full directory tree above,
                # so stat and check everything we missed.
                iv = iter(visit)
                for st in util.statfiles([join(i) for i in visit]):
                    results[next(iv)] = st
        return results
    def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
        """Run the Rust status implementation and return (lookup, status).

        `lookup` is the list of files whose state is uncertain ("unsure") and
        `status` is a `scmutil.status` instance. May mark the dirstate dirty
        as a side effect (see the `dirty` flag returned by rustmod.status).
        Raises rustmod.FallbackError when the Rust path cannot handle the
        request (handled by the caller, which falls back to Python).
        """
        if self._sparsematchfn is not None:
            # Restrict the status to the sparse checkout, keeping explicitly
            # requested files visible.
            em = matchmod.exact(matcher.files())
            sm = matchmod.unionmatcher([self._sparsematcher, em])
            matcher = matchmod.intersectmatchers(matcher, sm)
        # Force Rayon (Rust parallelism library) to respect the number of
        # workers. This is a temporary workaround until Rust code knows
        # how to read the config file.
        numcpus = self._ui.configint(b"worker", b"numcpus")
        if numcpus is not None:
            encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)

        workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
        if not workers_enabled:
            encoding.environ[b"RAYON_NUM_THREADS"] = b"1"

        (
            lookup,
            modified,
            added,
            removed,
            deleted,
            clean,
            ignored,
            unknown,
            warnings,
            bad,
            traversed,
            dirty,
        ) = rustmod.status(
            self._map._map,
            matcher,
            self._rootdir,
            self._ignorefiles(),
            self._checkexec,
            bool(list_clean),
            bool(list_ignored),
            bool(list_unknown),
            bool(matcher.traversedir),
        )

        # The Rust side may have refreshed cached mtimes; remember that the
        # dirstate needs to be written out.
        self._dirty |= dirty

        if matcher.traversedir:
            for dir in traversed:
                matcher.traversedir(dir)

        if self._ui.warn:
            for item in warnings:
                if isinstance(item, tuple):
                    # (file_path, syntax): invalid pattern syntax warning
                    file_path, syntax = item
                    msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
                        file_path,
                        syntax,
                    )
                    self._ui.warn(msg)
                else:
                    # bare path: the ignore/pattern file could not be read
                    msg = _(b"skipping unreadable pattern file '%s': %s\n")
                    self._ui.warn(
                        msg
                        % (
                            pathutil.canonpath(
                                self._rootdir, self._rootdir, item
                            ),
                            b"No such file or directory",
                        )
                    )

        for fn, message in bad:
            matcher.bad(fn, encoding.strtolocal(message))

        status = scmutil.status(
            modified=modified,
            added=added,
            removed=removed,
            deleted=deleted,
            unknown=unknown,
            ignored=ignored,
            clean=clean,
        )
        return (lookup, status)
    # XXX since this can make the dirstate dirty (through rust), we should
    # enforce that it is done withing an appropriate change-context that scope
    # the change and ensure it eventually get written on disk (or rolled back)
    def status(self, match, subrepos, ignored, clean, unknown):
        """Determine the status of the working copy relative to the
        dirstate and return a pair of (unsure, status), where status is of type
        scmutil.status and:

          unsure:
            files that might have been modified since the dirstate was
            written, but need to be read to be sure (size is the same
            but mtime differs)
          status.modified:
            files that have definitely been modified since the dirstate
            was written (different size or mode)
          status.clean:
            files that have definitely not been modified since the
            dirstate was written
        """
        listignored, listclean, listunknown = ignored, clean, unknown
        lookup, modified, added, unknown, ignored = [], [], [], [], []
        removed, deleted, clean = [], [], []

        dmap = self._map
        dmap.preload()

        # Prefer the Rust fast path; disable it for the cases it does not
        # support yet (no rust build, case-insensitive FS, subrepos, exotic
        # matchers).
        use_rust = True

        allowed_matchers = (
            matchmod.alwaysmatcher,
            matchmod.differencematcher,
            matchmod.exactmatcher,
            matchmod.includematcher,
            matchmod.intersectionmatcher,
            matchmod.nevermatcher,
            matchmod.unionmatcher,
        )

        if rustmod is None:
            use_rust = False
        elif self._checkcase:
            # Case-insensitive filesystems are not handled yet
            use_rust = False
        elif subrepos:
            use_rust = False
        elif not isinstance(match, allowed_matchers):
            # Some matchers have yet to be implemented
            use_rust = False

        # Get the time from the filesystem so we can disambiguate files that
        # appear modified in the present or future.
        try:
            mtime_boundary = timestamp.get_fs_now(self._opener)
        except OSError:
            # In largefiles or readonly context
            mtime_boundary = None

        if use_rust:
            try:
                res = self._rust_status(
                    match, listclean, listignored, listunknown
                )
                return res + (mtime_boundary,)
            except rustmod.FallbackError:
                # Rust could not handle this request; fall through to the
                # pure-Python implementation below.
                pass

        def noop(f):
            pass

        # Bind hot-path callables to locals; this loop runs once per walked
        # file.
        dcontains = dmap.__contains__
        dget = dmap.__getitem__
        ladd = lookup.append  # aka "unsure"
        madd = modified.append
        aadd = added.append
        uadd = unknown.append if listunknown else noop
        iadd = ignored.append if listignored else noop
        radd = removed.append
        dadd = deleted.append
        cadd = clean.append if listclean else noop
        mexact = match.exact
        dirignore = self._dirignore
        checkexec = self._checkexec
        checklink = self._checklink
        copymap = self._map.copymap

        # We need to do full walks when either
        # - we're listing all clean files, or
        # - match.traversedir does something, because match.traversedir should
        #   be called for every dir in the working dir
        full = listclean or match.traversedir is not None
        for fn, st in self.walk(
            match, subrepos, listunknown, listignored, full=full
        ).items():
            if not dcontains(fn):
                # Not tracked: classify as ignored or unknown.
                if (listignored or mexact(fn)) and dirignore(fn):
                    if listignored:
                        iadd(fn)
                else:
                    uadd(fn)
                continue

            t = dget(fn)
            mode = t.mode
            size = t.size

            if not st and t.tracked:
                # tracked but gone from disk
                dadd(fn)
            elif t.p2_info:
                # merge-related state: always report as modified
                madd(fn)
            elif t.added:
                aadd(fn)
            elif t.removed:
                radd(fn)
            elif t.tracked:
                if not checklink and t.has_fallback_symlink:
                    # If the file system does not support symlink, the mode
                    # might not be correctly stored in the dirstate, so do not
                    # trust it.
                    ladd(fn)
                elif not checkexec and t.has_fallback_exec:
                    # If the file system does not support exec bits, the mode
                    # might not be correctly stored in the dirstate, so do not
                    # trust it.
                    ladd(fn)
                elif (
                    size >= 0
                    and (
                        (size != st.st_size and size != st.st_size & _rangemask)
                        or ((mode ^ st.st_mode) & 0o100 and checkexec)
                    )
                    or fn in copymap
                ):
                    if stat.S_ISLNK(st.st_mode) and size != st.st_size:
                        # issue6456: Size returned may be longer due to
                        # encryption on EXT-4 fscrypt, undecided.
                        ladd(fn)
                    else:
                        madd(fn)
                elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
                    # There might be a change in the future if for example the
                    # internal clock is off, but this is a case where the issues
                    # the user would face would be a lot worse and there is
                    # nothing we can really do.
                    ladd(fn)
                elif listclean:
                    cadd(fn)
        status = scmutil.status(
            modified, added, removed, deleted, unknown, ignored, clean
        )
        return (lookup, status, mtime_boundary)
1583 def matches(self, match):
1591 def matches(self, match):
1584 """
1592 """
1585 return files in the dirstate (in whatever state) filtered by match
1593 return files in the dirstate (in whatever state) filtered by match
1586 """
1594 """
1587 dmap = self._map
1595 dmap = self._map
1588 if rustmod is not None:
1596 if rustmod is not None:
1589 dmap = self._map._map
1597 dmap = self._map._map
1590
1598
1591 if match.always():
1599 if match.always():
1592 return dmap.keys()
1600 return dmap.keys()
1593 files = match.files()
1601 files = match.files()
1594 if match.isexact():
1602 if match.isexact():
1595 # fast path -- filter the other way around, since typically files is
1603 # fast path -- filter the other way around, since typically files is
1596 # much smaller than dmap
1604 # much smaller than dmap
1597 return [f for f in files if f in dmap]
1605 return [f for f in files if f in dmap]
1598 if match.prefix() and all(fn in dmap for fn in files):
1606 if match.prefix() and all(fn in dmap for fn in files):
1599 # fast path -- all the values are known to be files, so just return
1607 # fast path -- all the values are known to be files, so just return
1600 # that
1608 # that
1601 return list(files)
1609 return list(files)
1602 return [f for f in dmap if match(f)]
1610 return [f for f in dmap if match(f)]
1603
1611
1604 def _actualfilename(self, tr):
1612 def _actualfilename(self, tr):
1605 if tr:
1613 if tr:
1606 return self._pendingfilename
1614 return self._pendingfilename
1607 else:
1615 else:
1608 return self._filename
1616 return self._filename
1609
1617
1610 def data_backup_filename(self, backupname):
1618 def data_backup_filename(self, backupname):
1611 if not self._use_dirstate_v2:
1619 if not self._use_dirstate_v2:
1612 return None
1620 return None
1613 return backupname + b'.v2-data'
1621 return backupname + b'.v2-data'
1614
1622
1615 def _new_backup_data_filename(self, backupname):
1623 def _new_backup_data_filename(self, backupname):
1616 """return a filename to backup a data-file or None"""
1624 """return a filename to backup a data-file or None"""
1617 if not self._use_dirstate_v2:
1625 if not self._use_dirstate_v2:
1618 return None
1626 return None
1619 if self._map.docket.uuid is None:
1627 if self._map.docket.uuid is None:
1620 # not created yet, nothing to backup
1628 # not created yet, nothing to backup
1621 return None
1629 return None
1622 data_filename = self._map.docket.data_filename()
1630 data_filename = self._map.docket.data_filename()
1623 return data_filename, self.data_backup_filename(backupname)
1631 return data_filename, self.data_backup_filename(backupname)
1624
1632
1625 def backup_data_file(self, backupname):
1633 def backup_data_file(self, backupname):
1626 if not self._use_dirstate_v2:
1634 if not self._use_dirstate_v2:
1627 return None
1635 return None
1628 docket = docketmod.DirstateDocket.parse(
1636 docket = docketmod.DirstateDocket.parse(
1629 self._opener.read(backupname),
1637 self._opener.read(backupname),
1630 self._nodeconstants,
1638 self._nodeconstants,
1631 )
1639 )
1632 return self.data_backup_filename(backupname), docket.data_filename()
1640 return self.data_backup_filename(backupname), docket.data_filename()
1633
1641
    def savebackup(self, tr, backupname):
        '''Save current dirstate into backup file

        ``tr`` is the currently-running transaction, if any; ``backupname``
        is the vfs-relative name the dirstate is copied to.
        '''
        filename = self._actualfilename(tr)
        # backing up onto the live file would destroy the data being saved
        assert backupname != filename

        # use '_writedirstate' instead of 'write' to write changes certainly,
        # because the latter omits writing out if transaction is running.
        # output file will be used to create backup of dirstate at this point.
        if self._dirty:
            self._writedirstate(
                tr,
                self._opener(filename, b"w", atomictemp=True, checkambig=True),
            )

        if tr:
            # ensure that subsequent tr.writepending returns True for
            # changes written out above, even if dirstate is never
            # changed after this
            tr.addfilegenerator(
                b'dirstate-1-main',
                (self._filename,),
                lambda f: self._writedirstate(tr, f),
                location=b'plain',
                post_finalize=True,
            )

        # remove any stale backup before creating a fresh one
        self._opener.tryunlink(backupname)
        if self._opener.exists(filename):
            # hardlink backup is okay because _writedirstate is always called
            # with an "atomictemp=True" file.
            util.copyfile(
                self._opener.join(filename),
                self._opener.join(backupname),
                hardlink=True,
            )
        # also back up the dirstate-v2 data file, when one exists
        data_pair = self._new_backup_data_filename(backupname)
        if data_pair is not None:
            data_filename, bck_data_filename = data_pair
            util.copyfile(
                self._opener.join(data_filename),
                self._opener.join(bck_data_filename),
                hardlink=True,
            )
            if tr is not None:
                # ensure that pending file written above is unlinked at
                # failure, even if tr.writepending isn't invoked until the
                # end of this transaction
                tr.registertmp(bck_data_filename, location=b'plain')
1682
1690
    def restorebackup(self, tr, backupname):
        '''Restore dirstate by backup file

        If no backup exists under ``backupname``, the current dirstate files
        are simply deleted (restoring the "no dirstate" state the backup
        recorded).
        '''
        # this "invalidate()" prevents "wlock.release()" from writing
        # changes of dirstate out after restoring from backup file
        self.invalidate()
        o = self._opener
        if not o.exists(backupname):
            # there was no file backup, delete existing files
            filename = self._actualfilename(tr)
            data_file = None
            if self._use_dirstate_v2 and self._map.docket.uuid is not None:
                data_file = self._map.docket.data_filename()
            if o.exists(filename):
                o.unlink(filename)
            if data_file is not None and o.exists(data_file):
                o.unlink(data_file)
            return
        filename = self._actualfilename(tr)
        # (backup-of-data-file, current-data-file) pair, or None for v1
        data_pair = self.backup_data_file(backupname)
        if o.exists(filename) and util.samefile(
            o.join(backupname), o.join(filename)
        ):
            # backup and current file are the same on disk (hardlinked by
            # savebackup): dropping the backup name is enough, no rename
            o.unlink(backupname)
        else:
            o.rename(backupname, filename, checkambig=True)

        if data_pair is not None:
            data_backup, target = data_pair
            if o.exists(target) and util.samefile(
                o.join(data_backup), o.join(target)
            ):
                # same underlying file: just drop the backup name
                o.unlink(data_backup)
            else:
                o.rename(data_backup, target, checkambig=True)
1717
1725
1718 def clearbackup(self, tr, backupname):
1726 def clearbackup(self, tr, backupname):
1719 '''Clear backup file'''
1727 '''Clear backup file'''
1720 o = self._opener
1728 o = self._opener
1721 if o.exists(backupname):
1729 if o.exists(backupname):
1722 data_backup = self.backup_data_file(backupname)
1730 data_backup = self.backup_data_file(backupname)
1723 o.unlink(backupname)
1731 o.unlink(backupname)
1724 if data_backup is not None:
1732 if data_backup is not None:
1725 o.unlink(data_backup[0])
1733 o.unlink(data_backup[0])
1726
1734
1727 def verify(self, m1, m2, p1, narrow_matcher=None):
1735 def verify(self, m1, m2, p1, narrow_matcher=None):
1728 """
1736 """
1729 check the dirstate contents against the parent manifest and yield errors
1737 check the dirstate contents against the parent manifest and yield errors
1730 """
1738 """
1731 missing_from_p1 = _(
1739 missing_from_p1 = _(
1732 b"%s marked as tracked in p1 (%s) but not in manifest1\n"
1740 b"%s marked as tracked in p1 (%s) but not in manifest1\n"
1733 )
1741 )
1734 unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
1742 unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
1735 missing_from_ps = _(
1743 missing_from_ps = _(
1736 b"%s marked as modified, but not in either manifest\n"
1744 b"%s marked as modified, but not in either manifest\n"
1737 )
1745 )
1738 missing_from_ds = _(
1746 missing_from_ds = _(
1739 b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
1747 b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
1740 )
1748 )
1741 for f, entry in self.items():
1749 for f, entry in self.items():
1742 if entry.p1_tracked:
1750 if entry.p1_tracked:
1743 if entry.modified and f not in m1 and f not in m2:
1751 if entry.modified and f not in m1 and f not in m2:
1744 yield missing_from_ps % f
1752 yield missing_from_ps % f
1745 elif f not in m1:
1753 elif f not in m1:
1746 yield missing_from_p1 % (f, node.short(p1))
1754 yield missing_from_p1 % (f, node.short(p1))
1747 if entry.added and f in m1:
1755 if entry.added and f in m1:
1748 yield unexpected_in_p1 % f
1756 yield unexpected_in_p1 % f
1749 for f in m1:
1757 for f in m1:
1750 if narrow_matcher is not None and not narrow_matcher(f):
1758 if narrow_matcher is not None and not narrow_matcher(f):
1751 continue
1759 continue
1752 entry = self.get_entry(f)
1760 entry = self.get_entry(f)
1753 if not entry.p1_tracked:
1761 if not entry.p1_tracked:
1754 yield missing_from_ds % (f, node.short(p1))
1762 yield missing_from_ds % (f, node.short(p1))
General Comments 0
You need to be logged in to leave comments. Login now