##// END OF EJS Templates
dirstate: introduce a `hacky_extension_update_file` method...
marmoute -
r50906:016dc238 default
parent child Browse files
Show More
@@ -1,1624 +1,1666 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import collections
9 import collections
10 import contextlib
10 import contextlib
11 import os
11 import os
12 import stat
12 import stat
13 import uuid
13 import uuid
14
14
15 from .i18n import _
15 from .i18n import _
16 from .pycompat import delattr
16 from .pycompat import delattr
17
17
18 from hgdemandimport import tracing
18 from hgdemandimport import tracing
19
19
20 from . import (
20 from . import (
21 dirstatemap,
21 dirstatemap,
22 encoding,
22 encoding,
23 error,
23 error,
24 match as matchmod,
24 match as matchmod,
25 node,
25 node,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 util,
30 util,
31 )
31 )
32
32
33 from .dirstateutils import (
33 from .dirstateutils import (
34 docket as docketmod,
34 docket as docketmod,
35 timestamp,
35 timestamp,
36 )
36 )
37
37
38 from .interfaces import (
38 from .interfaces import (
39 dirstate as intdirstate,
39 dirstate as intdirstate,
40 util as interfaceutil,
40 util as interfaceutil,
41 )
41 )
42
42
43 parsers = policy.importmod('parsers')
43 parsers = policy.importmod('parsers')
44 rustmod = policy.importrust('dirstate')
44 rustmod = policy.importrust('dirstate')
45
45
46 HAS_FAST_DIRSTATE_V2 = rustmod is not None
46 HAS_FAST_DIRSTATE_V2 = rustmod is not None
47
47
48 propertycache = util.propertycache
48 propertycache = util.propertycache
49 filecache = scmutil.filecache
49 filecache = scmutil.filecache
50 _rangemask = dirstatemap.rangemask
50 _rangemask = dirstatemap.rangemask
51
51
52 DirstateItem = dirstatemap.DirstateItem
52 DirstateItem = dirstatemap.DirstateItem
53
53
54
54
class repocache(filecache):
    """A `filecache` variant for files stored under `.hg/`."""

    def join(self, obj, fname):
        # Resolve the watched file relative to the repository's `.hg` opener.
        return obj._opener.join(fname)
60
60
61
61
class rootcache(filecache):
    """A `filecache` variant for files stored in the repository root."""

    def join(self, obj, fname):
        # Delegate to the dirstate's fast root-relative join helper.
        return obj._join(fname)
67
67
68
68
def requires_changing_parents(func):
    """Decorator enforcing that `func` runs inside a `changing_parents` context.

    Raises ProgrammingError when the wrapped method is called outside a
    parent-changing context, or after the dirstate has been invalidated.
    """

    def wrap(self, *args, **kwargs):
        if not self.pendingparentchange():
            msg = 'calling `%s` outside of a changing_parents context'
            msg %= func.__name__
            raise error.ProgrammingError(msg)
        if self._invalidated_context:
            msg = 'calling `%s` after the dirstate was invalidated'
            # Fix: the message was previously raised unformatted, leaving a
            # literal `%s` in the error text shown to developers.
            msg %= func.__name__
            raise error.ProgrammingError(msg)
        return func(self, *args, **kwargs)

    return wrap
81
81
82
82
def requires_not_changing_parents(func):
    """Decorator refusing to run `func` inside a `changing_parents` context."""

    def wrap(self, *args, **kwargs):
        if self.pendingparentchange():
            raise error.ProgrammingError(
                'calling `%s` inside of a changing_parents context'
                % func.__name__
            )
        return func(self, *args, **kwargs)

    return wrap
92
92
93
93
94 @interfaceutil.implementer(intdirstate.idirstate)
94 @interfaceutil.implementer(intdirstate.idirstate)
95 class dirstate:
95 class dirstate:
    def __init__(
        self,
        opener,
        ui,
        root,
        validate,
        sparsematchfn,
        nodeconstants,
        use_dirstate_v2,
        use_tracked_hint=False,
    ):
        """Create a new dirstate object.

        opener is an open()-like callable that can be used to open the
        dirstate file; root is the root of the directory tracked by
        the dirstate.

        validate is a callable applied to parent nodes before they are
        returned (see `parents`/`p1`/`p2`); sparsematchfn is either None
        (sparse disabled) or a callable returning the sparse matcher;
        nodeconstants provides the nullid used in parent comparisons;
        use_dirstate_v2 / use_tracked_hint select on-disk format features.
        """
        self._use_dirstate_v2 = use_dirstate_v2
        self._use_tracked_hint = use_tracked_hint
        self._nodeconstants = nodeconstants
        self._opener = opener
        self._validate = validate
        self._root = root
        # Either build a sparse-matcher or None if sparse is disabled
        self._sparsematchfn = sparsematchfn
        # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
        # UNC path pointing to root share (issue4557)
        self._rootdir = pathutil.normasprefix(root)
        # True if any internal state may be different
        self._dirty = False
        # True if the set of tracked files may be different
        self._dirty_tracked_set = False
        self._ui = ui
        # per-attribute filecache entries, refreshed e.g. by `setbranch`
        self._filecache = {}
        # nesting level of `changing_parents` context
        self._parentwriters = 0
        # True if the current dirstate changing operations have been
        # invalidated (used to make sure all nested contexts have been exited)
        self._invalidated_context = False
        self._filename = b'dirstate'
        self._filename_th = b'dirstate-tracked-hint'
        self._pendingfilename = b'%s.pending' % self._filename
        self._plchangecallbacks = {}
        # parents as they were before the current change, set lazily by
        # `setparents`
        self._origpl = None
        self._mapcls = dirstatemap.dirstatemap
        # Access and cache cwd early, so we don't access it for the first time
        # after a working-copy update caused it to not exist (accessing it then
        # raises an exception).
        self._cwd
145
145
146 def prefetch_parents(self):
146 def prefetch_parents(self):
147 """make sure the parents are loaded
147 """make sure the parents are loaded
148
148
149 Used to avoid a race condition.
149 Used to avoid a race condition.
150 """
150 """
151 self._pl
151 self._pl
152
152
    @contextlib.contextmanager
    def changing_parents(self, repo):
        """Context manager for handling dirstate parents.

        If an exception occurs in the scope of the context manager,
        the incoherent dirstate won't be written when wlock is
        released.

        Contexts may be nested: `_parentwriters` counts the nesting depth
        and the final write/invalidation only happens when the outermost
        context exits.
        """
        # refuse to change parents without the working-copy lock held
        if repo.currentwlock() is None:
            msg = b"changing parents without holding the wlock"
            raise error.ProgrammingError(msg)
        if self._invalidated_context:
            msg = "trying to use an invalidated dirstate before it has reset"
            raise error.ProgrammingError(msg)
        self._parentwriters += 1
        try:
            yield
        except Exception:
            # drop the in-memory state; also flips `_invalidated_context`
            self.invalidate()
            raise
        finally:
            if self._parentwriters > 0:
                if self._invalidated_context:
                    # make sure we invalidate anything an upper context might
                    # have changed.
                    self.invalidate()
                self._parentwriters -= 1
                # The invalidation is complete once we exit the final context
                # manager
                if self._parentwriters <= 0:
                    assert self._parentwriters == 0
                    if self._invalidated_context:
                        # all contexts exited: clear the invalidation flag
                        self._invalidated_context = False
                    else:
                        # When an exception occurred, `_invalidated_context`
                        # would have been set to True by the `invalidate`
                        # call earlier.
                        #
                        # We don't have more straightforward code, because the
                        # Exception catching (and the associated `invalidate`
                        # calling) might have been called by a nested context
                        # instead of the top level one.
                        self.write(repo.currenttransaction())
196
196
197 # here to help migration to the new code
197 # here to help migration to the new code
198 def parentchange(self):
198 def parentchange(self):
199 msg = (
199 msg = (
200 "Mercurial 6.4 and later requires call to "
200 "Mercurial 6.4 and later requires call to "
201 "`dirstate.changing_parents(repo)`"
201 "`dirstate.changing_parents(repo)`"
202 )
202 )
203 raise error.ProgrammingError(msg)
203 raise error.ProgrammingError(msg)
204
204
205 def pendingparentchange(self):
205 def pendingparentchange(self):
206 """Returns true if the dirstate is in the middle of a set of changes
206 """Returns true if the dirstate is in the middle of a set of changes
207 that modify the dirstate parent.
207 that modify the dirstate parent.
208 """
208 """
209 return self._parentwriters > 0
209 return self._parentwriters > 0
210
210
211 @propertycache
211 @propertycache
212 def _map(self):
212 def _map(self):
213 """Return the dirstate contents (see documentation for dirstatemap)."""
213 """Return the dirstate contents (see documentation for dirstatemap)."""
214 self._map = self._mapcls(
214 self._map = self._mapcls(
215 self._ui,
215 self._ui,
216 self._opener,
216 self._opener,
217 self._root,
217 self._root,
218 self._nodeconstants,
218 self._nodeconstants,
219 self._use_dirstate_v2,
219 self._use_dirstate_v2,
220 )
220 )
221 return self._map
221 return self._map
222
222
223 @property
223 @property
224 def _sparsematcher(self):
224 def _sparsematcher(self):
225 """The matcher for the sparse checkout.
225 """The matcher for the sparse checkout.
226
226
227 The working directory may not include every file from a manifest. The
227 The working directory may not include every file from a manifest. The
228 matcher obtained by this property will match a path if it is to be
228 matcher obtained by this property will match a path if it is to be
229 included in the working directory.
229 included in the working directory.
230
230
231 When sparse if disabled, return None.
231 When sparse if disabled, return None.
232 """
232 """
233 if self._sparsematchfn is None:
233 if self._sparsematchfn is None:
234 return None
234 return None
235 # TODO there is potential to cache this property. For now, the matcher
235 # TODO there is potential to cache this property. For now, the matcher
236 # is resolved on every access. (But the called function does use a
236 # is resolved on every access. (But the called function does use a
237 # cache to keep the lookup fast.)
237 # cache to keep the lookup fast.)
238 return self._sparsematchfn()
238 return self._sparsematchfn()
239
239
240 @repocache(b'branch')
240 @repocache(b'branch')
241 def _branch(self):
241 def _branch(self):
242 try:
242 try:
243 return self._opener.read(b"branch").strip() or b"default"
243 return self._opener.read(b"branch").strip() or b"default"
244 except FileNotFoundError:
244 except FileNotFoundError:
245 return b"default"
245 return b"default"
246
246
247 @property
247 @property
248 def _pl(self):
248 def _pl(self):
249 return self._map.parents()
249 return self._map.parents()
250
250
251 def hasdir(self, d):
251 def hasdir(self, d):
252 return self._map.hastrackeddir(d)
252 return self._map.hastrackeddir(d)
253
253
254 @rootcache(b'.hgignore')
254 @rootcache(b'.hgignore')
255 def _ignore(self):
255 def _ignore(self):
256 files = self._ignorefiles()
256 files = self._ignorefiles()
257 if not files:
257 if not files:
258 return matchmod.never()
258 return matchmod.never()
259
259
260 pats = [b'include:%s' % f for f in files]
260 pats = [b'include:%s' % f for f in files]
261 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
261 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
262
262
    @propertycache
    def _slash(self):
        # True when paths should be displayed with '/' even though the
        # platform separator differs (opt-in via the `ui.slash` config).
        return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'

    @propertycache
    def _checklink(self):
        # Whether the filesystem at the repository root supports symlinks.
        return util.checklink(self._root)

    @propertycache
    def _checkexec(self):
        # Whether the filesystem at the repository root honours the exec bit.
        return bool(util.checkexec(self._root))

    @propertycache
    def _checkcase(self):
        # True on case-insensitive filesystems (probed against `.hg`).
        return not util.fscasesensitive(self._join(b'.hg'))
278
278
279 def _join(self, f):
279 def _join(self, f):
280 # much faster than os.path.join()
280 # much faster than os.path.join()
281 # it's safe because f is always a relative path
281 # it's safe because f is always a relative path
282 return self._rootdir + f
282 return self._rootdir + f
283
283
    def flagfunc(self, buildfallback):
        """build a callable that returns flags associated with a filename

        The information is extracted from three possible layers:
        1. the file system if it supports the information
        2. the "fallback" information stored in the dirstate if any
        3. a more expensive mechanism inferring the flags from the parents.

        `buildfallback` is only invoked (at most once) when layer 3 is
        actually needed.
        """

        # small hack to cache the result of buildfallback()
        fallback_func = []

        def get_flags(x):
            entry = None
            fallback_value = None
            try:
                st = os.lstat(self._join(x))
            except OSError:
                # file is gone (or unreadable): no flags
                return b''

            if self._checklink:
                # layer 1: the filesystem knows about symlinks
                if util.statislink(st):
                    return b'l'
            else:
                # layer 2: fallback symlink bit recorded in the dirstate
                entry = self.get_entry(x)
                if entry.has_fallback_symlink:
                    if entry.fallback_symlink:
                        return b'l'
                else:
                    # layer 3: infer from the parents (computed lazily, once)
                    if not fallback_func:
                        fallback_func.append(buildfallback())
                    fallback_value = fallback_func[0](x)
                    if b'l' in fallback_value:
                        return b'l'

            if self._checkexec:
                # layer 1: the filesystem honours the exec bit
                if util.statisexec(st):
                    return b'x'
            else:
                # layer 2: fallback exec bit recorded in the dirstate
                if entry is None:
                    entry = self.get_entry(x)
                if entry.has_fallback_exec:
                    if entry.fallback_exec:
                        return b'x'
                else:
                    # layer 3: reuse the fallback value if layer 3 already ran
                    if fallback_value is None:
                        if not fallback_func:
                            fallback_func.append(buildfallback())
                        fallback_value = fallback_func[0](x)
                    if b'x' in fallback_value:
                        return b'x'
            return b''

        return get_flags
338
338
339 @propertycache
339 @propertycache
340 def _cwd(self):
340 def _cwd(self):
341 # internal config: ui.forcecwd
341 # internal config: ui.forcecwd
342 forcecwd = self._ui.config(b'ui', b'forcecwd')
342 forcecwd = self._ui.config(b'ui', b'forcecwd')
343 if forcecwd:
343 if forcecwd:
344 return forcecwd
344 return forcecwd
345 return encoding.getcwd()
345 return encoding.getcwd()
346
346
347 def getcwd(self):
347 def getcwd(self):
348 """Return the path from which a canonical path is calculated.
348 """Return the path from which a canonical path is calculated.
349
349
350 This path should be used to resolve file patterns or to convert
350 This path should be used to resolve file patterns or to convert
351 canonical paths back to file paths for display. It shouldn't be
351 canonical paths back to file paths for display. It shouldn't be
352 used to get real file paths. Use vfs functions instead.
352 used to get real file paths. Use vfs functions instead.
353 """
353 """
354 cwd = self._cwd
354 cwd = self._cwd
355 if cwd == self._root:
355 if cwd == self._root:
356 return b''
356 return b''
357 # self._root ends with a path separator if self._root is '/' or 'C:\'
357 # self._root ends with a path separator if self._root is '/' or 'C:\'
358 rootsep = self._root
358 rootsep = self._root
359 if not util.endswithsep(rootsep):
359 if not util.endswithsep(rootsep):
360 rootsep += pycompat.ossep
360 rootsep += pycompat.ossep
361 if cwd.startswith(rootsep):
361 if cwd.startswith(rootsep):
362 return cwd[len(rootsep) :]
362 return cwd[len(rootsep) :]
363 else:
363 else:
364 # we're outside the repo. return an absolute path.
364 # we're outside the repo. return an absolute path.
365 return cwd
365 return cwd
366
366
367 def pathto(self, f, cwd=None):
367 def pathto(self, f, cwd=None):
368 if cwd is None:
368 if cwd is None:
369 cwd = self.getcwd()
369 cwd = self.getcwd()
370 path = util.pathto(self._root, cwd, f)
370 path = util.pathto(self._root, cwd, f)
371 if self._slash:
371 if self._slash:
372 return util.pconvert(path)
372 return util.pconvert(path)
373 return path
373 return path
374
374
375 def get_entry(self, path):
375 def get_entry(self, path):
376 """return a DirstateItem for the associated path"""
376 """return a DirstateItem for the associated path"""
377 entry = self._map.get(path)
377 entry = self._map.get(path)
378 if entry is None:
378 if entry is None:
379 return DirstateItem()
379 return DirstateItem()
380 return entry
380 return entry
381
381
382 def __contains__(self, key):
382 def __contains__(self, key):
383 return key in self._map
383 return key in self._map
384
384
385 def __iter__(self):
385 def __iter__(self):
386 return iter(sorted(self._map))
386 return iter(sorted(self._map))
387
387
388 def items(self):
388 def items(self):
389 return self._map.items()
389 return self._map.items()
390
390
391 iteritems = items
391 iteritems = items
392
392
393 def parents(self):
393 def parents(self):
394 return [self._validate(p) for p in self._pl]
394 return [self._validate(p) for p in self._pl]
395
395
396 def p1(self):
396 def p1(self):
397 return self._validate(self._pl[0])
397 return self._validate(self._pl[0])
398
398
399 def p2(self):
399 def p2(self):
400 return self._validate(self._pl[1])
400 return self._validate(self._pl[1])
401
401
402 @property
402 @property
403 def in_merge(self):
403 def in_merge(self):
404 """True if a merge is in progress"""
404 """True if a merge is in progress"""
405 return self._pl[1] != self._nodeconstants.nullid
405 return self._pl[1] != self._nodeconstants.nullid
406
406
407 def branch(self):
407 def branch(self):
408 return encoding.tolocal(self._branch)
408 return encoding.tolocal(self._branch)
409
409
    def setparents(self, p1, p2=None):
        """Set dirstate parents to p1 and p2.

        When moving from two parents to one, "merged" entries are
        adjusted to normal and previous copy records discarded and
        returned by the call.

        Must be called inside a `changing_parents` context (enforced by
        checking `_parentwriters`).

        See localrepo.setparents()
        """
        if p2 is None:
            p2 = self._nodeconstants.nullid
        if self._parentwriters == 0:
            raise ValueError(
                b"cannot set dirstate parent outside of "
                b"dirstate.changing_parents context manager"
            )

        self._dirty = True
        oldp2 = self._pl[1]
        if self._origpl is None:
            # remember the pre-change parents (only on the first call)
            self._origpl = self._pl
        nullid = self._nodeconstants.nullid
        # True if we need to fold p2 related state back to a linear case
        fold_p2 = oldp2 != nullid and p2 == nullid
        return self._map.setparents(p1, p2, fold_p2=fold_p2)
435
435
    def setbranch(self, branch):
        """Persist `branch` as the current branch in `.hg/branch`.

        The in-memory filecache value is set first, then the file is
        written atomically; on failure the temp file is discarded and the
        error re-raised.
        """
        self.__class__._branch.set(self, encoding.fromlocal(branch))
        f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
        try:
            f.write(self._branch + b'\n')
            f.close()

            # make sure filecache has the correct stat info for _branch after
            # replacing the underlying file
            ce = self._filecache[b'_branch']
            if ce:
                ce.refresh()
        except: # re-raises
            f.discard()
            raise
451
451
452 def invalidate(self):
452 def invalidate(self):
453 """Causes the next access to reread the dirstate.
453 """Causes the next access to reread the dirstate.
454
454
455 This is different from localrepo.invalidatedirstate() because it always
455 This is different from localrepo.invalidatedirstate() because it always
456 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
456 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
457 check whether the dirstate has changed before rereading it."""
457 check whether the dirstate has changed before rereading it."""
458
458
459 for a in ("_map", "_branch", "_ignore"):
459 for a in ("_map", "_branch", "_ignore"):
460 if a in self.__dict__:
460 if a in self.__dict__:
461 delattr(self, a)
461 delattr(self, a)
462 self._dirty = False
462 self._dirty = False
463 self._dirty_tracked_set = False
463 self._dirty_tracked_set = False
464 self._invalidated_context = self._parentwriters > 0
464 self._invalidated_context = self._parentwriters > 0
465 self._origpl = None
465 self._origpl = None
466
466
467 def copy(self, source, dest):
467 def copy(self, source, dest):
468 """Mark dest as a copy of source. Unmark dest if source is None."""
468 """Mark dest as a copy of source. Unmark dest if source is None."""
469 if source == dest:
469 if source == dest:
470 return
470 return
471 self._dirty = True
471 self._dirty = True
472 if source is not None:
472 if source is not None:
473 self._check_sparse(source)
473 self._check_sparse(source)
474 self._map.copymap[dest] = source
474 self._map.copymap[dest] = source
475 else:
475 else:
476 self._map.copymap.pop(dest, None)
476 self._map.copymap.pop(dest, None)
477
477
478 def copied(self, file):
478 def copied(self, file):
479 return self._map.copymap.get(file, None)
479 return self._map.copymap.get(file, None)
480
480
481 def copies(self):
481 def copies(self):
482 return self._map.copymap
482 return self._map.copymap
483
483
484 @requires_not_changing_parents
484 @requires_not_changing_parents
485 def set_tracked(self, filename, reset_copy=False):
485 def set_tracked(self, filename, reset_copy=False):
486 """a "public" method for generic code to mark a file as tracked
486 """a "public" method for generic code to mark a file as tracked
487
487
488 This function is to be called outside of "update/merge" case. For
488 This function is to be called outside of "update/merge" case. For
489 example by a command like `hg add X`.
489 example by a command like `hg add X`.
490
490
491 if reset_copy is set, any existing copy information will be dropped.
491 if reset_copy is set, any existing copy information will be dropped.
492
492
493 return True the file was previously untracked, False otherwise.
493 return True the file was previously untracked, False otherwise.
494 """
494 """
495 self._dirty = True
495 self._dirty = True
496 entry = self._map.get(filename)
496 entry = self._map.get(filename)
497 if entry is None or not entry.tracked:
497 if entry is None or not entry.tracked:
498 self._check_new_tracked_filename(filename)
498 self._check_new_tracked_filename(filename)
499 pre_tracked = self._map.set_tracked(filename)
499 pre_tracked = self._map.set_tracked(filename)
500 if reset_copy:
500 if reset_copy:
501 self._map.copymap.pop(filename, None)
501 self._map.copymap.pop(filename, None)
502 if pre_tracked:
502 if pre_tracked:
503 self._dirty_tracked_set = True
503 self._dirty_tracked_set = True
504 return pre_tracked
504 return pre_tracked
505
505
506 @requires_not_changing_parents
506 @requires_not_changing_parents
507 def set_untracked(self, filename):
507 def set_untracked(self, filename):
508 """a "public" method for generic code to mark a file as untracked
508 """a "public" method for generic code to mark a file as untracked
509
509
510 This function is to be called outside of "update/merge" case. For
510 This function is to be called outside of "update/merge" case. For
511 example by a command like `hg remove X`.
511 example by a command like `hg remove X`.
512
512
513 return True the file was previously tracked, False otherwise.
513 return True the file was previously tracked, False otherwise.
514 """
514 """
515 ret = self._map.set_untracked(filename)
515 ret = self._map.set_untracked(filename)
516 if ret:
516 if ret:
517 self._dirty = True
517 self._dirty = True
518 self._dirty_tracked_set = True
518 self._dirty_tracked_set = True
519 return ret
519 return ret
520
520
521 @requires_not_changing_parents
521 @requires_not_changing_parents
522 def set_clean(self, filename, parentfiledata):
522 def set_clean(self, filename, parentfiledata):
523 """record that the current state of the file on disk is known to be clean"""
523 """record that the current state of the file on disk is known to be clean"""
524 self._dirty = True
524 self._dirty = True
525 if not self._map[filename].tracked:
525 if not self._map[filename].tracked:
526 self._check_new_tracked_filename(filename)
526 self._check_new_tracked_filename(filename)
527 (mode, size, mtime) = parentfiledata
527 (mode, size, mtime) = parentfiledata
528 self._map.set_clean(filename, mode, size, mtime)
528 self._map.set_clean(filename, mode, size, mtime)
529
529
530 @requires_not_changing_parents
530 @requires_not_changing_parents
531 def set_possibly_dirty(self, filename):
531 def set_possibly_dirty(self, filename):
532 """record that the current state of the file on disk is unknown"""
532 """record that the current state of the file on disk is unknown"""
533 self._dirty = True
533 self._dirty = True
534 self._map.set_possibly_dirty(filename)
534 self._map.set_possibly_dirty(filename)
535
535
    @requires_changing_parents
    def update_file_p1(
        self,
        filename,
        p1_tracked,
    ):
        """Set a file as tracked in the parent (or not)

        This is to be called when adjusting the dirstate to a new parent
        after an history rewriting operation.

        It should not be called during a merge (p2 != nullid) and only within
        a `with dirstate.changing_parents(repo):` context.
        """
        if self.in_merge:
            msg = b'update_file_reference should not be called when merging'
            raise error.ProgrammingError(msg)
        entry = self._map.get(filename)
        if entry is None:
            wc_tracked = False
        else:
            wc_tracked = entry.tracked
        if not (p1_tracked or wc_tracked):
            # the file is no longer relevant to anyone
            if self._map.get(filename) is not None:
                self._map.reset_state(filename)
                self._dirty = True
        elif (not p1_tracked) and wc_tracked:
            if entry is not None and entry.added:
                return  # avoid dropping copy information (maybe?)

        # NOTE(review): this reset also runs after the "no longer relevant"
        # branch above already reset the entry — presumably intentional
        # (re-recording the untracked state); confirm against dirstatemap.
        self._map.reset_state(
            filename,
            wc_tracked,
            p1_tracked,
            # the underlying reference might have changed, we will have to
            # check it.
            has_meaningful_mtime=False,
        )
575
575
    @requires_changing_parents
    def update_file(
        self,
        filename,
        wc_tracked,
        p1_tracked,
        p2_info=False,
        possibly_dirty=False,
        parentfiledata=None,
    ):
        """update the information about a file in the dirstate

        This is to be called when the direstates parent changes to keep track
        of what is the file situation in regards to the working copy and its parent.

        This function must be called within a `dirstate.changing_parents` context.

        note: the API is at an early stage and we might need to adjust it
        depending of what information ends up being relevant and useful to
        other processing.
        """
        # Thin wrapper: the decorator enforces the changing-parents context,
        # the shared implementation lives in _update_file.
        self._update_file(
            filename=filename,
            wc_tracked=wc_tracked,
            p1_tracked=p1_tracked,
            p2_info=p2_info,
            possibly_dirty=possibly_dirty,
            parentfiledata=parentfiledata,
        )
606 def hacky_extension_update_file(self, *args, **kwargs):
607 """NEVER USE THIS, YOU DO NOT NEED IT
608
609 This function is a variant of "update_file" to be called by a small set
610 of extensions, it also adjust the internal state of file, but can be
611 called outside an `changing_parents` context.
612
613 A very small number of extension meddle with the working copy content
614 in a way that requires to adjust the dirstate accordingly. At the time
615 this command is written they are :
616 - keyword,
617 - largefile,
618 PLEASE DO NOT GROW THIS LIST ANY FURTHER.
619
620 This function could probably be replaced by more semantic one (like
621 "adjust expected size" or "always revalidate file content", etc)
622 however at the time where this is writen, this is too much of a detour
623 to be considered.
624 """
625 self._update_file(
626 *args,
627 **kwargs,
628 )
629
630 def _update_file(
631 self,
632 filename,
633 wc_tracked,
634 p1_tracked,
635 p2_info=False,
636 possibly_dirty=False,
637 parentfiledata=None,
638 ):
597
639
598 # note: I do not think we need to double check name clash here since we
640 # note: I do not think we need to double check name clash here since we
599 # are in a update/merge case that should already have taken care of
641 # are in a update/merge case that should already have taken care of
600 # this. The test agrees
642 # this. The test agrees
601
643
602 self._dirty = True
644 self._dirty = True
603 old_entry = self._map.get(filename)
645 old_entry = self._map.get(filename)
604 if old_entry is None:
646 if old_entry is None:
605 prev_tracked = False
647 prev_tracked = False
606 else:
648 else:
607 prev_tracked = old_entry.tracked
649 prev_tracked = old_entry.tracked
608 if prev_tracked != wc_tracked:
650 if prev_tracked != wc_tracked:
609 self._dirty_tracked_set = True
651 self._dirty_tracked_set = True
610
652
611 self._map.reset_state(
653 self._map.reset_state(
612 filename,
654 filename,
613 wc_tracked,
655 wc_tracked,
614 p1_tracked,
656 p1_tracked,
615 p2_info=p2_info,
657 p2_info=p2_info,
616 has_meaningful_mtime=not possibly_dirty,
658 has_meaningful_mtime=not possibly_dirty,
617 parentfiledata=parentfiledata,
659 parentfiledata=parentfiledata,
618 )
660 )
619
661
    def _check_new_tracked_filename(self, filename):
        """Abort if ``filename`` cannot become a newly tracked file.

        Checks, in order: the name is valid, it does not collide with a
        tracked directory, no ancestor directory is itself a tracked file,
        and it is inside the sparse profile.
        """
        scmutil.checkfilename(filename)
        if self._map.hastrackeddir(filename):
            msg = _(b'directory %r already in dirstate')
            msg %= pycompat.bytestr(filename)
            raise error.Abort(msg)
        # shadows
        for d in pathutil.finddirs(filename):
            if self._map.hastrackeddir(d):
                # an ancestor is already known as a directory; no file can
                # shadow it, so we can stop scanning upwards
                break
            entry = self._map.get(d)
            if entry is not None and not entry.removed:
                msg = _(b'file %r in dirstate clashes with %r')
                msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
                raise error.Abort(msg)
        self._check_sparse(filename)
636
678
637 def _check_sparse(self, filename):
679 def _check_sparse(self, filename):
638 """Check that a filename is inside the sparse profile"""
680 """Check that a filename is inside the sparse profile"""
639 sparsematch = self._sparsematcher
681 sparsematch = self._sparsematcher
640 if sparsematch is not None and not sparsematch.always():
682 if sparsematch is not None and not sparsematch.always():
641 if not sparsematch(filename):
683 if not sparsematch(filename):
642 msg = _(b"cannot add '%s' - it is outside the sparse checkout")
684 msg = _(b"cannot add '%s' - it is outside the sparse checkout")
643 hint = _(
685 hint = _(
644 b'include file with `hg debugsparse --include <pattern>` or use '
686 b'include file with `hg debugsparse --include <pattern>` or use '
645 b'`hg add -s <file>` to include file directory while adding'
687 b'`hg add -s <file>` to include file directory while adding'
646 )
688 )
647 raise error.Abort(msg % filename, hint=hint)
689 raise error.Abort(msg % filename, hint=hint)
648
690
    def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
        """Resolve the on-disk case of ``path`` and cache it in ``storemap``.

        ``normed`` is the case-normalized form of ``path``; ``exists`` may be
        passed as a known lstat result (None means "probe the filesystem").
        The resolved form is only cached when the path actually exists.
        """
        if exists is None:
            exists = os.path.lexists(os.path.join(self._root, path))
        if not exists:
            # Maybe a path component exists
            if not ignoremissing and b'/' in path:
                d, f = path.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, None)
                folded = d + b"/" + f
            else:
                # No path components, preserve original case
                folded = path
        else:
            # recursively normalize leading directory components
            # against dirstate
            if b'/' in normed:
                d, f = normed.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, True)
                r = self._root + b"/" + d
                folded = d + b"/" + util.fspath(f, r)
            else:
                folded = util.fspath(normed, self._root)
            # cache only confirmed-existing results
            storemap[normed] = folded

        return folded
674
716
675 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
717 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
676 normed = util.normcase(path)
718 normed = util.normcase(path)
677 folded = self._map.filefoldmap.get(normed, None)
719 folded = self._map.filefoldmap.get(normed, None)
678 if folded is None:
720 if folded is None:
679 if isknown:
721 if isknown:
680 folded = path
722 folded = path
681 else:
723 else:
682 folded = self._discoverpath(
724 folded = self._discoverpath(
683 path, normed, ignoremissing, exists, self._map.filefoldmap
725 path, normed, ignoremissing, exists, self._map.filefoldmap
684 )
726 )
685 return folded
727 return folded
686
728
687 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
729 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
688 normed = util.normcase(path)
730 normed = util.normcase(path)
689 folded = self._map.filefoldmap.get(normed, None)
731 folded = self._map.filefoldmap.get(normed, None)
690 if folded is None:
732 if folded is None:
691 folded = self._map.dirfoldmap.get(normed, None)
733 folded = self._map.dirfoldmap.get(normed, None)
692 if folded is None:
734 if folded is None:
693 if isknown:
735 if isknown:
694 folded = path
736 folded = path
695 else:
737 else:
696 # store discovered result in dirfoldmap so that future
738 # store discovered result in dirfoldmap so that future
697 # normalizefile calls don't start matching directories
739 # normalizefile calls don't start matching directories
698 folded = self._discoverpath(
740 folded = self._discoverpath(
699 path, normed, ignoremissing, exists, self._map.dirfoldmap
741 path, normed, ignoremissing, exists, self._map.dirfoldmap
700 )
742 )
701 return folded
743 return folded
702
744
703 def normalize(self, path, isknown=False, ignoremissing=False):
745 def normalize(self, path, isknown=False, ignoremissing=False):
704 """
746 """
705 normalize the case of a pathname when on a casefolding filesystem
747 normalize the case of a pathname when on a casefolding filesystem
706
748
707 isknown specifies whether the filename came from walking the
749 isknown specifies whether the filename came from walking the
708 disk, to avoid extra filesystem access.
750 disk, to avoid extra filesystem access.
709
751
710 If ignoremissing is True, missing path are returned
752 If ignoremissing is True, missing path are returned
711 unchanged. Otherwise, we try harder to normalize possibly
753 unchanged. Otherwise, we try harder to normalize possibly
712 existing path components.
754 existing path components.
713
755
714 The normalized case is determined based on the following precedence:
756 The normalized case is determined based on the following precedence:
715
757
716 - version of name already stored in the dirstate
758 - version of name already stored in the dirstate
717 - version of name stored on disk
759 - version of name stored on disk
718 - version provided via command arguments
760 - version provided via command arguments
719 """
761 """
720
762
721 if self._checkcase:
763 if self._checkcase:
722 return self._normalize(path, isknown, ignoremissing)
764 return self._normalize(path, isknown, ignoremissing)
723 return path
765 return path
724
766
725 def clear(self):
767 def clear(self):
726 self._map.clear()
768 self._map.clear()
727 self._dirty = True
769 self._dirty = True
728
770
729 def rebuild(self, parent, allfiles, changedfiles=None):
771 def rebuild(self, parent, allfiles, changedfiles=None):
730
772
731 matcher = self._sparsematcher
773 matcher = self._sparsematcher
732 if matcher is not None and not matcher.always():
774 if matcher is not None and not matcher.always():
733 # should not add non-matching files
775 # should not add non-matching files
734 allfiles = [f for f in allfiles if matcher(f)]
776 allfiles = [f for f in allfiles if matcher(f)]
735 if changedfiles:
777 if changedfiles:
736 changedfiles = [f for f in changedfiles if matcher(f)]
778 changedfiles = [f for f in changedfiles if matcher(f)]
737
779
738 if changedfiles is not None:
780 if changedfiles is not None:
739 # these files will be deleted from the dirstate when they are
781 # these files will be deleted from the dirstate when they are
740 # not found to be in allfiles
782 # not found to be in allfiles
741 dirstatefilestoremove = {f for f in self if not matcher(f)}
783 dirstatefilestoremove = {f for f in self if not matcher(f)}
742 changedfiles = dirstatefilestoremove.union(changedfiles)
784 changedfiles = dirstatefilestoremove.union(changedfiles)
743
785
744 if changedfiles is None:
786 if changedfiles is None:
745 # Rebuild entire dirstate
787 # Rebuild entire dirstate
746 to_lookup = allfiles
788 to_lookup = allfiles
747 to_drop = []
789 to_drop = []
748 self.clear()
790 self.clear()
749 elif len(changedfiles) < 10:
791 elif len(changedfiles) < 10:
750 # Avoid turning allfiles into a set, which can be expensive if it's
792 # Avoid turning allfiles into a set, which can be expensive if it's
751 # large.
793 # large.
752 to_lookup = []
794 to_lookup = []
753 to_drop = []
795 to_drop = []
754 for f in changedfiles:
796 for f in changedfiles:
755 if f in allfiles:
797 if f in allfiles:
756 to_lookup.append(f)
798 to_lookup.append(f)
757 else:
799 else:
758 to_drop.append(f)
800 to_drop.append(f)
759 else:
801 else:
760 changedfilesset = set(changedfiles)
802 changedfilesset = set(changedfiles)
761 to_lookup = changedfilesset & set(allfiles)
803 to_lookup = changedfilesset & set(allfiles)
762 to_drop = changedfilesset - to_lookup
804 to_drop = changedfilesset - to_lookup
763
805
764 if self._origpl is None:
806 if self._origpl is None:
765 self._origpl = self._pl
807 self._origpl = self._pl
766 self._map.setparents(parent, self._nodeconstants.nullid)
808 self._map.setparents(parent, self._nodeconstants.nullid)
767
809
768 for f in to_lookup:
810 for f in to_lookup:
769
811
770 if self.in_merge:
812 if self.in_merge:
771 self.set_tracked(f)
813 self.set_tracked(f)
772 else:
814 else:
773 self._map.reset_state(
815 self._map.reset_state(
774 f,
816 f,
775 wc_tracked=True,
817 wc_tracked=True,
776 p1_tracked=True,
818 p1_tracked=True,
777 )
819 )
778 for f in to_drop:
820 for f in to_drop:
779 self._map.reset_state(f)
821 self._map.reset_state(f)
780
822
781 self._dirty = True
823 self._dirty = True
782
824
783 def identity(self):
825 def identity(self):
784 """Return identity of dirstate itself to detect changing in storage
826 """Return identity of dirstate itself to detect changing in storage
785
827
786 If identity of previous dirstate is equal to this, writing
828 If identity of previous dirstate is equal to this, writing
787 changes based on the former dirstate out can keep consistency.
829 changes based on the former dirstate out can keep consistency.
788 """
830 """
789 return self._map.identity
831 return self._map.identity
790
832
791 def write(self, tr):
833 def write(self, tr):
792 if not self._dirty:
834 if not self._dirty:
793 return
835 return
794
836
795 write_key = self._use_tracked_hint and self._dirty_tracked_set
837 write_key = self._use_tracked_hint and self._dirty_tracked_set
796 if tr:
838 if tr:
797 # delay writing in-memory changes out
839 # delay writing in-memory changes out
798 tr.addfilegenerator(
840 tr.addfilegenerator(
799 b'dirstate-1-main',
841 b'dirstate-1-main',
800 (self._filename,),
842 (self._filename,),
801 lambda f: self._writedirstate(tr, f),
843 lambda f: self._writedirstate(tr, f),
802 location=b'plain',
844 location=b'plain',
803 post_finalize=True,
845 post_finalize=True,
804 )
846 )
805 if write_key:
847 if write_key:
806 tr.addfilegenerator(
848 tr.addfilegenerator(
807 b'dirstate-2-key-post',
849 b'dirstate-2-key-post',
808 (self._filename_th,),
850 (self._filename_th,),
809 lambda f: self._write_tracked_hint(tr, f),
851 lambda f: self._write_tracked_hint(tr, f),
810 location=b'plain',
852 location=b'plain',
811 post_finalize=True,
853 post_finalize=True,
812 )
854 )
813 return
855 return
814
856
815 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
857 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
816 with file(self._filename) as f:
858 with file(self._filename) as f:
817 self._writedirstate(tr, f)
859 self._writedirstate(tr, f)
818 if write_key:
860 if write_key:
819 # we update the key-file after writing to make sure reader have a
861 # we update the key-file after writing to make sure reader have a
820 # key that match the newly written content
862 # key that match the newly written content
821 with file(self._filename_th) as f:
863 with file(self._filename_th) as f:
822 self._write_tracked_hint(tr, f)
864 self._write_tracked_hint(tr, f)
823
865
824 def delete_tracked_hint(self):
866 def delete_tracked_hint(self):
825 """remove the tracked_hint file
867 """remove the tracked_hint file
826
868
827 To be used by format downgrades operation"""
869 To be used by format downgrades operation"""
828 self._opener.unlink(self._filename_th)
870 self._opener.unlink(self._filename_th)
829 self._use_tracked_hint = False
871 self._use_tracked_hint = False
830
872
831 def addparentchangecallback(self, category, callback):
873 def addparentchangecallback(self, category, callback):
832 """add a callback to be called when the wd parents are changed
874 """add a callback to be called when the wd parents are changed
833
875
834 Callback will be called with the following arguments:
876 Callback will be called with the following arguments:
835 dirstate, (oldp1, oldp2), (newp1, newp2)
877 dirstate, (oldp1, oldp2), (newp1, newp2)
836
878
837 Category is a unique identifier to allow overwriting an old callback
879 Category is a unique identifier to allow overwriting an old callback
838 with a newer callback.
880 with a newer callback.
839 """
881 """
840 self._plchangecallbacks[category] = callback
882 self._plchangecallbacks[category] = callback
841
883
842 def _writedirstate(self, tr, st):
884 def _writedirstate(self, tr, st):
843 # notify callbacks about parents change
885 # notify callbacks about parents change
844 if self._origpl is not None and self._origpl != self._pl:
886 if self._origpl is not None and self._origpl != self._pl:
845 for c, callback in sorted(self._plchangecallbacks.items()):
887 for c, callback in sorted(self._plchangecallbacks.items()):
846 callback(self, self._origpl, self._pl)
888 callback(self, self._origpl, self._pl)
847 self._origpl = None
889 self._origpl = None
848 self._map.write(tr, st)
890 self._map.write(tr, st)
849 self._dirty = False
891 self._dirty = False
850 self._dirty_tracked_set = False
892 self._dirty_tracked_set = False
851
893
852 def _write_tracked_hint(self, tr, f):
894 def _write_tracked_hint(self, tr, f):
853 key = node.hex(uuid.uuid4().bytes)
895 key = node.hex(uuid.uuid4().bytes)
854 f.write(b"1\n%s\n" % key) # 1 is the format version
896 f.write(b"1\n%s\n" % key) # 1 is the format version
855
897
856 def _dirignore(self, f):
898 def _dirignore(self, f):
857 if self._ignore(f):
899 if self._ignore(f):
858 return True
900 return True
859 for p in pathutil.finddirs(f):
901 for p in pathutil.finddirs(f):
860 if self._ignore(p):
902 if self._ignore(p):
861 return True
903 return True
862 return False
904 return False
863
905
864 def _ignorefiles(self):
906 def _ignorefiles(self):
865 files = []
907 files = []
866 if os.path.exists(self._join(b'.hgignore')):
908 if os.path.exists(self._join(b'.hgignore')):
867 files.append(self._join(b'.hgignore'))
909 files.append(self._join(b'.hgignore'))
868 for name, path in self._ui.configitems(b"ui"):
910 for name, path in self._ui.configitems(b"ui"):
869 if name == b'ignore' or name.startswith(b'ignore.'):
911 if name == b'ignore' or name.startswith(b'ignore.'):
870 # we need to use os.path.join here rather than self._join
912 # we need to use os.path.join here rather than self._join
871 # because path is arbitrary and user-specified
913 # because path is arbitrary and user-specified
872 files.append(os.path.join(self._rootdir, util.expandpath(path)))
914 files.append(os.path.join(self._rootdir, util.expandpath(path)))
873 return files
915 return files
874
916
    def _ignorefileandline(self, f):
        """Return (patternfile, lineno, line) for the first pattern matching ``f``.

        Walks all ignore files (following ``subinclude`` references,
        breadth-first, each file visited at most once) and returns
        ``(None, -1, b"")`` when nothing matches.
        """
        files = collections.deque(self._ignorefiles())
        visited = set()
        while files:
            i = files.popleft()
            patterns = matchmod.readpatternfile(
                i, self._ui.warn, sourceinfo=True
            )
            for pattern, lineno, line in patterns:
                kind, p = matchmod._patsplit(pattern, b'glob')
                if kind == b"subinclude":
                    # queue the referenced pattern file instead of matching
                    if p not in visited:
                        files.append(p)
                    continue
                m = matchmod.match(
                    self._root, b'', [], [pattern], warn=self._ui.warn
                )
                if m(f):
                    return (i, lineno, line)
            visited.add(i)
        return (None, -1, b"")
896
938
    def _walkexplicit(self, match, subrepos):
        """Get stat data about the files explicitly specified by match.

        Return a triple (results, dirsfound, dirsnotfound).
        - results is a mapping from filename to stat result. It also contains
          listings mapping subrepos and .hg to None.
        - dirsfound is a list of files found to be directories.
        - dirsnotfound is a list of files that the dirstate thinks are
          directories and that were not found."""

        def badtype(mode):
            # Human-readable description for an unsupported on-disk file type.
            kind = _(b'unknown')
            if stat.S_ISCHR(mode):
                kind = _(b'character device')
            elif stat.S_ISBLK(mode):
                kind = _(b'block device')
            elif stat.S_ISFIFO(mode):
                kind = _(b'fifo')
            elif stat.S_ISSOCK(mode):
                kind = _(b'socket')
            elif stat.S_ISDIR(mode):
                kind = _(b'directory')
            return _(b'unsupported file type (type is %s)') % kind

        # hoist frequently used attributes/functions into locals
        badfn = match.bad
        dmap = self._map
        lstat = os.lstat
        getkind = stat.S_IFMT
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join
        dirsfound = []
        foundadd = dirsfound.append
        dirsnotfound = []
        notfoundadd = dirsnotfound.append

        if not match.isexact() and self._checkcase:
            normalize = self._normalize
        else:
            normalize = None

        # drop file paths that live inside a subrepo; both lists are sorted
        # so a single merge-style pass suffices
        files = sorted(match.files())
        subrepos.sort()
        i, j = 0, 0
        while i < len(files) and j < len(subrepos):
            subpath = subrepos[j] + b"/"
            if files[i] < subpath:
                i += 1
                continue
            while i < len(files) and files[i].startswith(subpath):
                del files[i]
            j += 1

        if not files or b'' in files:
            files = [b'']
            # constructing the foldmap is expensive, so don't do it for the
            # common case where files is ['']
            normalize = None
        results = dict.fromkeys(subrepos)
        results[b'.hg'] = None

        for ff in files:
            if normalize:
                nf = normalize(ff, False, True)
            else:
                nf = ff
            if nf in results:
                continue

            try:
                st = lstat(join(nf))
                kind = getkind(st.st_mode)
                if kind == dirkind:
                    if nf in dmap:
                        # file replaced by dir on disk but still in dirstate
                        results[nf] = None
                    foundadd((nf, ff))
                elif kind == regkind or kind == lnkkind:
                    results[nf] = st
                else:
                    badfn(ff, badtype(kind))
                    if nf in dmap:
                        results[nf] = None
            except OSError as inst:  # nf not found on disk - it is dirstate only
                if nf in dmap:  # does it exactly match a missing file?
                    results[nf] = None
                else:  # does it match a missing directory?
                    if self._map.hasdir(nf):
                        notfoundadd(nf)
                    else:
                        badfn(ff, encoding.strtolocal(inst.strerror))

        # match.files() may contain explicitly-specified paths that shouldn't
        # be taken; drop them from the list of files found. dirsfound/notfound
        # aren't filtered here because they will be tested later.
        if match.anypats():
            for f in list(results):
                if f == b'.hg' or f in subrepos:
                    # keep sentinel to disable further out-of-repo walks
                    continue
                if not match(f):
                    del results[f]

        # Case insensitive filesystems cannot rely on lstat() failing to detect
        # a case-only rename. Prune the stat object for any file that does not
        # match the case in the filesystem, if there are multiple files that
        # normalize to the same path.
        if match.isexact() and self._checkcase:
            normed = {}

            for f, st in results.items():
                if st is None:
                    continue

                nc = util.normcase(f)
                paths = normed.get(nc)

                if paths is None:
                    paths = set()
                    normed[nc] = paths

                paths.add(f)

            for norm, paths in normed.items():
                if len(paths) > 1:
                    for path in paths:
                        folded = self._discoverpath(
                            path, norm, True, None, self._map.dirfoldmap
                        )
                        if path != folded:
                            results[path] = None

        return results, dirsfound, dirsnotfound
1031
1073
    def walk(self, match, subrepos, unknown, ignored, full=True):
        """
        Walk recursively through the directory tree, finding all files
        matched by match.

        match: matcher object selecting the files of interest
        subrepos: subrepository paths to drop from the results
        unknown: if True, also report files not present in the dirstate
        ignored: if True, also descend into and report ignored files
        full: if False, maybe skip some known-clean files.

        Return a dict mapping filename to stat-like object (either
        mercurial.osutil.stat instance or return value of os.stat()).

        """
        # full is a flag that extensions that hook into walk can use -- this
        # implementation doesn't use it at all. This satisfies the contract
        # because we only guarantee a "maybe".

        # Choose the ignore predicates up front: listing ignored files means
        # nothing is "ignored"; listing neither unknown nor ignored means
        # everything outside the dirstate is, which lets us skip step 2.
        if ignored:
            ignore = util.never
            dirignore = util.never
        elif unknown:
            ignore = self._ignore
            dirignore = self._dirignore
        else:
            # if not unknown and not ignored, drop dir recursion and step 2
            ignore = util.always
            dirignore = util.always

        if self._sparsematchfn is not None:
            # restrict the walk to the sparse checkout, but keep explicitly
            # named files visible even outside the sparse config
            em = matchmod.exact(match.files())
            sm = matchmod.unionmatcher([self._sparsematcher, em])
            match = matchmod.intersectmatchers(match, sm)

        # hoist frequently used attributes into locals for the hot loop below
        matchfn = match.matchfn
        matchalways = match.always()
        matchtdir = match.traversedir
        dmap = self._map
        listdir = util.listdir
        lstat = os.lstat
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join

        exact = skipstep3 = False
        if match.isexact():  # match.exact
            exact = True
            dirignore = util.always  # skip step 2
        elif match.prefix():  # match.match, no patterns
            skipstep3 = True

        if not exact and self._checkcase:
            normalize = self._normalize
            normalizefile = self._normalizefile
            skipstep3 = False
        else:
            normalize = self._normalize
            normalizefile = None

        # step 1: find all explicit files
        results, work, dirsnotfound = self._walkexplicit(match, subrepos)
        if matchtdir:
            for d in work:
                matchtdir(d[0])
            for d in dirsnotfound:
                matchtdir(d)

        skipstep3 = skipstep3 and not (work or dirsnotfound)
        work = [d for d in work if not dirignore(d[0])]

        # step 2: visit subdirectories
        def traverse(work, alreadynormed):
            wadd = work.append
            while work:
                tracing.counter('dirstate.walk work', len(work))
                nd = work.pop()
                visitentries = match.visitchildrenset(nd)
                if not visitentries:
                    continue
                if visitentries == b'this' or visitentries == b'all':
                    visitentries = None
                skip = None
                if nd != b'':
                    skip = b'.hg'
                try:
                    with tracing.log('dirstate.walk.traverse listdir %s', nd):
                        entries = listdir(join(nd), stat=True, skip=skip)
                except (PermissionError, FileNotFoundError) as inst:
                    match.bad(
                        self.pathto(nd), encoding.strtolocal(inst.strerror)
                    )
                    continue
                for f, kind, st in entries:
                    # Some matchers may return files in the visitentries set,
                    # instead of 'this', if the matcher explicitly mentions them
                    # and is not an exactmatcher. This is acceptable; we do not
                    # make any hard assumptions about file-or-directory below
                    # based on the presence of `f` in visitentries. If
                    # visitchildrenset returned a set, we can always skip the
                    # entries *not* in the set it provided regardless of whether
                    # they're actually a file or a directory.
                    if visitentries and f not in visitentries:
                        continue
                    if normalizefile:
                        # even though f might be a directory, we're only
                        # interested in comparing it to files currently in the
                        # dmap -- therefore normalizefile is enough
                        nf = normalizefile(
                            nd and (nd + b"/" + f) or f, True, True
                        )
                    else:
                        nf = nd and (nd + b"/" + f) or f
                    if nf not in results:
                        if kind == dirkind:
                            if not ignore(nf):
                                if matchtdir:
                                    matchtdir(nf)
                                wadd(nf)
                            if nf in dmap and (matchalways or matchfn(nf)):
                                results[nf] = None
                        elif kind == regkind or kind == lnkkind:
                            if nf in dmap:
                                if matchalways or matchfn(nf):
                                    results[nf] = st
                            elif (matchalways or matchfn(nf)) and not ignore(
                                nf
                            ):
                                # unknown file -- normalize if necessary
                                if not alreadynormed:
                                    nf = normalize(nf, False, True)
                                results[nf] = st
                        elif nf in dmap and (matchalways or matchfn(nf)):
                            # tracked file replaced on disk by something that
                            # is neither a regular file, symlink nor directory
                            results[nf] = None

        for nd, d in work:
            # alreadynormed means that processwork doesn't have to do any
            # expensive directory normalization
            alreadynormed = not normalize or nd == d
            traverse([d], alreadynormed)

        for s in subrepos:
            del results[s]
        del results[b'.hg']

        # step 3: visit remaining files from dmap
        if not skipstep3 and not exact:
            # If a dmap file is not in results yet, it was either
            # a) not matching matchfn b) ignored, c) missing, or d) under a
            # symlink directory.
            if not results and matchalways:
                visit = [f for f in dmap]
            else:
                visit = [f for f in dmap if f not in results and matchfn(f)]
            visit.sort()

            if unknown:
                # unknown == True means we walked all dirs under the roots
                # that wasn't ignored, and everything that matched was stat'ed
                # and is already in results.
                # The rest must thus be ignored or under a symlink.
                audit_path = pathutil.pathauditor(self._root, cached=True)

                for nf in iter(visit):
                    # If a stat for the same file was already added with a
                    # different case, don't add one for this, since that would
                    # make it appear as if the file exists under both names
                    # on disk.
                    if (
                        normalizefile
                        and normalizefile(nf, True, True) in results
                    ):
                        results[nf] = None
                    # Report ignored items in the dmap as long as they are not
                    # under a symlink directory.
                    elif audit_path.check(nf):
                        try:
                            results[nf] = lstat(join(nf))
                            # file was just ignored, no links, and exists
                        except OSError:
                            # file doesn't exist
                            results[nf] = None
                    else:
                        # It's either missing or under a symlink directory
                        # which we in this case report as missing
                        results[nf] = None
            else:
                # We may not have walked the full directory tree above,
                # so stat and check everything we missed.
                iv = iter(visit)
                for st in util.statfiles([join(i) for i in visit]):
                    results[next(iv)] = st
        return results
1222
1264
    def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
        """Run the Rust implementation of status and return
        ``(lookup, scmutil.status)``.

        May raise ``rustmod.FallbackError`` (propagated from
        ``rustmod.status``) when the Rust path cannot handle the request;
        the caller falls back to the pure-Python walk in that case.
        """
        if self._sparsematchfn is not None:
            # restrict to the sparse checkout while keeping explicitly named
            # files visible (same treatment as in walk())
            em = matchmod.exact(matcher.files())
            sm = matchmod.unionmatcher([self._sparsematcher, em])
            matcher = matchmod.intersectmatchers(matcher, sm)
        # Force Rayon (Rust parallelism library) to respect the number of
        # workers. This is a temporary workaround until Rust code knows
        # how to read the config file.
        numcpus = self._ui.configint(b"worker", b"numcpus")
        if numcpus is not None:
            encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)

        workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
        if not workers_enabled:
            encoding.environ[b"RAYON_NUM_THREADS"] = b"1"

        (
            lookup,
            modified,
            added,
            removed,
            deleted,
            clean,
            ignored,
            unknown,
            warnings,
            bad,
            traversed,
            dirty,
        ) = rustmod.status(
            self._map._map,
            matcher,
            self._rootdir,
            self._ignorefiles(),
            self._checkexec,
            bool(list_clean),
            bool(list_ignored),
            bool(list_unknown),
            bool(matcher.traversedir),
        )

        # the Rust side may have fixed up dirstate entries along the way
        self._dirty |= dirty

        if matcher.traversedir:
            for dir in traversed:
                matcher.traversedir(dir)

        if self._ui.warn:
            for item in warnings:
                if isinstance(item, tuple):
                    # (file_path, syntax) pair: bad pattern syntax
                    file_path, syntax = item
                    msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
                        file_path,
                        syntax,
                    )
                    self._ui.warn(msg)
                else:
                    # bare path: the ignore/pattern file could not be read
                    msg = _(b"skipping unreadable pattern file '%s': %s\n")
                    self._ui.warn(
                        msg
                        % (
                            pathutil.canonpath(
                                self._rootdir, self._rootdir, item
                            ),
                            b"No such file or directory",
                        )
                    )

        for (fn, message) in bad:
            matcher.bad(fn, encoding.strtolocal(message))

        status = scmutil.status(
            modified=modified,
            added=added,
            removed=removed,
            deleted=deleted,
            unknown=unknown,
            ignored=ignored,
            clean=clean,
        )
        return (lookup, status)
1304
1346
    def status(self, match, subrepos, ignored, clean, unknown):
        """Determine the status of the working copy relative to the
        dirstate and return a triple of (unsure, status, mtime_boundary),
        where status is of type scmutil.status and:

          unsure:
            files that might have been modified since the dirstate was
            written, but need to be read to be sure (size is the same
            but mtime differs)
          status.modified:
            files that have definitely been modified since the dirstate
            was written (different size or mode)
          status.clean:
            files that have definitely not been modified since the
            dirstate was written
          mtime_boundary:
            filesystem time taken before the walk (None when it could not
            be read, e.g. largefiles or read-only context)
        """
        # the ignored/clean/unknown parameters are "list this category"
        # flags; the same names are reused below for the result lists
        listignored, listclean, listunknown = ignored, clean, unknown
        lookup, modified, added, unknown, ignored = [], [], [], [], []
        removed, deleted, clean = [], [], []

        dmap = self._map
        dmap.preload()

        use_rust = True

        allowed_matchers = (
            matchmod.alwaysmatcher,
            matchmod.differencematcher,
            matchmod.exactmatcher,
            matchmod.includematcher,
            matchmod.intersectionmatcher,
            matchmod.nevermatcher,
            matchmod.unionmatcher,
        )

        if rustmod is None:
            use_rust = False
        elif self._checkcase:
            # Case-insensitive filesystems are not handled yet
            use_rust = False
        elif subrepos:
            use_rust = False
        elif not isinstance(match, allowed_matchers):
            # Some matchers have yet to be implemented
            use_rust = False

        # Get the time from the filesystem so we can disambiguate files that
        # appear modified in the present or future.
        try:
            mtime_boundary = timestamp.get_fs_now(self._opener)
        except OSError:
            # In largefiles or readonly context
            mtime_boundary = None

        if use_rust:
            try:
                res = self._rust_status(
                    match, listclean, listignored, listunknown
                )
                return res + (mtime_boundary,)
            except rustmod.FallbackError:
                # Rust could not handle this request; fall through to the
                # pure-Python implementation below
                pass

        def noop(f):
            pass

        # bind the per-category append methods once; categories that are not
        # requested get a no-op so the hot loop below stays branch-free
        dcontains = dmap.__contains__
        dget = dmap.__getitem__
        ladd = lookup.append  # aka "unsure"
        madd = modified.append
        aadd = added.append
        uadd = unknown.append if listunknown else noop
        iadd = ignored.append if listignored else noop
        radd = removed.append
        dadd = deleted.append
        cadd = clean.append if listclean else noop
        mexact = match.exact
        dirignore = self._dirignore
        checkexec = self._checkexec
        checklink = self._checklink
        copymap = self._map.copymap

        # We need to do full walks when either
        # - we're listing all clean files, or
        # - match.traversedir does something, because match.traversedir should
        #   be called for every dir in the working dir
        full = listclean or match.traversedir is not None
        for fn, st in self.walk(
            match, subrepos, listunknown, listignored, full=full
        ).items():
            if not dcontains(fn):
                # not in the dirstate: either ignored or unknown
                if (listignored or mexact(fn)) and dirignore(fn):
                    if listignored:
                        iadd(fn)
                else:
                    uadd(fn)
                continue

            t = dget(fn)
            mode = t.mode
            size = t.size

            if not st and t.tracked:
                # tracked but gone from disk
                dadd(fn)
            elif t.p2_info:
                madd(fn)
            elif t.added:
                aadd(fn)
            elif t.removed:
                radd(fn)
            elif t.tracked:
                if not checklink and t.has_fallback_symlink:
                    # If the file system does not support symlink, the mode
                    # might not be correctly stored in the dirstate, so do not
                    # trust it.
                    ladd(fn)
                elif not checkexec and t.has_fallback_exec:
                    # If the file system does not support exec bits, the mode
                    # might not be correctly stored in the dirstate, so do not
                    # trust it.
                    ladd(fn)
                elif (
                    size >= 0
                    and (
                        (size != st.st_size and size != st.st_size & _rangemask)
                        or ((mode ^ st.st_mode) & 0o100 and checkexec)
                    )
                    or fn in copymap
                ):
                    if stat.S_ISLNK(st.st_mode) and size != st.st_size:
                        # issue6456: Size returned may be longer due to
                        # encryption on EXT-4 fscrypt, undecided.
                        ladd(fn)
                    else:
                        madd(fn)
                elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
                    # There might be a change in the future if for example the
                    # internal clock is off, but this is a case where the issues
                    # the user would face would be a lot worse and there is
                    # nothing we can really do.
                    ladd(fn)
                elif listclean:
                    cadd(fn)
        status = scmutil.status(
            modified, added, removed, deleted, unknown, ignored, clean
        )
        return (lookup, status, mtime_boundary)
1452
1494
1453 def matches(self, match):
1495 def matches(self, match):
1454 """
1496 """
1455 return files in the dirstate (in whatever state) filtered by match
1497 return files in the dirstate (in whatever state) filtered by match
1456 """
1498 """
1457 dmap = self._map
1499 dmap = self._map
1458 if rustmod is not None:
1500 if rustmod is not None:
1459 dmap = self._map._map
1501 dmap = self._map._map
1460
1502
1461 if match.always():
1503 if match.always():
1462 return dmap.keys()
1504 return dmap.keys()
1463 files = match.files()
1505 files = match.files()
1464 if match.isexact():
1506 if match.isexact():
1465 # fast path -- filter the other way around, since typically files is
1507 # fast path -- filter the other way around, since typically files is
1466 # much smaller than dmap
1508 # much smaller than dmap
1467 return [f for f in files if f in dmap]
1509 return [f for f in files if f in dmap]
1468 if match.prefix() and all(fn in dmap for fn in files):
1510 if match.prefix() and all(fn in dmap for fn in files):
1469 # fast path -- all the values are known to be files, so just return
1511 # fast path -- all the values are known to be files, so just return
1470 # that
1512 # that
1471 return list(files)
1513 return list(files)
1472 return [f for f in dmap if match(f)]
1514 return [f for f in dmap if match(f)]
1473
1515
1474 def _actualfilename(self, tr):
1516 def _actualfilename(self, tr):
1475 if tr:
1517 if tr:
1476 return self._pendingfilename
1518 return self._pendingfilename
1477 else:
1519 else:
1478 return self._filename
1520 return self._filename
1479
1521
1480 def data_backup_filename(self, backupname):
1522 def data_backup_filename(self, backupname):
1481 if not self._use_dirstate_v2:
1523 if not self._use_dirstate_v2:
1482 return None
1524 return None
1483 return backupname + b'.v2-data'
1525 return backupname + b'.v2-data'
1484
1526
    def _new_backup_data_filename(self, backupname):
        """return a (data_filename, backup_data_filename) pair for backing
        up the v2 data-file, or None when there is nothing to back up
        (dirstate-v1, or a docket that was never written)"""
        if not self._use_dirstate_v2:
            return None
        if self._map.docket.uuid is None:
            # not created yet, nothing to backup
            return None
        data_filename = self._map.docket.data_filename()
        return data_filename, self.data_backup_filename(backupname)
1494
1536
1495 def backup_data_file(self, backupname):
1537 def backup_data_file(self, backupname):
1496 if not self._use_dirstate_v2:
1538 if not self._use_dirstate_v2:
1497 return None
1539 return None
1498 docket = docketmod.DirstateDocket.parse(
1540 docket = docketmod.DirstateDocket.parse(
1499 self._opener.read(backupname),
1541 self._opener.read(backupname),
1500 self._nodeconstants,
1542 self._nodeconstants,
1501 )
1543 )
1502 return self.data_backup_filename(backupname), docket.data_filename()
1544 return self.data_backup_filename(backupname), docket.data_filename()
1503
1545
    def savebackup(self, tr, backupname):
        '''Save current dirstate into backup file'''
        filename = self._actualfilename(tr)
        assert backupname != filename

        # use '_writedirstate' instead of 'write' to write changes certainly,
        # because the latter omits writing out if transaction is running.
        # output file will be used to create backup of dirstate at this point.
        if self._dirty:
            self._writedirstate(
                tr,
                self._opener(filename, b"w", atomictemp=True, checkambig=True),
            )

        if tr:
            # ensure that subsequent tr.writepending returns True for
            # changes written out above, even if dirstate is never
            # changed after this
            tr.addfilegenerator(
                b'dirstate-1-main',
                (self._filename,),
                lambda f: self._writedirstate(tr, f),
                location=b'plain',
                post_finalize=True,
            )

        self._opener.tryunlink(backupname)
        if self._opener.exists(filename):
            # hardlink backup is okay because _writedirstate is always called
            # with an "atomictemp=True" file.
            util.copyfile(
                self._opener.join(filename),
                self._opener.join(backupname),
                hardlink=True,
            )
            # dirstate-v2 keeps the bulk of the data in a separate file that
            # must be backed up alongside the docket
            data_pair = self._new_backup_data_filename(backupname)
            if data_pair is not None:
                data_filename, bck_data_filename = data_pair
                util.copyfile(
                    self._opener.join(data_filename),
                    self._opener.join(bck_data_filename),
                    hardlink=True,
                )
                if tr is not None:
                    # ensure that pending file written above is unlinked at
                    # failure, even if tr.writepending isn't invoked until the
                    # end of this transaction
                    tr.registertmp(bck_data_filename, location=b'plain')
1552
1594
    def restorebackup(self, tr, backupname):
        '''Restore dirstate by backup file'''
        # this "invalidate()" prevents "wlock.release()" from writing
        # changes of dirstate out after restoring from backup file
        self.invalidate()
        o = self._opener
        if not o.exists(backupname):
            # there was no file backup, delete existing files
            filename = self._actualfilename(tr)
            data_file = None
            if self._use_dirstate_v2:
                data_file = self._map.docket.data_filename()
            if o.exists(filename):
                o.unlink(filename)
            if data_file is not None and o.exists(data_file):
                o.unlink(data_file)
            return
        filename = self._actualfilename(tr)
        # must read the docket backup before we start renaming it away
        data_pair = self.backup_data_file(backupname)
        if o.exists(filename) and util.samefile(
            o.join(backupname), o.join(filename)
        ):
            # hardlinked backup: current file is already identical, just drop
            # the backup name
            o.unlink(backupname)
        else:
            o.rename(backupname, filename, checkambig=True)

        if data_pair is not None:
            # same dance for the dirstate-v2 data file
            data_backup, target = data_pair
            if o.exists(target) and util.samefile(
                o.join(data_backup), o.join(target)
            ):
                o.unlink(data_backup)
            else:
                o.rename(data_backup, target, checkambig=True)
1587
1629
1588 def clearbackup(self, tr, backupname):
1630 def clearbackup(self, tr, backupname):
1589 '''Clear backup file'''
1631 '''Clear backup file'''
1590 o = self._opener
1632 o = self._opener
1591 if o.exists(backupname):
1633 if o.exists(backupname):
1592 data_backup = self.backup_data_file(backupname)
1634 data_backup = self.backup_data_file(backupname)
1593 o.unlink(backupname)
1635 o.unlink(backupname)
1594 if data_backup is not None:
1636 if data_backup is not None:
1595 o.unlink(data_backup[0])
1637 o.unlink(data_backup[0])
1596
1638
1597 def verify(self, m1, m2, p1, narrow_matcher=None):
1639 def verify(self, m1, m2, p1, narrow_matcher=None):
1598 """
1640 """
1599 check the dirstate contents against the parent manifest and yield errors
1641 check the dirstate contents against the parent manifest and yield errors
1600 """
1642 """
1601 missing_from_p1 = _(
1643 missing_from_p1 = _(
1602 b"%s marked as tracked in p1 (%s) but not in manifest1\n"
1644 b"%s marked as tracked in p1 (%s) but not in manifest1\n"
1603 )
1645 )
1604 unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
1646 unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
1605 missing_from_ps = _(
1647 missing_from_ps = _(
1606 b"%s marked as modified, but not in either manifest\n"
1648 b"%s marked as modified, but not in either manifest\n"
1607 )
1649 )
1608 missing_from_ds = _(
1650 missing_from_ds = _(
1609 b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
1651 b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
1610 )
1652 )
1611 for f, entry in self.items():
1653 for f, entry in self.items():
1612 if entry.p1_tracked:
1654 if entry.p1_tracked:
1613 if entry.modified and f not in m1 and f not in m2:
1655 if entry.modified and f not in m1 and f not in m2:
1614 yield missing_from_ps % f
1656 yield missing_from_ps % f
1615 elif f not in m1:
1657 elif f not in m1:
1616 yield missing_from_p1 % (f, node.short(p1))
1658 yield missing_from_p1 % (f, node.short(p1))
1617 if entry.added and f in m1:
1659 if entry.added and f in m1:
1618 yield unexpected_in_p1 % f
1660 yield unexpected_in_p1 % f
1619 for f in m1:
1661 for f in m1:
1620 if narrow_matcher is not None and not narrow_matcher(f):
1662 if narrow_matcher is not None and not narrow_matcher(f):
1621 continue
1663 continue
1622 entry = self.get_entry(f)
1664 entry = self.get_entry(f)
1623 if not entry.p1_tracked:
1665 if not entry.p1_tracked:
1624 yield missing_from_ds % (f, node.short(p1))
1666 yield missing_from_ds % (f, node.short(p1))
General Comments 0
You need to be logged in to leave comments. Login now