##// END OF EJS Templates
dirstate: rename _parentwriters to _changing_level...
marmoute -
r50916:4e955a7a default
parent child Browse files
Show More
@@ -1,1666 +1,1665 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import collections
9 import collections
10 import contextlib
10 import contextlib
11 import os
11 import os
12 import stat
12 import stat
13 import uuid
13 import uuid
14
14
15 from .i18n import _
15 from .i18n import _
16 from .pycompat import delattr
16 from .pycompat import delattr
17
17
18 from hgdemandimport import tracing
18 from hgdemandimport import tracing
19
19
20 from . import (
20 from . import (
21 dirstatemap,
21 dirstatemap,
22 encoding,
22 encoding,
23 error,
23 error,
24 match as matchmod,
24 match as matchmod,
25 node,
25 node,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 util,
30 util,
31 )
31 )
32
32
33 from .dirstateutils import (
33 from .dirstateutils import (
34 docket as docketmod,
34 docket as docketmod,
35 timestamp,
35 timestamp,
36 )
36 )
37
37
38 from .interfaces import (
38 from .interfaces import (
39 dirstate as intdirstate,
39 dirstate as intdirstate,
40 util as interfaceutil,
40 util as interfaceutil,
41 )
41 )
42
42
43 parsers = policy.importmod('parsers')
43 parsers = policy.importmod('parsers')
44 rustmod = policy.importrust('dirstate')
44 rustmod = policy.importrust('dirstate')
45
45
46 HAS_FAST_DIRSTATE_V2 = rustmod is not None
46 HAS_FAST_DIRSTATE_V2 = rustmod is not None
47
47
48 propertycache = util.propertycache
48 propertycache = util.propertycache
49 filecache = scmutil.filecache
49 filecache = scmutil.filecache
50 _rangemask = dirstatemap.rangemask
50 _rangemask = dirstatemap.rangemask
51
51
52 DirstateItem = dirstatemap.DirstateItem
52 DirstateItem = dirstatemap.DirstateItem
53
53
54
54
55 class repocache(filecache):
55 class repocache(filecache):
56 """filecache for files in .hg/"""
56 """filecache for files in .hg/"""
57
57
58 def join(self, obj, fname):
58 def join(self, obj, fname):
59 return obj._opener.join(fname)
59 return obj._opener.join(fname)
60
60
61
61
62 class rootcache(filecache):
62 class rootcache(filecache):
63 """filecache for files in the repository root"""
63 """filecache for files in the repository root"""
64
64
65 def join(self, obj, fname):
65 def join(self, obj, fname):
66 return obj._join(fname)
66 return obj._join(fname)
67
67
68
68
69 def requires_changing_parents(func):
69 def requires_changing_parents(func):
70 def wrap(self, *args, **kwargs):
70 def wrap(self, *args, **kwargs):
71 if not self.pendingparentchange():
71 if not self.pendingparentchange():
72 msg = 'calling `%s` outside of a changing_parents context'
72 msg = 'calling `%s` outside of a changing_parents context'
73 msg %= func.__name__
73 msg %= func.__name__
74 raise error.ProgrammingError(msg)
74 raise error.ProgrammingError(msg)
75 if self._invalidated_context:
75 if self._invalidated_context:
76 msg = 'calling `%s` after the dirstate was invalidated'
76 msg = 'calling `%s` after the dirstate was invalidated'
77 raise error.ProgrammingError(msg)
77 raise error.ProgrammingError(msg)
78 return func(self, *args, **kwargs)
78 return func(self, *args, **kwargs)
79
79
80 return wrap
80 return wrap
81
81
82
82
83 def requires_not_changing_parents(func):
83 def requires_not_changing_parents(func):
84 def wrap(self, *args, **kwargs):
84 def wrap(self, *args, **kwargs):
85 if self.pendingparentchange():
85 if self.pendingparentchange():
86 msg = 'calling `%s` inside of a changing_parents context'
86 msg = 'calling `%s` inside of a changing_parents context'
87 msg %= func.__name__
87 msg %= func.__name__
88 raise error.ProgrammingError(msg)
88 raise error.ProgrammingError(msg)
89 return func(self, *args, **kwargs)
89 return func(self, *args, **kwargs)
90
90
91 return wrap
91 return wrap
92
92
93
93
94 @interfaceutil.implementer(intdirstate.idirstate)
94 @interfaceutil.implementer(intdirstate.idirstate)
95 class dirstate:
95 class dirstate:
96 def __init__(
96 def __init__(
97 self,
97 self,
98 opener,
98 opener,
99 ui,
99 ui,
100 root,
100 root,
101 validate,
101 validate,
102 sparsematchfn,
102 sparsematchfn,
103 nodeconstants,
103 nodeconstants,
104 use_dirstate_v2,
104 use_dirstate_v2,
105 use_tracked_hint=False,
105 use_tracked_hint=False,
106 ):
106 ):
107 """Create a new dirstate object.
107 """Create a new dirstate object.
108
108
109 opener is an open()-like callable that can be used to open the
109 opener is an open()-like callable that can be used to open the
110 dirstate file; root is the root of the directory tracked by
110 dirstate file; root is the root of the directory tracked by
111 the dirstate.
111 the dirstate.
112 """
112 """
113 self._use_dirstate_v2 = use_dirstate_v2
113 self._use_dirstate_v2 = use_dirstate_v2
114 self._use_tracked_hint = use_tracked_hint
114 self._use_tracked_hint = use_tracked_hint
115 self._nodeconstants = nodeconstants
115 self._nodeconstants = nodeconstants
116 self._opener = opener
116 self._opener = opener
117 self._validate = validate
117 self._validate = validate
118 self._root = root
118 self._root = root
119 # Either build a sparse-matcher or None if sparse is disabled
119 # Either build a sparse-matcher or None if sparse is disabled
120 self._sparsematchfn = sparsematchfn
120 self._sparsematchfn = sparsematchfn
121 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
121 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
122 # UNC path pointing to root share (issue4557)
122 # UNC path pointing to root share (issue4557)
123 self._rootdir = pathutil.normasprefix(root)
123 self._rootdir = pathutil.normasprefix(root)
124 # True if any internal state may be different
124 # True if any internal state may be different
125 self._dirty = False
125 self._dirty = False
126 # True if the set of tracked file may be different
126 # True if the set of tracked file may be different
127 self._dirty_tracked_set = False
127 self._dirty_tracked_set = False
128 self._ui = ui
128 self._ui = ui
129 self._filecache = {}
129 self._filecache = {}
130 # nesting level of `changing_parents` context
130 # nesting level of `changing_parents` context
131 self._parentwriters = 0
131 self._changing_level = 0
132 # True if the current dirstate changing operations have been
132 # True if the current dirstate changing operations have been
133 # invalidated (used to make sure all nested contexts have been exited)
133 # invalidated (used to make sure all nested contexts have been exited)
134 self._invalidated_context = False
134 self._invalidated_context = False
135 self._filename = b'dirstate'
135 self._filename = b'dirstate'
136 self._filename_th = b'dirstate-tracked-hint'
136 self._filename_th = b'dirstate-tracked-hint'
137 self._pendingfilename = b'%s.pending' % self._filename
137 self._pendingfilename = b'%s.pending' % self._filename
138 self._plchangecallbacks = {}
138 self._plchangecallbacks = {}
139 self._origpl = None
139 self._origpl = None
140 self._mapcls = dirstatemap.dirstatemap
140 self._mapcls = dirstatemap.dirstatemap
141 # Access and cache cwd early, so we don't access it for the first time
141 # Access and cache cwd early, so we don't access it for the first time
142 # after a working-copy update caused it to not exist (accessing it then
142 # after a working-copy update caused it to not exist (accessing it then
143 # raises an exception).
143 # raises an exception).
144 self._cwd
144 self._cwd
145
145
146 def prefetch_parents(self):
146 def prefetch_parents(self):
147 """make sure the parents are loaded
147 """make sure the parents are loaded
148
148
149 Used to avoid a race condition.
149 Used to avoid a race condition.
150 """
150 """
151 self._pl
151 self._pl
152
152
153 @contextlib.contextmanager
153 @contextlib.contextmanager
154 def changing_parents(self, repo):
154 def changing_parents(self, repo):
155 """Context manager for handling dirstate parents.
155 """Context manager for handling dirstate parents.
156
156
157 If an exception occurs in the scope of the context manager,
157 If an exception occurs in the scope of the context manager,
158 the incoherent dirstate won't be written when wlock is
158 the incoherent dirstate won't be written when wlock is
159 released.
159 released.
160 """
160 """
161 if repo.currentwlock() is None:
161 if repo.currentwlock() is None:
162 msg = b"changing parents without holding the wlock"
162 msg = b"changing parents without holding the wlock"
163 raise error.ProgrammingError(msg)
163 raise error.ProgrammingError(msg)
164 if self._invalidated_context:
164 if self._invalidated_context:
165 msg = "trying to use an invalidated dirstate before it has reset"
165 msg = "trying to use an invalidated dirstate before it has reset"
166 raise error.ProgrammingError(msg)
166 raise error.ProgrammingError(msg)
167 self._parentwriters += 1
167 self._changing_level += 1
168 try:
168 try:
169 yield
169 yield
170 except Exception:
170 except Exception:
171 self.invalidate()
171 self.invalidate()
172 raise
172 raise
173 finally:
173 finally:
174 if self._parentwriters > 0:
174 if self._changing_level > 0:
175 if self._invalidated_context:
175 if self._invalidated_context:
176 # make sure we invalidate anything an upper context might
176 # make sure we invalidate anything an upper context might
177 # have changed.
177 # have changed.
178 self.invalidate()
178 self.invalidate()
179 self._parentwriters -= 1
179 self._changing_level -= 1
180 # The invalidation is complete once we exit the final context
180 # The invalidation is complete once we exit the final context
181 # manager
181 # manager
182 if self._parentwriters <= 0:
182 if self._changing_level <= 0:
183 assert self._parentwriters == 0
183 assert self._changing_level == 0
184 if self._invalidated_context:
184 if self._invalidated_context:
185 self._invalidated_context = False
185 self._invalidated_context = False
186 else:
186 else:
187 # When an exception occurred, `_invalidated_context`
187 # When an exception occurred, `_invalidated_context`
188 # would have been set to True by the `invalidate`
188 # would have been set to True by the `invalidate`
189 # call earlier.
189 # call earlier.
190 #
190 #
191 # We don't have more straightforward code, because the
191 # We don't have more straightforward code, because the
192 # Exception catching (and the associated `invalidate`
192 # Exception catching (and the associated `invalidate`
193 # calling) might have been called by a nested context
193 # calling) might have been called by a nested context
194 # instead of the top level one.
194 # instead of the top level one.
195 self.write(repo.currenttransaction())
195 self.write(repo.currenttransaction())
196
196
197 # here to help migration to the new code
197 # here to help migration to the new code
198 def parentchange(self):
198 def parentchange(self):
199 msg = (
199 msg = (
200 "Mercurial 6.4 and later requires call to "
200 "Mercurial 6.4 and later requires call to "
201 "`dirstate.changing_parents(repo)`"
201 "`dirstate.changing_parents(repo)`"
202 )
202 )
203 raise error.ProgrammingError(msg)
203 raise error.ProgrammingError(msg)
204
204
205 def pendingparentchange(self):
205 def pendingparentchange(self):
206 """Returns true if the dirstate is in the middle of a set of changes
206 """Returns true if the dirstate is in the middle of a set of changes
207 that modify the dirstate parent.
207 that modify the dirstate parent.
208 """
208 """
209 return self._parentwriters > 0
209 return self._changing_level > 0
210
210
211 @propertycache
211 @propertycache
212 def _map(self):
212 def _map(self):
213 """Return the dirstate contents (see documentation for dirstatemap)."""
213 """Return the dirstate contents (see documentation for dirstatemap)."""
214 self._map = self._mapcls(
214 self._map = self._mapcls(
215 self._ui,
215 self._ui,
216 self._opener,
216 self._opener,
217 self._root,
217 self._root,
218 self._nodeconstants,
218 self._nodeconstants,
219 self._use_dirstate_v2,
219 self._use_dirstate_v2,
220 )
220 )
221 return self._map
221 return self._map
222
222
223 @property
223 @property
224 def _sparsematcher(self):
224 def _sparsematcher(self):
225 """The matcher for the sparse checkout.
225 """The matcher for the sparse checkout.
226
226
227 The working directory may not include every file from a manifest. The
227 The working directory may not include every file from a manifest. The
228 matcher obtained by this property will match a path if it is to be
228 matcher obtained by this property will match a path if it is to be
229 included in the working directory.
229 included in the working directory.
230
230
231 When sparse is disabled, return None.
231 When sparse is disabled, return None.
232 """
232 """
233 if self._sparsematchfn is None:
233 if self._sparsematchfn is None:
234 return None
234 return None
235 # TODO there is potential to cache this property. For now, the matcher
235 # TODO there is potential to cache this property. For now, the matcher
236 # is resolved on every access. (But the called function does use a
236 # is resolved on every access. (But the called function does use a
237 # cache to keep the lookup fast.)
237 # cache to keep the lookup fast.)
238 return self._sparsematchfn()
238 return self._sparsematchfn()
239
239
240 @repocache(b'branch')
240 @repocache(b'branch')
241 def _branch(self):
241 def _branch(self):
242 try:
242 try:
243 return self._opener.read(b"branch").strip() or b"default"
243 return self._opener.read(b"branch").strip() or b"default"
244 except FileNotFoundError:
244 except FileNotFoundError:
245 return b"default"
245 return b"default"
246
246
247 @property
247 @property
248 def _pl(self):
248 def _pl(self):
249 return self._map.parents()
249 return self._map.parents()
250
250
251 def hasdir(self, d):
251 def hasdir(self, d):
252 return self._map.hastrackeddir(d)
252 return self._map.hastrackeddir(d)
253
253
254 @rootcache(b'.hgignore')
254 @rootcache(b'.hgignore')
255 def _ignore(self):
255 def _ignore(self):
256 files = self._ignorefiles()
256 files = self._ignorefiles()
257 if not files:
257 if not files:
258 return matchmod.never()
258 return matchmod.never()
259
259
260 pats = [b'include:%s' % f for f in files]
260 pats = [b'include:%s' % f for f in files]
261 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
261 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
262
262
263 @propertycache
263 @propertycache
264 def _slash(self):
264 def _slash(self):
265 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
265 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
266
266
267 @propertycache
267 @propertycache
268 def _checklink(self):
268 def _checklink(self):
269 return util.checklink(self._root)
269 return util.checklink(self._root)
270
270
271 @propertycache
271 @propertycache
272 def _checkexec(self):
272 def _checkexec(self):
273 return bool(util.checkexec(self._root))
273 return bool(util.checkexec(self._root))
274
274
275 @propertycache
275 @propertycache
276 def _checkcase(self):
276 def _checkcase(self):
277 return not util.fscasesensitive(self._join(b'.hg'))
277 return not util.fscasesensitive(self._join(b'.hg'))
278
278
279 def _join(self, f):
279 def _join(self, f):
280 # much faster than os.path.join()
280 # much faster than os.path.join()
281 # it's safe because f is always a relative path
281 # it's safe because f is always a relative path
282 return self._rootdir + f
282 return self._rootdir + f
283
283
284 def flagfunc(self, buildfallback):
284 def flagfunc(self, buildfallback):
285 """build a callable that returns flags associated with a filename
285 """build a callable that returns flags associated with a filename
286
286
287 The information is extracted from three possible layers:
287 The information is extracted from three possible layers:
288 1. the file system if it supports the information
288 1. the file system if it supports the information
289 2. the "fallback" information stored in the dirstate if any
289 2. the "fallback" information stored in the dirstate if any
290 3. a more expensive mechanism inferring the flags from the parents.
290 3. a more expensive mechanism inferring the flags from the parents.
291 """
291 """
292
292
293 # small hack to cache the result of buildfallback()
293 # small hack to cache the result of buildfallback()
294 fallback_func = []
294 fallback_func = []
295
295
296 def get_flags(x):
296 def get_flags(x):
297 entry = None
297 entry = None
298 fallback_value = None
298 fallback_value = None
299 try:
299 try:
300 st = os.lstat(self._join(x))
300 st = os.lstat(self._join(x))
301 except OSError:
301 except OSError:
302 return b''
302 return b''
303
303
304 if self._checklink:
304 if self._checklink:
305 if util.statislink(st):
305 if util.statislink(st):
306 return b'l'
306 return b'l'
307 else:
307 else:
308 entry = self.get_entry(x)
308 entry = self.get_entry(x)
309 if entry.has_fallback_symlink:
309 if entry.has_fallback_symlink:
310 if entry.fallback_symlink:
310 if entry.fallback_symlink:
311 return b'l'
311 return b'l'
312 else:
312 else:
313 if not fallback_func:
313 if not fallback_func:
314 fallback_func.append(buildfallback())
314 fallback_func.append(buildfallback())
315 fallback_value = fallback_func[0](x)
315 fallback_value = fallback_func[0](x)
316 if b'l' in fallback_value:
316 if b'l' in fallback_value:
317 return b'l'
317 return b'l'
318
318
319 if self._checkexec:
319 if self._checkexec:
320 if util.statisexec(st):
320 if util.statisexec(st):
321 return b'x'
321 return b'x'
322 else:
322 else:
323 if entry is None:
323 if entry is None:
324 entry = self.get_entry(x)
324 entry = self.get_entry(x)
325 if entry.has_fallback_exec:
325 if entry.has_fallback_exec:
326 if entry.fallback_exec:
326 if entry.fallback_exec:
327 return b'x'
327 return b'x'
328 else:
328 else:
329 if fallback_value is None:
329 if fallback_value is None:
330 if not fallback_func:
330 if not fallback_func:
331 fallback_func.append(buildfallback())
331 fallback_func.append(buildfallback())
332 fallback_value = fallback_func[0](x)
332 fallback_value = fallback_func[0](x)
333 if b'x' in fallback_value:
333 if b'x' in fallback_value:
334 return b'x'
334 return b'x'
335 return b''
335 return b''
336
336
337 return get_flags
337 return get_flags
338
338
339 @propertycache
339 @propertycache
340 def _cwd(self):
340 def _cwd(self):
341 # internal config: ui.forcecwd
341 # internal config: ui.forcecwd
342 forcecwd = self._ui.config(b'ui', b'forcecwd')
342 forcecwd = self._ui.config(b'ui', b'forcecwd')
343 if forcecwd:
343 if forcecwd:
344 return forcecwd
344 return forcecwd
345 return encoding.getcwd()
345 return encoding.getcwd()
346
346
347 def getcwd(self):
347 def getcwd(self):
348 """Return the path from which a canonical path is calculated.
348 """Return the path from which a canonical path is calculated.
349
349
350 This path should be used to resolve file patterns or to convert
350 This path should be used to resolve file patterns or to convert
351 canonical paths back to file paths for display. It shouldn't be
351 canonical paths back to file paths for display. It shouldn't be
352 used to get real file paths. Use vfs functions instead.
352 used to get real file paths. Use vfs functions instead.
353 """
353 """
354 cwd = self._cwd
354 cwd = self._cwd
355 if cwd == self._root:
355 if cwd == self._root:
356 return b''
356 return b''
357 # self._root ends with a path separator if self._root is '/' or 'C:\'
357 # self._root ends with a path separator if self._root is '/' or 'C:\'
358 rootsep = self._root
358 rootsep = self._root
359 if not util.endswithsep(rootsep):
359 if not util.endswithsep(rootsep):
360 rootsep += pycompat.ossep
360 rootsep += pycompat.ossep
361 if cwd.startswith(rootsep):
361 if cwd.startswith(rootsep):
362 return cwd[len(rootsep) :]
362 return cwd[len(rootsep) :]
363 else:
363 else:
364 # we're outside the repo. return an absolute path.
364 # we're outside the repo. return an absolute path.
365 return cwd
365 return cwd
366
366
367 def pathto(self, f, cwd=None):
367 def pathto(self, f, cwd=None):
368 if cwd is None:
368 if cwd is None:
369 cwd = self.getcwd()
369 cwd = self.getcwd()
370 path = util.pathto(self._root, cwd, f)
370 path = util.pathto(self._root, cwd, f)
371 if self._slash:
371 if self._slash:
372 return util.pconvert(path)
372 return util.pconvert(path)
373 return path
373 return path
374
374
375 def get_entry(self, path):
375 def get_entry(self, path):
376 """return a DirstateItem for the associated path"""
376 """return a DirstateItem for the associated path"""
377 entry = self._map.get(path)
377 entry = self._map.get(path)
378 if entry is None:
378 if entry is None:
379 return DirstateItem()
379 return DirstateItem()
380 return entry
380 return entry
381
381
382 def __contains__(self, key):
382 def __contains__(self, key):
383 return key in self._map
383 return key in self._map
384
384
385 def __iter__(self):
385 def __iter__(self):
386 return iter(sorted(self._map))
386 return iter(sorted(self._map))
387
387
388 def items(self):
388 def items(self):
389 return self._map.items()
389 return self._map.items()
390
390
391 iteritems = items
391 iteritems = items
392
392
393 def parents(self):
393 def parents(self):
394 return [self._validate(p) for p in self._pl]
394 return [self._validate(p) for p in self._pl]
395
395
396 def p1(self):
396 def p1(self):
397 return self._validate(self._pl[0])
397 return self._validate(self._pl[0])
398
398
399 def p2(self):
399 def p2(self):
400 return self._validate(self._pl[1])
400 return self._validate(self._pl[1])
401
401
402 @property
402 @property
403 def in_merge(self):
403 def in_merge(self):
404 """True if a merge is in progress"""
404 """True if a merge is in progress"""
405 return self._pl[1] != self._nodeconstants.nullid
405 return self._pl[1] != self._nodeconstants.nullid
406
406
407 def branch(self):
407 def branch(self):
408 return encoding.tolocal(self._branch)
408 return encoding.tolocal(self._branch)
409
409
410 def setparents(self, p1, p2=None):
410 def setparents(self, p1, p2=None):
411 """Set dirstate parents to p1 and p2.
411 """Set dirstate parents to p1 and p2.
412
412
413 When moving from two parents to one, "merged" entries are
413 When moving from two parents to one, "merged" entries are
414 adjusted to normal and previous copy records discarded and
414 adjusted to normal and previous copy records discarded and
415 returned by the call.
415 returned by the call.
416
416
417 See localrepo.setparents()
417 See localrepo.setparents()
418 """
418 """
419 if p2 is None:
419 if p2 is None:
420 p2 = self._nodeconstants.nullid
420 p2 = self._nodeconstants.nullid
421 if self._parentwriters == 0:
421 if self._changing_level == 0:
422 raise ValueError(
422 raise ValueError(
423 b"cannot set dirstate parent outside of "
423 b"cannot set dirstate parent outside of "
424 b"dirstate.changing_parents context manager"
424 b"dirstate.changing_parents context manager"
425 )
425 )
426
426
427 self._dirty = True
427 self._dirty = True
428 oldp2 = self._pl[1]
428 oldp2 = self._pl[1]
429 if self._origpl is None:
429 if self._origpl is None:
430 self._origpl = self._pl
430 self._origpl = self._pl
431 nullid = self._nodeconstants.nullid
431 nullid = self._nodeconstants.nullid
432 # True if we need to fold p2 related state back to a linear case
432 # True if we need to fold p2 related state back to a linear case
433 fold_p2 = oldp2 != nullid and p2 == nullid
433 fold_p2 = oldp2 != nullid and p2 == nullid
434 return self._map.setparents(p1, p2, fold_p2=fold_p2)
434 return self._map.setparents(p1, p2, fold_p2=fold_p2)
435
435
436 def setbranch(self, branch):
436 def setbranch(self, branch):
437 self.__class__._branch.set(self, encoding.fromlocal(branch))
437 self.__class__._branch.set(self, encoding.fromlocal(branch))
438 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
438 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
439 try:
439 try:
440 f.write(self._branch + b'\n')
440 f.write(self._branch + b'\n')
441 f.close()
441 f.close()
442
442
443 # make sure filecache has the correct stat info for _branch after
443 # make sure filecache has the correct stat info for _branch after
444 # replacing the underlying file
444 # replacing the underlying file
445 ce = self._filecache[b'_branch']
445 ce = self._filecache[b'_branch']
446 if ce:
446 if ce:
447 ce.refresh()
447 ce.refresh()
448 except: # re-raises
448 except: # re-raises
449 f.discard()
449 f.discard()
450 raise
450 raise
451
451
452 def invalidate(self):
452 def invalidate(self):
453 """Causes the next access to reread the dirstate.
453 """Causes the next access to reread the dirstate.
454
454
455 This is different from localrepo.invalidatedirstate() because it always
455 This is different from localrepo.invalidatedirstate() because it always
456 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
456 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
457 check whether the dirstate has changed before rereading it."""
457 check whether the dirstate has changed before rereading it."""
458
458
459 for a in ("_map", "_branch", "_ignore"):
459 for a in ("_map", "_branch", "_ignore"):
460 if a in self.__dict__:
460 if a in self.__dict__:
461 delattr(self, a)
461 delattr(self, a)
462 self._dirty = False
462 self._dirty = False
463 self._dirty_tracked_set = False
463 self._dirty_tracked_set = False
464 self._invalidated_context = self._parentwriters > 0
464 self._invalidated_context = self._changing_level > 0
465 self._origpl = None
465 self._origpl = None
466
466
467 def copy(self, source, dest):
467 def copy(self, source, dest):
468 """Mark dest as a copy of source. Unmark dest if source is None."""
468 """Mark dest as a copy of source. Unmark dest if source is None."""
469 if source == dest:
469 if source == dest:
470 return
470 return
471 self._dirty = True
471 self._dirty = True
472 if source is not None:
472 if source is not None:
473 self._check_sparse(source)
473 self._check_sparse(source)
474 self._map.copymap[dest] = source
474 self._map.copymap[dest] = source
475 else:
475 else:
476 self._map.copymap.pop(dest, None)
476 self._map.copymap.pop(dest, None)
477
477
478 def copied(self, file):
478 def copied(self, file):
479 return self._map.copymap.get(file, None)
479 return self._map.copymap.get(file, None)
480
480
481 def copies(self):
481 def copies(self):
482 return self._map.copymap
482 return self._map.copymap
483
483
484 @requires_not_changing_parents
484 @requires_not_changing_parents
485 def set_tracked(self, filename, reset_copy=False):
485 def set_tracked(self, filename, reset_copy=False):
486 """a "public" method for generic code to mark a file as tracked
486 """a "public" method for generic code to mark a file as tracked
487
487
488 This function is to be called outside of "update/merge" case. For
488 This function is to be called outside of "update/merge" case. For
489 example by a command like `hg add X`.
489 example by a command like `hg add X`.
490
490
491 if reset_copy is set, any existing copy information will be dropped.
491 if reset_copy is set, any existing copy information will be dropped.
492
492
493 return True the file was previously untracked, False otherwise.
493 return True the file was previously untracked, False otherwise.
494 """
494 """
495 self._dirty = True
495 self._dirty = True
496 entry = self._map.get(filename)
496 entry = self._map.get(filename)
497 if entry is None or not entry.tracked:
497 if entry is None or not entry.tracked:
498 self._check_new_tracked_filename(filename)
498 self._check_new_tracked_filename(filename)
499 pre_tracked = self._map.set_tracked(filename)
499 pre_tracked = self._map.set_tracked(filename)
500 if reset_copy:
500 if reset_copy:
501 self._map.copymap.pop(filename, None)
501 self._map.copymap.pop(filename, None)
502 if pre_tracked:
502 if pre_tracked:
503 self._dirty_tracked_set = True
503 self._dirty_tracked_set = True
504 return pre_tracked
504 return pre_tracked
505
505
506 @requires_not_changing_parents
506 @requires_not_changing_parents
507 def set_untracked(self, filename):
507 def set_untracked(self, filename):
508 """a "public" method for generic code to mark a file as untracked
508 """a "public" method for generic code to mark a file as untracked
509
509
510 This function is to be called outside of "update/merge" case. For
510 This function is to be called outside of "update/merge" case. For
511 example by a command like `hg remove X`.
511 example by a command like `hg remove X`.
512
512
513 return True the file was previously tracked, False otherwise.
513 return True the file was previously tracked, False otherwise.
514 """
514 """
515 ret = self._map.set_untracked(filename)
515 ret = self._map.set_untracked(filename)
516 if ret:
516 if ret:
517 self._dirty = True
517 self._dirty = True
518 self._dirty_tracked_set = True
518 self._dirty_tracked_set = True
519 return ret
519 return ret
520
520
521 @requires_not_changing_parents
521 @requires_not_changing_parents
522 def set_clean(self, filename, parentfiledata):
522 def set_clean(self, filename, parentfiledata):
523 """record that the current state of the file on disk is known to be clean"""
523 """record that the current state of the file on disk is known to be clean"""
524 self._dirty = True
524 self._dirty = True
525 if not self._map[filename].tracked:
525 if not self._map[filename].tracked:
526 self._check_new_tracked_filename(filename)
526 self._check_new_tracked_filename(filename)
527 (mode, size, mtime) = parentfiledata
527 (mode, size, mtime) = parentfiledata
528 self._map.set_clean(filename, mode, size, mtime)
528 self._map.set_clean(filename, mode, size, mtime)
529
529
530 @requires_not_changing_parents
530 @requires_not_changing_parents
531 def set_possibly_dirty(self, filename):
531 def set_possibly_dirty(self, filename):
532 """record that the current state of the file on disk is unknown"""
532 """record that the current state of the file on disk is unknown"""
533 self._dirty = True
533 self._dirty = True
534 self._map.set_possibly_dirty(filename)
534 self._map.set_possibly_dirty(filename)
535
535
536 @requires_changing_parents
536 @requires_changing_parents
537 def update_file_p1(
537 def update_file_p1(
538 self,
538 self,
539 filename,
539 filename,
540 p1_tracked,
540 p1_tracked,
541 ):
541 ):
542 """Set a file as tracked in the parent (or not)
542 """Set a file as tracked in the parent (or not)
543
543
544 This is to be called when adjusting the dirstate to a new parent after a history
544 This is to be called when adjusting the dirstate to a new parent after a history
545 rewriting operation.
545 rewriting operation.
546
546
547 It should not be called during a merge (p2 != nullid) and only within
547 It should not be called during a merge (p2 != nullid) and only within
548 a `with dirstate.changing_parents(repo):` context.
548 a `with dirstate.changing_parents(repo):` context.
549 """
549 """
550 if self.in_merge:
550 if self.in_merge:
551 msg = b'update_file_reference should not be called when merging'
551 msg = b'update_file_reference should not be called when merging'
552 raise error.ProgrammingError(msg)
552 raise error.ProgrammingError(msg)
553 entry = self._map.get(filename)
553 entry = self._map.get(filename)
554 if entry is None:
554 if entry is None:
555 wc_tracked = False
555 wc_tracked = False
556 else:
556 else:
557 wc_tracked = entry.tracked
557 wc_tracked = entry.tracked
558 if not (p1_tracked or wc_tracked):
558 if not (p1_tracked or wc_tracked):
559 # the file is no longer relevant to anyone
559 # the file is no longer relevant to anyone
560 if self._map.get(filename) is not None:
560 if self._map.get(filename) is not None:
561 self._map.reset_state(filename)
561 self._map.reset_state(filename)
562 self._dirty = True
562 self._dirty = True
563 elif (not p1_tracked) and wc_tracked:
563 elif (not p1_tracked) and wc_tracked:
564 if entry is not None and entry.added:
564 if entry is not None and entry.added:
565 return # avoid dropping copy information (maybe?)
565 return # avoid dropping copy information (maybe?)
566
566
567 self._map.reset_state(
567 self._map.reset_state(
568 filename,
568 filename,
569 wc_tracked,
569 wc_tracked,
570 p1_tracked,
570 p1_tracked,
571 # the underlying reference might have changed, we will have to
571 # the underlying reference might have changed, we will have to
572 # check it.
572 # check it.
573 has_meaningful_mtime=False,
573 has_meaningful_mtime=False,
574 )
574 )
575
575
576 @requires_changing_parents
576 @requires_changing_parents
577 def update_file(
577 def update_file(
578 self,
578 self,
579 filename,
579 filename,
580 wc_tracked,
580 wc_tracked,
581 p1_tracked,
581 p1_tracked,
582 p2_info=False,
582 p2_info=False,
583 possibly_dirty=False,
583 possibly_dirty=False,
584 parentfiledata=None,
584 parentfiledata=None,
585 ):
585 ):
586 """update the information about a file in the dirstate
586 """update the information about a file in the dirstate
587
587
588 This is to be called when the direstates parent changes to keep track
588 This is to be called when the direstates parent changes to keep track
589 of what is the file situation in regards to the working copy and its parent.
589 of what is the file situation in regards to the working copy and its parent.
590
590
591 This function must be called within a `dirstate.changing_parents` context.
591 This function must be called within a `dirstate.changing_parents` context.
592
592
593 note: the API is at an early stage and we might need to adjust it
593 note: the API is at an early stage and we might need to adjust it
594 depending of what information ends up being relevant and useful to
594 depending of what information ends up being relevant and useful to
595 other processing.
595 other processing.
596 """
596 """
597 self._update_file(
597 self._update_file(
598 filename=filename,
598 filename=filename,
599 wc_tracked=wc_tracked,
599 wc_tracked=wc_tracked,
600 p1_tracked=p1_tracked,
600 p1_tracked=p1_tracked,
601 p2_info=p2_info,
601 p2_info=p2_info,
602 possibly_dirty=possibly_dirty,
602 possibly_dirty=possibly_dirty,
603 parentfiledata=parentfiledata,
603 parentfiledata=parentfiledata,
604 )
604 )
605
605
606 def hacky_extension_update_file(self, *args, **kwargs):
606 def hacky_extension_update_file(self, *args, **kwargs):
607 """NEVER USE THIS, YOU DO NOT NEED IT
607 """NEVER USE THIS, YOU DO NOT NEED IT
608
608
609 This function is a variant of "update_file" to be called by a small set
609 This function is a variant of "update_file" to be called by a small set
610 of extensions, it also adjust the internal state of file, but can be
610 of extensions, it also adjust the internal state of file, but can be
611 called outside an `changing_parents` context.
611 called outside an `changing_parents` context.
612
612
613 A very small number of extension meddle with the working copy content
613 A very small number of extension meddle with the working copy content
614 in a way that requires to adjust the dirstate accordingly. At the time
614 in a way that requires to adjust the dirstate accordingly. At the time
615 this command is written they are :
615 this command is written they are :
616 - keyword,
616 - keyword,
617 - largefile,
617 - largefile,
618 PLEASE DO NOT GROW THIS LIST ANY FURTHER.
618 PLEASE DO NOT GROW THIS LIST ANY FURTHER.
619
619
620 This function could probably be replaced by more semantic one (like
620 This function could probably be replaced by more semantic one (like
621 "adjust expected size" or "always revalidate file content", etc)
621 "adjust expected size" or "always revalidate file content", etc)
622 however at the time where this is writen, this is too much of a detour
622 however at the time where this is writen, this is too much of a detour
623 to be considered.
623 to be considered.
624 """
624 """
625 self._update_file(
625 self._update_file(
626 *args,
626 *args,
627 **kwargs,
627 **kwargs,
628 )
628 )
629
629
630 def _update_file(
630 def _update_file(
631 self,
631 self,
632 filename,
632 filename,
633 wc_tracked,
633 wc_tracked,
634 p1_tracked,
634 p1_tracked,
635 p2_info=False,
635 p2_info=False,
636 possibly_dirty=False,
636 possibly_dirty=False,
637 parentfiledata=None,
637 parentfiledata=None,
638 ):
638 ):
639
639
640 # note: I do not think we need to double check name clash here since we
640 # note: I do not think we need to double check name clash here since we
641 # are in a update/merge case that should already have taken care of
641 # are in a update/merge case that should already have taken care of
642 # this. The test agrees
642 # this. The test agrees
643
643
644 self._dirty = True
644 self._dirty = True
645 old_entry = self._map.get(filename)
645 old_entry = self._map.get(filename)
646 if old_entry is None:
646 if old_entry is None:
647 prev_tracked = False
647 prev_tracked = False
648 else:
648 else:
649 prev_tracked = old_entry.tracked
649 prev_tracked = old_entry.tracked
650 if prev_tracked != wc_tracked:
650 if prev_tracked != wc_tracked:
651 self._dirty_tracked_set = True
651 self._dirty_tracked_set = True
652
652
653 self._map.reset_state(
653 self._map.reset_state(
654 filename,
654 filename,
655 wc_tracked,
655 wc_tracked,
656 p1_tracked,
656 p1_tracked,
657 p2_info=p2_info,
657 p2_info=p2_info,
658 has_meaningful_mtime=not possibly_dirty,
658 has_meaningful_mtime=not possibly_dirty,
659 parentfiledata=parentfiledata,
659 parentfiledata=parentfiledata,
660 )
660 )
661
661
662 def _check_new_tracked_filename(self, filename):
662 def _check_new_tracked_filename(self, filename):
663 scmutil.checkfilename(filename)
663 scmutil.checkfilename(filename)
664 if self._map.hastrackeddir(filename):
664 if self._map.hastrackeddir(filename):
665 msg = _(b'directory %r already in dirstate')
665 msg = _(b'directory %r already in dirstate')
666 msg %= pycompat.bytestr(filename)
666 msg %= pycompat.bytestr(filename)
667 raise error.Abort(msg)
667 raise error.Abort(msg)
668 # shadows
668 # shadows
669 for d in pathutil.finddirs(filename):
669 for d in pathutil.finddirs(filename):
670 if self._map.hastrackeddir(d):
670 if self._map.hastrackeddir(d):
671 break
671 break
672 entry = self._map.get(d)
672 entry = self._map.get(d)
673 if entry is not None and not entry.removed:
673 if entry is not None and not entry.removed:
674 msg = _(b'file %r in dirstate clashes with %r')
674 msg = _(b'file %r in dirstate clashes with %r')
675 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
675 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
676 raise error.Abort(msg)
676 raise error.Abort(msg)
677 self._check_sparse(filename)
677 self._check_sparse(filename)
678
678
679 def _check_sparse(self, filename):
679 def _check_sparse(self, filename):
680 """Check that a filename is inside the sparse profile"""
680 """Check that a filename is inside the sparse profile"""
681 sparsematch = self._sparsematcher
681 sparsematch = self._sparsematcher
682 if sparsematch is not None and not sparsematch.always():
682 if sparsematch is not None and not sparsematch.always():
683 if not sparsematch(filename):
683 if not sparsematch(filename):
684 msg = _(b"cannot add '%s' - it is outside the sparse checkout")
684 msg = _(b"cannot add '%s' - it is outside the sparse checkout")
685 hint = _(
685 hint = _(
686 b'include file with `hg debugsparse --include <pattern>` or use '
686 b'include file with `hg debugsparse --include <pattern>` or use '
687 b'`hg add -s <file>` to include file directory while adding'
687 b'`hg add -s <file>` to include file directory while adding'
688 )
688 )
689 raise error.Abort(msg % filename, hint=hint)
689 raise error.Abort(msg % filename, hint=hint)
690
690
691 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
691 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
692 if exists is None:
692 if exists is None:
693 exists = os.path.lexists(os.path.join(self._root, path))
693 exists = os.path.lexists(os.path.join(self._root, path))
694 if not exists:
694 if not exists:
695 # Maybe a path component exists
695 # Maybe a path component exists
696 if not ignoremissing and b'/' in path:
696 if not ignoremissing and b'/' in path:
697 d, f = path.rsplit(b'/', 1)
697 d, f = path.rsplit(b'/', 1)
698 d = self._normalize(d, False, ignoremissing, None)
698 d = self._normalize(d, False, ignoremissing, None)
699 folded = d + b"/" + f
699 folded = d + b"/" + f
700 else:
700 else:
701 # No path components, preserve original case
701 # No path components, preserve original case
702 folded = path
702 folded = path
703 else:
703 else:
704 # recursively normalize leading directory components
704 # recursively normalize leading directory components
705 # against dirstate
705 # against dirstate
706 if b'/' in normed:
706 if b'/' in normed:
707 d, f = normed.rsplit(b'/', 1)
707 d, f = normed.rsplit(b'/', 1)
708 d = self._normalize(d, False, ignoremissing, True)
708 d = self._normalize(d, False, ignoremissing, True)
709 r = self._root + b"/" + d
709 r = self._root + b"/" + d
710 folded = d + b"/" + util.fspath(f, r)
710 folded = d + b"/" + util.fspath(f, r)
711 else:
711 else:
712 folded = util.fspath(normed, self._root)
712 folded = util.fspath(normed, self._root)
713 storemap[normed] = folded
713 storemap[normed] = folded
714
714
715 return folded
715 return folded
716
716
717 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
717 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
718 normed = util.normcase(path)
718 normed = util.normcase(path)
719 folded = self._map.filefoldmap.get(normed, None)
719 folded = self._map.filefoldmap.get(normed, None)
720 if folded is None:
720 if folded is None:
721 if isknown:
721 if isknown:
722 folded = path
722 folded = path
723 else:
723 else:
724 folded = self._discoverpath(
724 folded = self._discoverpath(
725 path, normed, ignoremissing, exists, self._map.filefoldmap
725 path, normed, ignoremissing, exists, self._map.filefoldmap
726 )
726 )
727 return folded
727 return folded
728
728
729 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
729 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
730 normed = util.normcase(path)
730 normed = util.normcase(path)
731 folded = self._map.filefoldmap.get(normed, None)
731 folded = self._map.filefoldmap.get(normed, None)
732 if folded is None:
732 if folded is None:
733 folded = self._map.dirfoldmap.get(normed, None)
733 folded = self._map.dirfoldmap.get(normed, None)
734 if folded is None:
734 if folded is None:
735 if isknown:
735 if isknown:
736 folded = path
736 folded = path
737 else:
737 else:
738 # store discovered result in dirfoldmap so that future
738 # store discovered result in dirfoldmap so that future
739 # normalizefile calls don't start matching directories
739 # normalizefile calls don't start matching directories
740 folded = self._discoverpath(
740 folded = self._discoverpath(
741 path, normed, ignoremissing, exists, self._map.dirfoldmap
741 path, normed, ignoremissing, exists, self._map.dirfoldmap
742 )
742 )
743 return folded
743 return folded
744
744
745 def normalize(self, path, isknown=False, ignoremissing=False):
745 def normalize(self, path, isknown=False, ignoremissing=False):
746 """
746 """
747 normalize the case of a pathname when on a casefolding filesystem
747 normalize the case of a pathname when on a casefolding filesystem
748
748
749 isknown specifies whether the filename came from walking the
749 isknown specifies whether the filename came from walking the
750 disk, to avoid extra filesystem access.
750 disk, to avoid extra filesystem access.
751
751
752 If ignoremissing is True, missing path are returned
752 If ignoremissing is True, missing path are returned
753 unchanged. Otherwise, we try harder to normalize possibly
753 unchanged. Otherwise, we try harder to normalize possibly
754 existing path components.
754 existing path components.
755
755
756 The normalized case is determined based on the following precedence:
756 The normalized case is determined based on the following precedence:
757
757
758 - version of name already stored in the dirstate
758 - version of name already stored in the dirstate
759 - version of name stored on disk
759 - version of name stored on disk
760 - version provided via command arguments
760 - version provided via command arguments
761 """
761 """
762
762
763 if self._checkcase:
763 if self._checkcase:
764 return self._normalize(path, isknown, ignoremissing)
764 return self._normalize(path, isknown, ignoremissing)
765 return path
765 return path
766
766
767 def clear(self):
767 def clear(self):
768 self._map.clear()
768 self._map.clear()
769 self._dirty = True
769 self._dirty = True
770
770
771 def rebuild(self, parent, allfiles, changedfiles=None):
771 def rebuild(self, parent, allfiles, changedfiles=None):
772
773 matcher = self._sparsematcher
772 matcher = self._sparsematcher
774 if matcher is not None and not matcher.always():
773 if matcher is not None and not matcher.always():
775 # should not add non-matching files
774 # should not add non-matching files
776 allfiles = [f for f in allfiles if matcher(f)]
775 allfiles = [f for f in allfiles if matcher(f)]
777 if changedfiles:
776 if changedfiles:
778 changedfiles = [f for f in changedfiles if matcher(f)]
777 changedfiles = [f for f in changedfiles if matcher(f)]
779
778
780 if changedfiles is not None:
779 if changedfiles is not None:
781 # these files will be deleted from the dirstate when they are
780 # these files will be deleted from the dirstate when they are
782 # not found to be in allfiles
781 # not found to be in allfiles
783 dirstatefilestoremove = {f for f in self if not matcher(f)}
782 dirstatefilestoremove = {f for f in self if not matcher(f)}
784 changedfiles = dirstatefilestoremove.union(changedfiles)
783 changedfiles = dirstatefilestoremove.union(changedfiles)
785
784
786 if changedfiles is None:
785 if changedfiles is None:
787 # Rebuild entire dirstate
786 # Rebuild entire dirstate
788 to_lookup = allfiles
787 to_lookup = allfiles
789 to_drop = []
788 to_drop = []
790 self.clear()
789 self.clear()
791 elif len(changedfiles) < 10:
790 elif len(changedfiles) < 10:
792 # Avoid turning allfiles into a set, which can be expensive if it's
791 # Avoid turning allfiles into a set, which can be expensive if it's
793 # large.
792 # large.
794 to_lookup = []
793 to_lookup = []
795 to_drop = []
794 to_drop = []
796 for f in changedfiles:
795 for f in changedfiles:
797 if f in allfiles:
796 if f in allfiles:
798 to_lookup.append(f)
797 to_lookup.append(f)
799 else:
798 else:
800 to_drop.append(f)
799 to_drop.append(f)
801 else:
800 else:
802 changedfilesset = set(changedfiles)
801 changedfilesset = set(changedfiles)
803 to_lookup = changedfilesset & set(allfiles)
802 to_lookup = changedfilesset & set(allfiles)
804 to_drop = changedfilesset - to_lookup
803 to_drop = changedfilesset - to_lookup
805
804
806 if self._origpl is None:
805 if self._origpl is None:
807 self._origpl = self._pl
806 self._origpl = self._pl
808 self._map.setparents(parent, self._nodeconstants.nullid)
807 self._map.setparents(parent, self._nodeconstants.nullid)
809
808
810 for f in to_lookup:
809 for f in to_lookup:
811
812 if self.in_merge:
810 if self.in_merge:
813 self.set_tracked(f)
811 self.set_tracked(f)
814 else:
812 else:
815 self._map.reset_state(
813 self._map.reset_state(
816 f,
814 f,
817 wc_tracked=True,
815 wc_tracked=True,
818 p1_tracked=True,
816 p1_tracked=True,
819 )
817 )
820 for f in to_drop:
818 for f in to_drop:
821 self._map.reset_state(f)
819 self._map.reset_state(f)
822
820
823 self._dirty = True
821 self._dirty = True
824
822
825 def identity(self):
823 def identity(self):
826 """Return identity of dirstate itself to detect changing in storage
824 """Return identity of dirstate itself to detect changing in storage
827
825
828 If identity of previous dirstate is equal to this, writing
826 If identity of previous dirstate is equal to this, writing
829 changes based on the former dirstate out can keep consistency.
827 changes based on the former dirstate out can keep consistency.
830 """
828 """
831 return self._map.identity
829 return self._map.identity
832
830
833 def write(self, tr):
831 def write(self, tr):
834 if not self._dirty:
832 if not self._dirty:
835 return
833 return
836
834
837 write_key = self._use_tracked_hint and self._dirty_tracked_set
835 write_key = self._use_tracked_hint and self._dirty_tracked_set
838 if tr:
836 if tr:
839 # delay writing in-memory changes out
837 # delay writing in-memory changes out
840 tr.addfilegenerator(
838 tr.addfilegenerator(
841 b'dirstate-1-main',
839 b'dirstate-1-main',
842 (self._filename,),
840 (self._filename,),
843 lambda f: self._writedirstate(tr, f),
841 lambda f: self._writedirstate(tr, f),
844 location=b'plain',
842 location=b'plain',
845 post_finalize=True,
843 post_finalize=True,
846 )
844 )
847 if write_key:
845 if write_key:
848 tr.addfilegenerator(
846 tr.addfilegenerator(
849 b'dirstate-2-key-post',
847 b'dirstate-2-key-post',
850 (self._filename_th,),
848 (self._filename_th,),
851 lambda f: self._write_tracked_hint(tr, f),
849 lambda f: self._write_tracked_hint(tr, f),
852 location=b'plain',
850 location=b'plain',
853 post_finalize=True,
851 post_finalize=True,
854 )
852 )
855 return
853 return
856
854
857 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
855 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
858 with file(self._filename) as f:
856 with file(self._filename) as f:
859 self._writedirstate(tr, f)
857 self._writedirstate(tr, f)
860 if write_key:
858 if write_key:
861 # we update the key-file after writing to make sure reader have a
859 # we update the key-file after writing to make sure reader have a
862 # key that match the newly written content
860 # key that match the newly written content
863 with file(self._filename_th) as f:
861 with file(self._filename_th) as f:
864 self._write_tracked_hint(tr, f)
862 self._write_tracked_hint(tr, f)
865
863
866 def delete_tracked_hint(self):
864 def delete_tracked_hint(self):
867 """remove the tracked_hint file
865 """remove the tracked_hint file
868
866
869 To be used by format downgrades operation"""
867 To be used by format downgrades operation"""
870 self._opener.unlink(self._filename_th)
868 self._opener.unlink(self._filename_th)
871 self._use_tracked_hint = False
869 self._use_tracked_hint = False
872
870
873 def addparentchangecallback(self, category, callback):
871 def addparentchangecallback(self, category, callback):
874 """add a callback to be called when the wd parents are changed
872 """add a callback to be called when the wd parents are changed
875
873
876 Callback will be called with the following arguments:
874 Callback will be called with the following arguments:
877 dirstate, (oldp1, oldp2), (newp1, newp2)
875 dirstate, (oldp1, oldp2), (newp1, newp2)
878
876
879 Category is a unique identifier to allow overwriting an old callback
877 Category is a unique identifier to allow overwriting an old callback
880 with a newer callback.
878 with a newer callback.
881 """
879 """
882 self._plchangecallbacks[category] = callback
880 self._plchangecallbacks[category] = callback
883
881
884 def _writedirstate(self, tr, st):
882 def _writedirstate(self, tr, st):
885 # notify callbacks about parents change
883 # notify callbacks about parents change
886 if self._origpl is not None and self._origpl != self._pl:
884 if self._origpl is not None and self._origpl != self._pl:
887 for c, callback in sorted(self._plchangecallbacks.items()):
885 for c, callback in sorted(self._plchangecallbacks.items()):
888 callback(self, self._origpl, self._pl)
886 callback(self, self._origpl, self._pl)
889 self._origpl = None
887 self._origpl = None
890 self._map.write(tr, st)
888 self._map.write(tr, st)
891 self._dirty = False
889 self._dirty = False
892 self._dirty_tracked_set = False
890 self._dirty_tracked_set = False
893
891
894 def _write_tracked_hint(self, tr, f):
892 def _write_tracked_hint(self, tr, f):
895 key = node.hex(uuid.uuid4().bytes)
893 key = node.hex(uuid.uuid4().bytes)
896 f.write(b"1\n%s\n" % key) # 1 is the format version
894 f.write(b"1\n%s\n" % key) # 1 is the format version
897
895
898 def _dirignore(self, f):
896 def _dirignore(self, f):
899 if self._ignore(f):
897 if self._ignore(f):
900 return True
898 return True
901 for p in pathutil.finddirs(f):
899 for p in pathutil.finddirs(f):
902 if self._ignore(p):
900 if self._ignore(p):
903 return True
901 return True
904 return False
902 return False
905
903
906 def _ignorefiles(self):
904 def _ignorefiles(self):
907 files = []
905 files = []
908 if os.path.exists(self._join(b'.hgignore')):
906 if os.path.exists(self._join(b'.hgignore')):
909 files.append(self._join(b'.hgignore'))
907 files.append(self._join(b'.hgignore'))
910 for name, path in self._ui.configitems(b"ui"):
908 for name, path in self._ui.configitems(b"ui"):
911 if name == b'ignore' or name.startswith(b'ignore.'):
909 if name == b'ignore' or name.startswith(b'ignore.'):
912 # we need to use os.path.join here rather than self._join
910 # we need to use os.path.join here rather than self._join
913 # because path is arbitrary and user-specified
911 # because path is arbitrary and user-specified
914 files.append(os.path.join(self._rootdir, util.expandpath(path)))
912 files.append(os.path.join(self._rootdir, util.expandpath(path)))
915 return files
913 return files
916
914
917 def _ignorefileandline(self, f):
915 def _ignorefileandline(self, f):
918 files = collections.deque(self._ignorefiles())
916 files = collections.deque(self._ignorefiles())
919 visited = set()
917 visited = set()
920 while files:
918 while files:
921 i = files.popleft()
919 i = files.popleft()
922 patterns = matchmod.readpatternfile(
920 patterns = matchmod.readpatternfile(
923 i, self._ui.warn, sourceinfo=True
921 i, self._ui.warn, sourceinfo=True
924 )
922 )
925 for pattern, lineno, line in patterns:
923 for pattern, lineno, line in patterns:
926 kind, p = matchmod._patsplit(pattern, b'glob')
924 kind, p = matchmod._patsplit(pattern, b'glob')
927 if kind == b"subinclude":
925 if kind == b"subinclude":
928 if p not in visited:
926 if p not in visited:
929 files.append(p)
927 files.append(p)
930 continue
928 continue
931 m = matchmod.match(
929 m = matchmod.match(
932 self._root, b'', [], [pattern], warn=self._ui.warn
930 self._root, b'', [], [pattern], warn=self._ui.warn
933 )
931 )
934 if m(f):
932 if m(f):
935 return (i, lineno, line)
933 return (i, lineno, line)
936 visited.add(i)
934 visited.add(i)
937 return (None, -1, b"")
935 return (None, -1, b"")
938
936
939 def _walkexplicit(self, match, subrepos):
937 def _walkexplicit(self, match, subrepos):
940 """Get stat data about the files explicitly specified by match.
938 """Get stat data about the files explicitly specified by match.
941
939
942 Return a triple (results, dirsfound, dirsnotfound).
940 Return a triple (results, dirsfound, dirsnotfound).
943 - results is a mapping from filename to stat result. It also contains
941 - results is a mapping from filename to stat result. It also contains
944 listings mapping subrepos and .hg to None.
942 listings mapping subrepos and .hg to None.
945 - dirsfound is a list of files found to be directories.
943 - dirsfound is a list of files found to be directories.
946 - dirsnotfound is a list of files that the dirstate thinks are
944 - dirsnotfound is a list of files that the dirstate thinks are
947 directories and that were not found."""
945 directories and that were not found."""
948
946
949 def badtype(mode):
947 def badtype(mode):
950 kind = _(b'unknown')
948 kind = _(b'unknown')
951 if stat.S_ISCHR(mode):
949 if stat.S_ISCHR(mode):
952 kind = _(b'character device')
950 kind = _(b'character device')
953 elif stat.S_ISBLK(mode):
951 elif stat.S_ISBLK(mode):
954 kind = _(b'block device')
952 kind = _(b'block device')
955 elif stat.S_ISFIFO(mode):
953 elif stat.S_ISFIFO(mode):
956 kind = _(b'fifo')
954 kind = _(b'fifo')
957 elif stat.S_ISSOCK(mode):
955 elif stat.S_ISSOCK(mode):
958 kind = _(b'socket')
956 kind = _(b'socket')
959 elif stat.S_ISDIR(mode):
957 elif stat.S_ISDIR(mode):
960 kind = _(b'directory')
958 kind = _(b'directory')
961 return _(b'unsupported file type (type is %s)') % kind
959 return _(b'unsupported file type (type is %s)') % kind
962
960
963 badfn = match.bad
961 badfn = match.bad
964 dmap = self._map
962 dmap = self._map
965 lstat = os.lstat
963 lstat = os.lstat
966 getkind = stat.S_IFMT
964 getkind = stat.S_IFMT
967 dirkind = stat.S_IFDIR
965 dirkind = stat.S_IFDIR
968 regkind = stat.S_IFREG
966 regkind = stat.S_IFREG
969 lnkkind = stat.S_IFLNK
967 lnkkind = stat.S_IFLNK
970 join = self._join
968 join = self._join
971 dirsfound = []
969 dirsfound = []
972 foundadd = dirsfound.append
970 foundadd = dirsfound.append
973 dirsnotfound = []
971 dirsnotfound = []
974 notfoundadd = dirsnotfound.append
972 notfoundadd = dirsnotfound.append
975
973
976 if not match.isexact() and self._checkcase:
974 if not match.isexact() and self._checkcase:
977 normalize = self._normalize
975 normalize = self._normalize
978 else:
976 else:
979 normalize = None
977 normalize = None
980
978
981 files = sorted(match.files())
979 files = sorted(match.files())
982 subrepos.sort()
980 subrepos.sort()
983 i, j = 0, 0
981 i, j = 0, 0
984 while i < len(files) and j < len(subrepos):
982 while i < len(files) and j < len(subrepos):
985 subpath = subrepos[j] + b"/"
983 subpath = subrepos[j] + b"/"
986 if files[i] < subpath:
984 if files[i] < subpath:
987 i += 1
985 i += 1
988 continue
986 continue
989 while i < len(files) and files[i].startswith(subpath):
987 while i < len(files) and files[i].startswith(subpath):
990 del files[i]
988 del files[i]
991 j += 1
989 j += 1
992
990
993 if not files or b'' in files:
991 if not files or b'' in files:
994 files = [b'']
992 files = [b'']
995 # constructing the foldmap is expensive, so don't do it for the
993 # constructing the foldmap is expensive, so don't do it for the
996 # common case where files is ['']
994 # common case where files is ['']
997 normalize = None
995 normalize = None
998 results = dict.fromkeys(subrepos)
996 results = dict.fromkeys(subrepos)
999 results[b'.hg'] = None
997 results[b'.hg'] = None
1000
998
1001 for ff in files:
999 for ff in files:
1002 if normalize:
1000 if normalize:
1003 nf = normalize(ff, False, True)
1001 nf = normalize(ff, False, True)
1004 else:
1002 else:
1005 nf = ff
1003 nf = ff
1006 if nf in results:
1004 if nf in results:
1007 continue
1005 continue
1008
1006
1009 try:
1007 try:
1010 st = lstat(join(nf))
1008 st = lstat(join(nf))
1011 kind = getkind(st.st_mode)
1009 kind = getkind(st.st_mode)
1012 if kind == dirkind:
1010 if kind == dirkind:
1013 if nf in dmap:
1011 if nf in dmap:
1014 # file replaced by dir on disk but still in dirstate
1012 # file replaced by dir on disk but still in dirstate
1015 results[nf] = None
1013 results[nf] = None
1016 foundadd((nf, ff))
1014 foundadd((nf, ff))
1017 elif kind == regkind or kind == lnkkind:
1015 elif kind == regkind or kind == lnkkind:
1018 results[nf] = st
1016 results[nf] = st
1019 else:
1017 else:
1020 badfn(ff, badtype(kind))
1018 badfn(ff, badtype(kind))
1021 if nf in dmap:
1019 if nf in dmap:
1022 results[nf] = None
1020 results[nf] = None
1023 except OSError as inst: # nf not found on disk - it is dirstate only
1021 except (OSError) as inst:
1022 # nf not found on disk - it is dirstate only
1024 if nf in dmap: # does it exactly match a missing file?
1023 if nf in dmap: # does it exactly match a missing file?
1025 results[nf] = None
1024 results[nf] = None
1026 else: # does it match a missing directory?
1025 else: # does it match a missing directory?
1027 if self._map.hasdir(nf):
1026 if self._map.hasdir(nf):
1028 notfoundadd(nf)
1027 notfoundadd(nf)
1029 else:
1028 else:
1030 badfn(ff, encoding.strtolocal(inst.strerror))
1029 badfn(ff, encoding.strtolocal(inst.strerror))
1031
1030
1032 # match.files() may contain explicitly-specified paths that shouldn't
1031 # match.files() may contain explicitly-specified paths that shouldn't
1033 # be taken; drop them from the list of files found. dirsfound/notfound
1032 # be taken; drop them from the list of files found. dirsfound/notfound
1034 # aren't filtered here because they will be tested later.
1033 # aren't filtered here because they will be tested later.
1035 if match.anypats():
1034 if match.anypats():
1036 for f in list(results):
1035 for f in list(results):
1037 if f == b'.hg' or f in subrepos:
1036 if f == b'.hg' or f in subrepos:
1038 # keep sentinel to disable further out-of-repo walks
1037 # keep sentinel to disable further out-of-repo walks
1039 continue
1038 continue
1040 if not match(f):
1039 if not match(f):
1041 del results[f]
1040 del results[f]
1042
1041
1043 # Case insensitive filesystems cannot rely on lstat() failing to detect
1042 # Case insensitive filesystems cannot rely on lstat() failing to detect
1044 # a case-only rename. Prune the stat object for any file that does not
1043 # a case-only rename. Prune the stat object for any file that does not
1045 # match the case in the filesystem, if there are multiple files that
1044 # match the case in the filesystem, if there are multiple files that
1046 # normalize to the same path.
1045 # normalize to the same path.
1047 if match.isexact() and self._checkcase:
1046 if match.isexact() and self._checkcase:
1048 normed = {}
1047 normed = {}
1049
1048
1050 for f, st in results.items():
1049 for f, st in results.items():
1051 if st is None:
1050 if st is None:
1052 continue
1051 continue
1053
1052
1054 nc = util.normcase(f)
1053 nc = util.normcase(f)
1055 paths = normed.get(nc)
1054 paths = normed.get(nc)
1056
1055
1057 if paths is None:
1056 if paths is None:
1058 paths = set()
1057 paths = set()
1059 normed[nc] = paths
1058 normed[nc] = paths
1060
1059
1061 paths.add(f)
1060 paths.add(f)
1062
1061
1063 for norm, paths in normed.items():
1062 for norm, paths in normed.items():
1064 if len(paths) > 1:
1063 if len(paths) > 1:
1065 for path in paths:
1064 for path in paths:
1066 folded = self._discoverpath(
1065 folded = self._discoverpath(
1067 path, norm, True, None, self._map.dirfoldmap
1066 path, norm, True, None, self._map.dirfoldmap
1068 )
1067 )
1069 if path != folded:
1068 if path != folded:
1070 results[path] = None
1069 results[path] = None
1071
1070
1072 return results, dirsfound, dirsnotfound
1071 return results, dirsfound, dirsnotfound
1073
1072
1074 def walk(self, match, subrepos, unknown, ignored, full=True):
1073 def walk(self, match, subrepos, unknown, ignored, full=True):
1075 """
1074 """
1076 Walk recursively through the directory tree, finding all files
1075 Walk recursively through the directory tree, finding all files
1077 matched by match.
1076 matched by match.
1078
1077
1079 If full is False, maybe skip some known-clean files.
1078 If full is False, maybe skip some known-clean files.
1080
1079
1081 Return a dict mapping filename to stat-like object (either
1080 Return a dict mapping filename to stat-like object (either
1082 mercurial.osutil.stat instance or return value of os.stat()).
1081 mercurial.osutil.stat instance or return value of os.stat()).
1083
1082
1084 """
1083 """
1085 # full is a flag that extensions that hook into walk can use -- this
1084 # full is a flag that extensions that hook into walk can use -- this
1086 # implementation doesn't use it at all. This satisfies the contract
1085 # implementation doesn't use it at all. This satisfies the contract
1087 # because we only guarantee a "maybe".
1086 # because we only guarantee a "maybe".
1088
1087
1089 if ignored:
1088 if ignored:
1090 ignore = util.never
1089 ignore = util.never
1091 dirignore = util.never
1090 dirignore = util.never
1092 elif unknown:
1091 elif unknown:
1093 ignore = self._ignore
1092 ignore = self._ignore
1094 dirignore = self._dirignore
1093 dirignore = self._dirignore
1095 else:
1094 else:
1096 # if not unknown and not ignored, drop dir recursion and step 2
1095 # if not unknown and not ignored, drop dir recursion and step 2
1097 ignore = util.always
1096 ignore = util.always
1098 dirignore = util.always
1097 dirignore = util.always
1099
1098
1100 if self._sparsematchfn is not None:
1099 if self._sparsematchfn is not None:
1101 em = matchmod.exact(match.files())
1100 em = matchmod.exact(match.files())
1102 sm = matchmod.unionmatcher([self._sparsematcher, em])
1101 sm = matchmod.unionmatcher([self._sparsematcher, em])
1103 match = matchmod.intersectmatchers(match, sm)
1102 match = matchmod.intersectmatchers(match, sm)
1104
1103
1105 matchfn = match.matchfn
1104 matchfn = match.matchfn
1106 matchalways = match.always()
1105 matchalways = match.always()
1107 matchtdir = match.traversedir
1106 matchtdir = match.traversedir
1108 dmap = self._map
1107 dmap = self._map
1109 listdir = util.listdir
1108 listdir = util.listdir
1110 lstat = os.lstat
1109 lstat = os.lstat
1111 dirkind = stat.S_IFDIR
1110 dirkind = stat.S_IFDIR
1112 regkind = stat.S_IFREG
1111 regkind = stat.S_IFREG
1113 lnkkind = stat.S_IFLNK
1112 lnkkind = stat.S_IFLNK
1114 join = self._join
1113 join = self._join
1115
1114
1116 exact = skipstep3 = False
1115 exact = skipstep3 = False
1117 if match.isexact(): # match.exact
1116 if match.isexact(): # match.exact
1118 exact = True
1117 exact = True
1119 dirignore = util.always # skip step 2
1118 dirignore = util.always # skip step 2
1120 elif match.prefix(): # match.match, no patterns
1119 elif match.prefix(): # match.match, no patterns
1121 skipstep3 = True
1120 skipstep3 = True
1122
1121
1123 if not exact and self._checkcase:
1122 if not exact and self._checkcase:
1124 normalize = self._normalize
1123 normalize = self._normalize
1125 normalizefile = self._normalizefile
1124 normalizefile = self._normalizefile
1126 skipstep3 = False
1125 skipstep3 = False
1127 else:
1126 else:
1128 normalize = self._normalize
1127 normalize = self._normalize
1129 normalizefile = None
1128 normalizefile = None
1130
1129
1131 # step 1: find all explicit files
1130 # step 1: find all explicit files
1132 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1131 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1133 if matchtdir:
1132 if matchtdir:
1134 for d in work:
1133 for d in work:
1135 matchtdir(d[0])
1134 matchtdir(d[0])
1136 for d in dirsnotfound:
1135 for d in dirsnotfound:
1137 matchtdir(d)
1136 matchtdir(d)
1138
1137
1139 skipstep3 = skipstep3 and not (work or dirsnotfound)
1138 skipstep3 = skipstep3 and not (work or dirsnotfound)
1140 work = [d for d in work if not dirignore(d[0])]
1139 work = [d for d in work if not dirignore(d[0])]
1141
1140
1142 # step 2: visit subdirectories
1141 # step 2: visit subdirectories
1143 def traverse(work, alreadynormed):
1142 def traverse(work, alreadynormed):
1144 wadd = work.append
1143 wadd = work.append
1145 while work:
1144 while work:
1146 tracing.counter('dirstate.walk work', len(work))
1145 tracing.counter('dirstate.walk work', len(work))
1147 nd = work.pop()
1146 nd = work.pop()
1148 visitentries = match.visitchildrenset(nd)
1147 visitentries = match.visitchildrenset(nd)
1149 if not visitentries:
1148 if not visitentries:
1150 continue
1149 continue
1151 if visitentries == b'this' or visitentries == b'all':
1150 if visitentries == b'this' or visitentries == b'all':
1152 visitentries = None
1151 visitentries = None
1153 skip = None
1152 skip = None
1154 if nd != b'':
1153 if nd != b'':
1155 skip = b'.hg'
1154 skip = b'.hg'
1156 try:
1155 try:
1157 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1156 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1158 entries = listdir(join(nd), stat=True, skip=skip)
1157 entries = listdir(join(nd), stat=True, skip=skip)
1159 except (PermissionError, FileNotFoundError) as inst:
1158 except (PermissionError, FileNotFoundError) as inst:
1160 match.bad(
1159 match.bad(
1161 self.pathto(nd), encoding.strtolocal(inst.strerror)
1160 self.pathto(nd), encoding.strtolocal(inst.strerror)
1162 )
1161 )
1163 continue
1162 continue
1164 for f, kind, st in entries:
1163 for f, kind, st in entries:
1165 # Some matchers may return files in the visitentries set,
1164 # Some matchers may return files in the visitentries set,
1166 # instead of 'this', if the matcher explicitly mentions them
1165 # instead of 'this', if the matcher explicitly mentions them
1167 # and is not an exactmatcher. This is acceptable; we do not
1166 # and is not an exactmatcher. This is acceptable; we do not
1168 # make any hard assumptions about file-or-directory below
1167 # make any hard assumptions about file-or-directory below
1169 # based on the presence of `f` in visitentries. If
1168 # based on the presence of `f` in visitentries. If
1170 # visitchildrenset returned a set, we can always skip the
1169 # visitchildrenset returned a set, we can always skip the
1171 # entries *not* in the set it provided regardless of whether
1170 # entries *not* in the set it provided regardless of whether
1172 # they're actually a file or a directory.
1171 # they're actually a file or a directory.
1173 if visitentries and f not in visitentries:
1172 if visitentries and f not in visitentries:
1174 continue
1173 continue
1175 if normalizefile:
1174 if normalizefile:
1176 # even though f might be a directory, we're only
1175 # even though f might be a directory, we're only
1177 # interested in comparing it to files currently in the
1176 # interested in comparing it to files currently in the
1178 # dmap -- therefore normalizefile is enough
1177 # dmap -- therefore normalizefile is enough
1179 nf = normalizefile(
1178 nf = normalizefile(
1180 nd and (nd + b"/" + f) or f, True, True
1179 nd and (nd + b"/" + f) or f, True, True
1181 )
1180 )
1182 else:
1181 else:
1183 nf = nd and (nd + b"/" + f) or f
1182 nf = nd and (nd + b"/" + f) or f
1184 if nf not in results:
1183 if nf not in results:
1185 if kind == dirkind:
1184 if kind == dirkind:
1186 if not ignore(nf):
1185 if not ignore(nf):
1187 if matchtdir:
1186 if matchtdir:
1188 matchtdir(nf)
1187 matchtdir(nf)
1189 wadd(nf)
1188 wadd(nf)
1190 if nf in dmap and (matchalways or matchfn(nf)):
1189 if nf in dmap and (matchalways or matchfn(nf)):
1191 results[nf] = None
1190 results[nf] = None
1192 elif kind == regkind or kind == lnkkind:
1191 elif kind == regkind or kind == lnkkind:
1193 if nf in dmap:
1192 if nf in dmap:
1194 if matchalways or matchfn(nf):
1193 if matchalways or matchfn(nf):
1195 results[nf] = st
1194 results[nf] = st
1196 elif (matchalways or matchfn(nf)) and not ignore(
1195 elif (matchalways or matchfn(nf)) and not ignore(
1197 nf
1196 nf
1198 ):
1197 ):
1199 # unknown file -- normalize if necessary
1198 # unknown file -- normalize if necessary
1200 if not alreadynormed:
1199 if not alreadynormed:
1201 nf = normalize(nf, False, True)
1200 nf = normalize(nf, False, True)
1202 results[nf] = st
1201 results[nf] = st
1203 elif nf in dmap and (matchalways or matchfn(nf)):
1202 elif nf in dmap and (matchalways or matchfn(nf)):
1204 results[nf] = None
1203 results[nf] = None
1205
1204
1206 for nd, d in work:
1205 for nd, d in work:
1207 # alreadynormed means that processwork doesn't have to do any
1206 # alreadynormed means that processwork doesn't have to do any
1208 # expensive directory normalization
1207 # expensive directory normalization
1209 alreadynormed = not normalize or nd == d
1208 alreadynormed = not normalize or nd == d
1210 traverse([d], alreadynormed)
1209 traverse([d], alreadynormed)
1211
1210
1212 for s in subrepos:
1211 for s in subrepos:
1213 del results[s]
1212 del results[s]
1214 del results[b'.hg']
1213 del results[b'.hg']
1215
1214
1216 # step 3: visit remaining files from dmap
1215 # step 3: visit remaining files from dmap
1217 if not skipstep3 and not exact:
1216 if not skipstep3 and not exact:
1218 # If a dmap file is not in results yet, it was either
1217 # If a dmap file is not in results yet, it was either
1219 # a) not matching matchfn b) ignored, c) missing, or d) under a
1218 # a) not matching matchfn b) ignored, c) missing, or d) under a
1220 # symlink directory.
1219 # symlink directory.
1221 if not results and matchalways:
1220 if not results and matchalways:
1222 visit = [f for f in dmap]
1221 visit = [f for f in dmap]
1223 else:
1222 else:
1224 visit = [f for f in dmap if f not in results and matchfn(f)]
1223 visit = [f for f in dmap if f not in results and matchfn(f)]
1225 visit.sort()
1224 visit.sort()
1226
1225
1227 if unknown:
1226 if unknown:
1228 # unknown == True means we walked all dirs under the roots
1227 # unknown == True means we walked all dirs under the roots
1229 # that wasn't ignored, and everything that matched was stat'ed
1228 # that wasn't ignored, and everything that matched was stat'ed
1230 # and is already in results.
1229 # and is already in results.
1231 # The rest must thus be ignored or under a symlink.
1230 # The rest must thus be ignored or under a symlink.
1232 audit_path = pathutil.pathauditor(self._root, cached=True)
1231 audit_path = pathutil.pathauditor(self._root, cached=True)
1233
1232
1234 for nf in iter(visit):
1233 for nf in iter(visit):
1235 # If a stat for the same file was already added with a
1234 # If a stat for the same file was already added with a
1236 # different case, don't add one for this, since that would
1235 # different case, don't add one for this, since that would
1237 # make it appear as if the file exists under both names
1236 # make it appear as if the file exists under both names
1238 # on disk.
1237 # on disk.
1239 if (
1238 if (
1240 normalizefile
1239 normalizefile
1241 and normalizefile(nf, True, True) in results
1240 and normalizefile(nf, True, True) in results
1242 ):
1241 ):
1243 results[nf] = None
1242 results[nf] = None
1244 # Report ignored items in the dmap as long as they are not
1243 # Report ignored items in the dmap as long as they are not
1245 # under a symlink directory.
1244 # under a symlink directory.
1246 elif audit_path.check(nf):
1245 elif audit_path.check(nf):
1247 try:
1246 try:
1248 results[nf] = lstat(join(nf))
1247 results[nf] = lstat(join(nf))
1249 # file was just ignored, no links, and exists
1248 # file was just ignored, no links, and exists
1250 except OSError:
1249 except OSError:
1251 # file doesn't exist
1250 # file doesn't exist
1252 results[nf] = None
1251 results[nf] = None
1253 else:
1252 else:
1254 # It's either missing or under a symlink directory
1253 # It's either missing or under a symlink directory
1255 # which we in this case report as missing
1254 # which we in this case report as missing
1256 results[nf] = None
1255 results[nf] = None
1257 else:
1256 else:
1258 # We may not have walked the full directory tree above,
1257 # We may not have walked the full directory tree above,
1259 # so stat and check everything we missed.
1258 # so stat and check everything we missed.
1260 iv = iter(visit)
1259 iv = iter(visit)
1261 for st in util.statfiles([join(i) for i in visit]):
1260 for st in util.statfiles([join(i) for i in visit]):
1262 results[next(iv)] = st
1261 results[next(iv)] = st
1263 return results
1262 return results
1264
1263
1265 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1264 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1266 if self._sparsematchfn is not None:
1265 if self._sparsematchfn is not None:
1267 em = matchmod.exact(matcher.files())
1266 em = matchmod.exact(matcher.files())
1268 sm = matchmod.unionmatcher([self._sparsematcher, em])
1267 sm = matchmod.unionmatcher([self._sparsematcher, em])
1269 matcher = matchmod.intersectmatchers(matcher, sm)
1268 matcher = matchmod.intersectmatchers(matcher, sm)
1270 # Force Rayon (Rust parallelism library) to respect the number of
1269 # Force Rayon (Rust parallelism library) to respect the number of
1271 # workers. This is a temporary workaround until Rust code knows
1270 # workers. This is a temporary workaround until Rust code knows
1272 # how to read the config file.
1271 # how to read the config file.
1273 numcpus = self._ui.configint(b"worker", b"numcpus")
1272 numcpus = self._ui.configint(b"worker", b"numcpus")
1274 if numcpus is not None:
1273 if numcpus is not None:
1275 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1274 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1276
1275
1277 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1276 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1278 if not workers_enabled:
1277 if not workers_enabled:
1279 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1278 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1280
1279
1281 (
1280 (
1282 lookup,
1281 lookup,
1283 modified,
1282 modified,
1284 added,
1283 added,
1285 removed,
1284 removed,
1286 deleted,
1285 deleted,
1287 clean,
1286 clean,
1288 ignored,
1287 ignored,
1289 unknown,
1288 unknown,
1290 warnings,
1289 warnings,
1291 bad,
1290 bad,
1292 traversed,
1291 traversed,
1293 dirty,
1292 dirty,
1294 ) = rustmod.status(
1293 ) = rustmod.status(
1295 self._map._map,
1294 self._map._map,
1296 matcher,
1295 matcher,
1297 self._rootdir,
1296 self._rootdir,
1298 self._ignorefiles(),
1297 self._ignorefiles(),
1299 self._checkexec,
1298 self._checkexec,
1300 bool(list_clean),
1299 bool(list_clean),
1301 bool(list_ignored),
1300 bool(list_ignored),
1302 bool(list_unknown),
1301 bool(list_unknown),
1303 bool(matcher.traversedir),
1302 bool(matcher.traversedir),
1304 )
1303 )
1305
1304
1306 self._dirty |= dirty
1305 self._dirty |= dirty
1307
1306
1308 if matcher.traversedir:
1307 if matcher.traversedir:
1309 for dir in traversed:
1308 for dir in traversed:
1310 matcher.traversedir(dir)
1309 matcher.traversedir(dir)
1311
1310
1312 if self._ui.warn:
1311 if self._ui.warn:
1313 for item in warnings:
1312 for item in warnings:
1314 if isinstance(item, tuple):
1313 if isinstance(item, tuple):
1315 file_path, syntax = item
1314 file_path, syntax = item
1316 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1315 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1317 file_path,
1316 file_path,
1318 syntax,
1317 syntax,
1319 )
1318 )
1320 self._ui.warn(msg)
1319 self._ui.warn(msg)
1321 else:
1320 else:
1322 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1321 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1323 self._ui.warn(
1322 self._ui.warn(
1324 msg
1323 msg
1325 % (
1324 % (
1326 pathutil.canonpath(
1325 pathutil.canonpath(
1327 self._rootdir, self._rootdir, item
1326 self._rootdir, self._rootdir, item
1328 ),
1327 ),
1329 b"No such file or directory",
1328 b"No such file or directory",
1330 )
1329 )
1331 )
1330 )
1332
1331
1333 for (fn, message) in bad:
1332 for fn, message in bad:
1334 matcher.bad(fn, encoding.strtolocal(message))
1333 matcher.bad(fn, encoding.strtolocal(message))
1335
1334
1336 status = scmutil.status(
1335 status = scmutil.status(
1337 modified=modified,
1336 modified=modified,
1338 added=added,
1337 added=added,
1339 removed=removed,
1338 removed=removed,
1340 deleted=deleted,
1339 deleted=deleted,
1341 unknown=unknown,
1340 unknown=unknown,
1342 ignored=ignored,
1341 ignored=ignored,
1343 clean=clean,
1342 clean=clean,
1344 )
1343 )
1345 return (lookup, status)
1344 return (lookup, status)
1346
1345
1347 def status(self, match, subrepos, ignored, clean, unknown):
1346 def status(self, match, subrepos, ignored, clean, unknown):
1348 """Determine the status of the working copy relative to the
1347 """Determine the status of the working copy relative to the
1349 dirstate and return a pair of (unsure, status), where status is of type
1348 dirstate and return a pair of (unsure, status), where status is of type
1350 scmutil.status and:
1349 scmutil.status and:
1351
1350
1352 unsure:
1351 unsure:
1353 files that might have been modified since the dirstate was
1352 files that might have been modified since the dirstate was
1354 written, but need to be read to be sure (size is the same
1353 written, but need to be read to be sure (size is the same
1355 but mtime differs)
1354 but mtime differs)
1356 status.modified:
1355 status.modified:
1357 files that have definitely been modified since the dirstate
1356 files that have definitely been modified since the dirstate
1358 was written (different size or mode)
1357 was written (different size or mode)
1359 status.clean:
1358 status.clean:
1360 files that have definitely not been modified since the
1359 files that have definitely not been modified since the
1361 dirstate was written
1360 dirstate was written
1362 """
1361 """
1363 listignored, listclean, listunknown = ignored, clean, unknown
1362 listignored, listclean, listunknown = ignored, clean, unknown
1364 lookup, modified, added, unknown, ignored = [], [], [], [], []
1363 lookup, modified, added, unknown, ignored = [], [], [], [], []
1365 removed, deleted, clean = [], [], []
1364 removed, deleted, clean = [], [], []
1366
1365
1367 dmap = self._map
1366 dmap = self._map
1368 dmap.preload()
1367 dmap.preload()
1369
1368
1370 use_rust = True
1369 use_rust = True
1371
1370
1372 allowed_matchers = (
1371 allowed_matchers = (
1373 matchmod.alwaysmatcher,
1372 matchmod.alwaysmatcher,
1374 matchmod.differencematcher,
1373 matchmod.differencematcher,
1375 matchmod.exactmatcher,
1374 matchmod.exactmatcher,
1376 matchmod.includematcher,
1375 matchmod.includematcher,
1377 matchmod.intersectionmatcher,
1376 matchmod.intersectionmatcher,
1378 matchmod.nevermatcher,
1377 matchmod.nevermatcher,
1379 matchmod.unionmatcher,
1378 matchmod.unionmatcher,
1380 )
1379 )
1381
1380
1382 if rustmod is None:
1381 if rustmod is None:
1383 use_rust = False
1382 use_rust = False
1384 elif self._checkcase:
1383 elif self._checkcase:
1385 # Case-insensitive filesystems are not handled yet
1384 # Case-insensitive filesystems are not handled yet
1386 use_rust = False
1385 use_rust = False
1387 elif subrepos:
1386 elif subrepos:
1388 use_rust = False
1387 use_rust = False
1389 elif not isinstance(match, allowed_matchers):
1388 elif not isinstance(match, allowed_matchers):
1390 # Some matchers have yet to be implemented
1389 # Some matchers have yet to be implemented
1391 use_rust = False
1390 use_rust = False
1392
1391
1393 # Get the time from the filesystem so we can disambiguate files that
1392 # Get the time from the filesystem so we can disambiguate files that
1394 # appear modified in the present or future.
1393 # appear modified in the present or future.
1395 try:
1394 try:
1396 mtime_boundary = timestamp.get_fs_now(self._opener)
1395 mtime_boundary = timestamp.get_fs_now(self._opener)
1397 except OSError:
1396 except OSError:
1398 # In largefiles or readonly context
1397 # In largefiles or readonly context
1399 mtime_boundary = None
1398 mtime_boundary = None
1400
1399
1401 if use_rust:
1400 if use_rust:
1402 try:
1401 try:
1403 res = self._rust_status(
1402 res = self._rust_status(
1404 match, listclean, listignored, listunknown
1403 match, listclean, listignored, listunknown
1405 )
1404 )
1406 return res + (mtime_boundary,)
1405 return res + (mtime_boundary,)
1407 except rustmod.FallbackError:
1406 except rustmod.FallbackError:
1408 pass
1407 pass
1409
1408
1410 def noop(f):
1409 def noop(f):
1411 pass
1410 pass
1412
1411
1413 dcontains = dmap.__contains__
1412 dcontains = dmap.__contains__
1414 dget = dmap.__getitem__
1413 dget = dmap.__getitem__
1415 ladd = lookup.append # aka "unsure"
1414 ladd = lookup.append # aka "unsure"
1416 madd = modified.append
1415 madd = modified.append
1417 aadd = added.append
1416 aadd = added.append
1418 uadd = unknown.append if listunknown else noop
1417 uadd = unknown.append if listunknown else noop
1419 iadd = ignored.append if listignored else noop
1418 iadd = ignored.append if listignored else noop
1420 radd = removed.append
1419 radd = removed.append
1421 dadd = deleted.append
1420 dadd = deleted.append
1422 cadd = clean.append if listclean else noop
1421 cadd = clean.append if listclean else noop
1423 mexact = match.exact
1422 mexact = match.exact
1424 dirignore = self._dirignore
1423 dirignore = self._dirignore
1425 checkexec = self._checkexec
1424 checkexec = self._checkexec
1426 checklink = self._checklink
1425 checklink = self._checklink
1427 copymap = self._map.copymap
1426 copymap = self._map.copymap
1428
1427
1429 # We need to do full walks when either
1428 # We need to do full walks when either
1430 # - we're listing all clean files, or
1429 # - we're listing all clean files, or
1431 # - match.traversedir does something, because match.traversedir should
1430 # - match.traversedir does something, because match.traversedir should
1432 # be called for every dir in the working dir
1431 # be called for every dir in the working dir
1433 full = listclean or match.traversedir is not None
1432 full = listclean or match.traversedir is not None
1434 for fn, st in self.walk(
1433 for fn, st in self.walk(
1435 match, subrepos, listunknown, listignored, full=full
1434 match, subrepos, listunknown, listignored, full=full
1436 ).items():
1435 ).items():
1437 if not dcontains(fn):
1436 if not dcontains(fn):
1438 if (listignored or mexact(fn)) and dirignore(fn):
1437 if (listignored or mexact(fn)) and dirignore(fn):
1439 if listignored:
1438 if listignored:
1440 iadd(fn)
1439 iadd(fn)
1441 else:
1440 else:
1442 uadd(fn)
1441 uadd(fn)
1443 continue
1442 continue
1444
1443
1445 t = dget(fn)
1444 t = dget(fn)
1446 mode = t.mode
1445 mode = t.mode
1447 size = t.size
1446 size = t.size
1448
1447
1449 if not st and t.tracked:
1448 if not st and t.tracked:
1450 dadd(fn)
1449 dadd(fn)
1451 elif t.p2_info:
1450 elif t.p2_info:
1452 madd(fn)
1451 madd(fn)
1453 elif t.added:
1452 elif t.added:
1454 aadd(fn)
1453 aadd(fn)
1455 elif t.removed:
1454 elif t.removed:
1456 radd(fn)
1455 radd(fn)
1457 elif t.tracked:
1456 elif t.tracked:
1458 if not checklink and t.has_fallback_symlink:
1457 if not checklink and t.has_fallback_symlink:
1459 # If the file system does not support symlink, the mode
1458 # If the file system does not support symlink, the mode
1460 # might not be correctly stored in the dirstate, so do not
1459 # might not be correctly stored in the dirstate, so do not
1461 # trust it.
1460 # trust it.
1462 ladd(fn)
1461 ladd(fn)
1463 elif not checkexec and t.has_fallback_exec:
1462 elif not checkexec and t.has_fallback_exec:
1464 # If the file system does not support exec bits, the mode
1463 # If the file system does not support exec bits, the mode
1465 # might not be correctly stored in the dirstate, so do not
1464 # might not be correctly stored in the dirstate, so do not
1466 # trust it.
1465 # trust it.
1467 ladd(fn)
1466 ladd(fn)
1468 elif (
1467 elif (
1469 size >= 0
1468 size >= 0
1470 and (
1469 and (
1471 (size != st.st_size and size != st.st_size & _rangemask)
1470 (size != st.st_size and size != st.st_size & _rangemask)
1472 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1471 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1473 )
1472 )
1474 or fn in copymap
1473 or fn in copymap
1475 ):
1474 ):
1476 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1475 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1477 # issue6456: Size returned may be longer due to
1476 # issue6456: Size returned may be longer due to
1478 # encryption on EXT-4 fscrypt, undecided.
1477 # encryption on EXT-4 fscrypt, undecided.
1479 ladd(fn)
1478 ladd(fn)
1480 else:
1479 else:
1481 madd(fn)
1480 madd(fn)
1482 elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
1481 elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
1483 # There might be a change in the future if for example the
1482 # There might be a change in the future if for example the
1484 # internal clock is off, but this is a case where the issues
1483 # internal clock is off, but this is a case where the issues
1485 # the user would face would be a lot worse and there is
1484 # the user would face would be a lot worse and there is
1486 # nothing we can really do.
1485 # nothing we can really do.
1487 ladd(fn)
1486 ladd(fn)
1488 elif listclean:
1487 elif listclean:
1489 cadd(fn)
1488 cadd(fn)
1490 status = scmutil.status(
1489 status = scmutil.status(
1491 modified, added, removed, deleted, unknown, ignored, clean
1490 modified, added, removed, deleted, unknown, ignored, clean
1492 )
1491 )
1493 return (lookup, status, mtime_boundary)
1492 return (lookup, status, mtime_boundary)
1494
1493
1495 def matches(self, match):
1494 def matches(self, match):
1496 """
1495 """
1497 return files in the dirstate (in whatever state) filtered by match
1496 return files in the dirstate (in whatever state) filtered by match
1498 """
1497 """
1499 dmap = self._map
1498 dmap = self._map
1500 if rustmod is not None:
1499 if rustmod is not None:
1501 dmap = self._map._map
1500 dmap = self._map._map
1502
1501
1503 if match.always():
1502 if match.always():
1504 return dmap.keys()
1503 return dmap.keys()
1505 files = match.files()
1504 files = match.files()
1506 if match.isexact():
1505 if match.isexact():
1507 # fast path -- filter the other way around, since typically files is
1506 # fast path -- filter the other way around, since typically files is
1508 # much smaller than dmap
1507 # much smaller than dmap
1509 return [f for f in files if f in dmap]
1508 return [f for f in files if f in dmap]
1510 if match.prefix() and all(fn in dmap for fn in files):
1509 if match.prefix() and all(fn in dmap for fn in files):
1511 # fast path -- all the values are known to be files, so just return
1510 # fast path -- all the values are known to be files, so just return
1512 # that
1511 # that
1513 return list(files)
1512 return list(files)
1514 return [f for f in dmap if match(f)]
1513 return [f for f in dmap if match(f)]
1515
1514
1516 def _actualfilename(self, tr):
1515 def _actualfilename(self, tr):
1517 if tr:
1516 if tr:
1518 return self._pendingfilename
1517 return self._pendingfilename
1519 else:
1518 else:
1520 return self._filename
1519 return self._filename
1521
1520
1522 def data_backup_filename(self, backupname):
1521 def data_backup_filename(self, backupname):
1523 if not self._use_dirstate_v2:
1522 if not self._use_dirstate_v2:
1524 return None
1523 return None
1525 return backupname + b'.v2-data'
1524 return backupname + b'.v2-data'
1526
1525
1527 def _new_backup_data_filename(self, backupname):
1526 def _new_backup_data_filename(self, backupname):
1528 """return a filename to backup a data-file or None"""
1527 """return a filename to backup a data-file or None"""
1529 if not self._use_dirstate_v2:
1528 if not self._use_dirstate_v2:
1530 return None
1529 return None
1531 if self._map.docket.uuid is None:
1530 if self._map.docket.uuid is None:
1532 # not created yet, nothing to backup
1531 # not created yet, nothing to backup
1533 return None
1532 return None
1534 data_filename = self._map.docket.data_filename()
1533 data_filename = self._map.docket.data_filename()
1535 return data_filename, self.data_backup_filename(backupname)
1534 return data_filename, self.data_backup_filename(backupname)
1536
1535
1537 def backup_data_file(self, backupname):
1536 def backup_data_file(self, backupname):
1538 if not self._use_dirstate_v2:
1537 if not self._use_dirstate_v2:
1539 return None
1538 return None
1540 docket = docketmod.DirstateDocket.parse(
1539 docket = docketmod.DirstateDocket.parse(
1541 self._opener.read(backupname),
1540 self._opener.read(backupname),
1542 self._nodeconstants,
1541 self._nodeconstants,
1543 )
1542 )
1544 return self.data_backup_filename(backupname), docket.data_filename()
1543 return self.data_backup_filename(backupname), docket.data_filename()
1545
1544
1546 def savebackup(self, tr, backupname):
1545 def savebackup(self, tr, backupname):
1547 '''Save current dirstate into backup file'''
1546 '''Save current dirstate into backup file'''
1548 filename = self._actualfilename(tr)
1547 filename = self._actualfilename(tr)
1549 assert backupname != filename
1548 assert backupname != filename
1550
1549
1551 # use '_writedirstate' instead of 'write' to write changes certainly,
1550 # use '_writedirstate' instead of 'write' to write changes certainly,
1552 # because the latter omits writing out if transaction is running.
1551 # because the latter omits writing out if transaction is running.
1553 # output file will be used to create backup of dirstate at this point.
1552 # output file will be used to create backup of dirstate at this point.
1554 if self._dirty:
1553 if self._dirty:
1555 self._writedirstate(
1554 self._writedirstate(
1556 tr,
1555 tr,
1557 self._opener(filename, b"w", atomictemp=True, checkambig=True),
1556 self._opener(filename, b"w", atomictemp=True, checkambig=True),
1558 )
1557 )
1559
1558
1560 if tr:
1559 if tr:
1561 # ensure that subsequent tr.writepending returns True for
1560 # ensure that subsequent tr.writepending returns True for
1562 # changes written out above, even if dirstate is never
1561 # changes written out above, even if dirstate is never
1563 # changed after this
1562 # changed after this
1564 tr.addfilegenerator(
1563 tr.addfilegenerator(
1565 b'dirstate-1-main',
1564 b'dirstate-1-main',
1566 (self._filename,),
1565 (self._filename,),
1567 lambda f: self._writedirstate(tr, f),
1566 lambda f: self._writedirstate(tr, f),
1568 location=b'plain',
1567 location=b'plain',
1569 post_finalize=True,
1568 post_finalize=True,
1570 )
1569 )
1571
1570
1572 self._opener.tryunlink(backupname)
1571 self._opener.tryunlink(backupname)
1573 if self._opener.exists(filename):
1572 if self._opener.exists(filename):
1574 # hardlink backup is okay because _writedirstate is always called
1573 # hardlink backup is okay because _writedirstate is always called
1575 # with an "atomictemp=True" file.
1574 # with an "atomictemp=True" file.
1576 util.copyfile(
1575 util.copyfile(
1577 self._opener.join(filename),
1576 self._opener.join(filename),
1578 self._opener.join(backupname),
1577 self._opener.join(backupname),
1579 hardlink=True,
1578 hardlink=True,
1580 )
1579 )
1581 data_pair = self._new_backup_data_filename(backupname)
1580 data_pair = self._new_backup_data_filename(backupname)
1582 if data_pair is not None:
1581 if data_pair is not None:
1583 data_filename, bck_data_filename = data_pair
1582 data_filename, bck_data_filename = data_pair
1584 util.copyfile(
1583 util.copyfile(
1585 self._opener.join(data_filename),
1584 self._opener.join(data_filename),
1586 self._opener.join(bck_data_filename),
1585 self._opener.join(bck_data_filename),
1587 hardlink=True,
1586 hardlink=True,
1588 )
1587 )
1589 if tr is not None:
1588 if tr is not None:
1590 # ensure that pending file written above is unlinked at
1589 # ensure that pending file written above is unlinked at
1591 # failure, even if tr.writepending isn't invoked until the
1590 # failure, even if tr.writepending isn't invoked until the
1592 # end of this transaction
1591 # end of this transaction
1593 tr.registertmp(bck_data_filename, location=b'plain')
1592 tr.registertmp(bck_data_filename, location=b'plain')
1594
1593
    def restorebackup(self, tr, backupname):
        '''Restore dirstate by backup file'''
        # this "invalidate()" prevents "wlock.release()" from writing
        # changes of dirstate out after restoring from backup file
        self.invalidate()
        o = self._opener
        if not o.exists(backupname):
            # there was no file backup, delete existing files
            filename = self._actualfilename(tr)
            data_file = None
            if self._use_dirstate_v2:
                # dirstate-v2 splits state into a docket file plus a
                # separate data file; both must be removed together
                data_file = self._map.docket.data_filename()
            if o.exists(filename):
                o.unlink(filename)
            if data_file is not None and o.exists(data_file):
                o.unlink(data_file)
            return
        filename = self._actualfilename(tr)
        # pair of (backup, target) names for the v2 data file, or None
        # when there is no associated data-file backup
        data_pair = self.backup_data_file(backupname)
        if o.exists(filename) and util.samefile(
            o.join(backupname), o.join(filename)
        ):
            # backup is a hardlink to the live file: content already
            # matches, so just drop the extra link
            o.unlink(backupname)
        else:
            # checkambig=True guards against mtime ambiguity on rename
            o.rename(backupname, filename, checkambig=True)

        if data_pair is not None:
            data_backup, target = data_pair
            if o.exists(target) and util.samefile(
                o.join(data_backup), o.join(target)
            ):
                # same hardlink shortcut as above, for the data file
                o.unlink(data_backup)
            else:
                o.rename(data_backup, target, checkambig=True)
1630 def clearbackup(self, tr, backupname):
1629 def clearbackup(self, tr, backupname):
1631 '''Clear backup file'''
1630 '''Clear backup file'''
1632 o = self._opener
1631 o = self._opener
1633 if o.exists(backupname):
1632 if o.exists(backupname):
1634 data_backup = self.backup_data_file(backupname)
1633 data_backup = self.backup_data_file(backupname)
1635 o.unlink(backupname)
1634 o.unlink(backupname)
1636 if data_backup is not None:
1635 if data_backup is not None:
1637 o.unlink(data_backup[0])
1636 o.unlink(data_backup[0])
1638
1637
1639 def verify(self, m1, m2, p1, narrow_matcher=None):
1638 def verify(self, m1, m2, p1, narrow_matcher=None):
1640 """
1639 """
1641 check the dirstate contents against the parent manifest and yield errors
1640 check the dirstate contents against the parent manifest and yield errors
1642 """
1641 """
1643 missing_from_p1 = _(
1642 missing_from_p1 = _(
1644 b"%s marked as tracked in p1 (%s) but not in manifest1\n"
1643 b"%s marked as tracked in p1 (%s) but not in manifest1\n"
1645 )
1644 )
1646 unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
1645 unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
1647 missing_from_ps = _(
1646 missing_from_ps = _(
1648 b"%s marked as modified, but not in either manifest\n"
1647 b"%s marked as modified, but not in either manifest\n"
1649 )
1648 )
1650 missing_from_ds = _(
1649 missing_from_ds = _(
1651 b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
1650 b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
1652 )
1651 )
1653 for f, entry in self.items():
1652 for f, entry in self.items():
1654 if entry.p1_tracked:
1653 if entry.p1_tracked:
1655 if entry.modified and f not in m1 and f not in m2:
1654 if entry.modified and f not in m1 and f not in m2:
1656 yield missing_from_ps % f
1655 yield missing_from_ps % f
1657 elif f not in m1:
1656 elif f not in m1:
1658 yield missing_from_p1 % (f, node.short(p1))
1657 yield missing_from_p1 % (f, node.short(p1))
1659 if entry.added and f in m1:
1658 if entry.added and f in m1:
1660 yield unexpected_in_p1 % f
1659 yield unexpected_in_p1 % f
1661 for f in m1:
1660 for f in m1:
1662 if narrow_matcher is not None and not narrow_matcher(f):
1661 if narrow_matcher is not None and not narrow_matcher(f):
1663 continue
1662 continue
1664 entry = self.get_entry(f)
1663 entry = self.get_entry(f)
1665 if not entry.p1_tracked:
1664 if not entry.p1_tracked:
1666 yield missing_from_ds % (f, node.short(p1))
1665 yield missing_from_ds % (f, node.short(p1))
General Comments 0
You need to be logged in to leave comments. Login now