##// END OF EJS Templates
dirstate: factor the "changing" context logic out...
marmoute -
r50919:0dc2fb4b default
parent child Browse files
Show More
@@ -1,1681 +1,1703 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import collections
9 import collections
10 import contextlib
10 import contextlib
11 import os
11 import os
12 import stat
12 import stat
13 import uuid
13 import uuid
14
14
15 from .i18n import _
15 from .i18n import _
16 from .pycompat import delattr
16 from .pycompat import delattr
17
17
18 from hgdemandimport import tracing
18 from hgdemandimport import tracing
19
19
20 from . import (
20 from . import (
21 dirstatemap,
21 dirstatemap,
22 encoding,
22 encoding,
23 error,
23 error,
24 match as matchmod,
24 match as matchmod,
25 node,
25 node,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 util,
30 util,
31 )
31 )
32
32
33 from .dirstateutils import (
33 from .dirstateutils import (
34 docket as docketmod,
34 docket as docketmod,
35 timestamp,
35 timestamp,
36 )
36 )
37
37
38 from .interfaces import (
38 from .interfaces import (
39 dirstate as intdirstate,
39 dirstate as intdirstate,
40 util as interfaceutil,
40 util as interfaceutil,
41 )
41 )
42
42
43 parsers = policy.importmod('parsers')
43 parsers = policy.importmod('parsers')
44 rustmod = policy.importrust('dirstate')
44 rustmod = policy.importrust('dirstate')
45
45
46 HAS_FAST_DIRSTATE_V2 = rustmod is not None
46 HAS_FAST_DIRSTATE_V2 = rustmod is not None
47
47
48 propertycache = util.propertycache
48 propertycache = util.propertycache
49 filecache = scmutil.filecache
49 filecache = scmutil.filecache
50 _rangemask = dirstatemap.rangemask
50 _rangemask = dirstatemap.rangemask
51
51
52 DirstateItem = dirstatemap.DirstateItem
52 DirstateItem = dirstatemap.DirstateItem
53
53
54
54
55 class repocache(filecache):
55 class repocache(filecache):
56 """filecache for files in .hg/"""
56 """filecache for files in .hg/"""
57
57
58 def join(self, obj, fname):
58 def join(self, obj, fname):
59 return obj._opener.join(fname)
59 return obj._opener.join(fname)
60
60
61
61
62 class rootcache(filecache):
62 class rootcache(filecache):
63 """filecache for files in the repository root"""
63 """filecache for files in the repository root"""
64
64
65 def join(self, obj, fname):
65 def join(self, obj, fname):
66 return obj._join(fname)
66 return obj._join(fname)
67
67
68
68
69 def requires_changing_parents(func):
69 def requires_changing_parents(func):
70 def wrap(self, *args, **kwargs):
70 def wrap(self, *args, **kwargs):
71 if not self.is_changing_parents:
71 if not self.is_changing_parents:
72 msg = 'calling `%s` outside of a changing_parents context'
72 msg = 'calling `%s` outside of a changing_parents context'
73 msg %= func.__name__
73 msg %= func.__name__
74 raise error.ProgrammingError(msg)
74 raise error.ProgrammingError(msg)
75 if self._invalidated_context:
75 if self._invalidated_context:
76 msg = 'calling `%s` after the dirstate was invalidated'
76 msg = 'calling `%s` after the dirstate was invalidated'
77 raise error.ProgrammingError(msg)
77 raise error.ProgrammingError(msg)
78 return func(self, *args, **kwargs)
78 return func(self, *args, **kwargs)
79
79
80 return wrap
80 return wrap
81
81
82
82
83 def requires_not_changing_parents(func):
83 def requires_not_changing_parents(func):
84 def wrap(self, *args, **kwargs):
84 def wrap(self, *args, **kwargs):
85 if self.is_changing_parents:
85 if self.is_changing_parents:
86 msg = 'calling `%s` inside of a changing_parents context'
86 msg = 'calling `%s` inside of a changing_parents context'
87 msg %= func.__name__
87 msg %= func.__name__
88 raise error.ProgrammingError(msg)
88 raise error.ProgrammingError(msg)
89 return func(self, *args, **kwargs)
89 return func(self, *args, **kwargs)
90
90
91 return wrap
91 return wrap
92
92
93
93
94 CHANGE_TYPE_PARENTS = "parents"
95
96
94 @interfaceutil.implementer(intdirstate.idirstate)
97 @interfaceutil.implementer(intdirstate.idirstate)
95 class dirstate:
98 class dirstate:
96 def __init__(
99 def __init__(
97 self,
100 self,
98 opener,
101 opener,
99 ui,
102 ui,
100 root,
103 root,
101 validate,
104 validate,
102 sparsematchfn,
105 sparsematchfn,
103 nodeconstants,
106 nodeconstants,
104 use_dirstate_v2,
107 use_dirstate_v2,
105 use_tracked_hint=False,
108 use_tracked_hint=False,
106 ):
109 ):
107 """Create a new dirstate object.
110 """Create a new dirstate object.
108
111
109 opener is an open()-like callable that can be used to open the
112 opener is an open()-like callable that can be used to open the
110 dirstate file; root is the root of the directory tracked by
113 dirstate file; root is the root of the directory tracked by
111 the dirstate.
114 the dirstate.
112 """
115 """
113 self._use_dirstate_v2 = use_dirstate_v2
116 self._use_dirstate_v2 = use_dirstate_v2
114 self._use_tracked_hint = use_tracked_hint
117 self._use_tracked_hint = use_tracked_hint
115 self._nodeconstants = nodeconstants
118 self._nodeconstants = nodeconstants
116 self._opener = opener
119 self._opener = opener
117 self._validate = validate
120 self._validate = validate
118 self._root = root
121 self._root = root
119 # Either build a sparse-matcher or None if sparse is disabled
122 # Either build a sparse-matcher or None if sparse is disabled
120 self._sparsematchfn = sparsematchfn
123 self._sparsematchfn = sparsematchfn
121 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
124 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
122 # UNC path pointing to root share (issue4557)
125 # UNC path pointing to root share (issue4557)
123 self._rootdir = pathutil.normasprefix(root)
126 self._rootdir = pathutil.normasprefix(root)
124 # True is any internal state may be different
127 # True is any internal state may be different
125 self._dirty = False
128 self._dirty = False
126 # True if the set of tracked file may be different
129 # True if the set of tracked file may be different
127 self._dirty_tracked_set = False
130 self._dirty_tracked_set = False
128 self._ui = ui
131 self._ui = ui
129 self._filecache = {}
132 self._filecache = {}
130 # nesting level of `changing_parents` context
133 # nesting level of `changing_parents` context
131 self._changing_level = 0
134 self._changing_level = 0
135 # the change currently underway
136 self._change_type = None
132 # True if the current dirstate changing operations have been
137 # True if the current dirstate changing operations have been
133 # invalidated (used to make sure all nested contexts have been exited)
138 # invalidated (used to make sure all nested contexts have been exited)
134 self._invalidated_context = False
139 self._invalidated_context = False
135 self._filename = b'dirstate'
140 self._filename = b'dirstate'
136 self._filename_th = b'dirstate-tracked-hint'
141 self._filename_th = b'dirstate-tracked-hint'
137 self._pendingfilename = b'%s.pending' % self._filename
142 self._pendingfilename = b'%s.pending' % self._filename
138 self._plchangecallbacks = {}
143 self._plchangecallbacks = {}
139 self._origpl = None
144 self._origpl = None
140 self._mapcls = dirstatemap.dirstatemap
145 self._mapcls = dirstatemap.dirstatemap
141 # Access and cache cwd early, so we don't access it for the first time
146 # Access and cache cwd early, so we don't access it for the first time
142 # after a working-copy update caused it to not exist (accessing it then
147 # after a working-copy update caused it to not exist (accessing it then
143 # raises an exception).
148 # raises an exception).
144 self._cwd
149 self._cwd
145
150
146 def prefetch_parents(self):
151 def prefetch_parents(self):
147 """make sure the parents are loaded
152 """make sure the parents are loaded
148
153
149 Used to avoid a race condition.
154 Used to avoid a race condition.
150 """
155 """
151 self._pl
156 self._pl
152
157
153 @contextlib.contextmanager
158 @contextlib.contextmanager
154 def changing_parents(self, repo):
159 def _changing(self, repo, change_type):
155 """Context manager for handling dirstate parents.
156
157 If an exception occurs in the scope of the context manager,
158 the incoherent dirstate won't be written when wlock is
159 released.
160 """
161 if repo.currentwlock() is None:
160 if repo.currentwlock() is None:
162 msg = b"changing parents without holding the wlock"
161 msg = b"trying to change the dirstate without holding the wlock"
163 raise error.ProgrammingError(msg)
162 raise error.ProgrammingError(msg)
164 if self._invalidated_context:
163 if self._invalidated_context:
165 msg = "trying to use an invalidated dirstate before it has reset"
164 msg = "trying to use an invalidated dirstate before it has reset"
166 raise error.ProgrammingError(msg)
165 raise error.ProgrammingError(msg)
166
167 # different type of change are mutually exclusive
168 if self._change_type is None:
169 assert self._changing_level == 0
170 self._change_type = change_type
171 elif self._change_type != change_type:
172 msg = (
173 'trying to open "%s" dirstate-changing context while a "%s" is'
174 ' already open'
175 )
176 msg %= (change_type, self._change_type)
177 raise error.ProgrammingError(msg)
167 self._changing_level += 1
178 self._changing_level += 1
168 try:
179 try:
169 yield
180 yield
170 except Exception:
181 except Exception:
171 self.invalidate()
182 self.invalidate()
172 raise
183 raise
173 finally:
184 finally:
174 if self._changing_level > 0:
185 if self._changing_level > 0:
175 if self._invalidated_context:
186 if self._invalidated_context:
176 # make sure we invalidate anything an upper context might
187 # make sure we invalidate anything an upper context might
177 # have changed.
188 # have changed.
178 self.invalidate()
189 self.invalidate()
179 self._changing_level -= 1
190 self._changing_level -= 1
180 # The invalidation is complete once we exit the final context
191 # The invalidation is complete once we exit the final context
181 # manager
192 # manager
182 if self._changing_level <= 0:
193 if self._changing_level <= 0:
194 self._change_type = None
183 assert self._changing_level == 0
195 assert self._changing_level == 0
184 if self._invalidated_context:
196 if self._invalidated_context:
185 self._invalidated_context = False
197 self._invalidated_context = False
186 else:
198 else:
187 # When an exception occured, `_invalidated_context`
199 # When an exception occured, `_invalidated_context`
188 # would have been set to True by the `invalidate`
200 # would have been set to True by the `invalidate`
189 # call earlier.
201 # call earlier.
190 #
202 #
191 # We don't have more straightforward code, because the
203 # We don't have more straightforward code, because the
192 # Exception catching (and the associated `invalidate`
204 # Exception catching (and the associated `invalidate`
193 # calling) might have been called by a nested context
205 # calling) might have been called by a nested context
194 # instead of the top level one.
206 # instead of the top level one.
195 self.write(repo.currenttransaction())
207 self.write(repo.currenttransaction())
196
208
209 @contextlib.contextmanager
210 def changing_parents(self, repo):
211 with self._changing(repo, CHANGE_TYPE_PARENTS) as c:
212 yield c
213
197 # here to help migration to the new code
214 # here to help migration to the new code
198 def parentchange(self):
215 def parentchange(self):
199 msg = (
216 msg = (
200 "Mercurial 6.4 and later requires call to "
217 "Mercurial 6.4 and later requires call to "
201 "`dirstate.changing_parents(repo)`"
218 "`dirstate.changing_parents(repo)`"
202 )
219 )
203 raise error.ProgrammingError(msg)
220 raise error.ProgrammingError(msg)
204
221
205 @property
222 @property
206 def is_changing_any(self):
223 def is_changing_any(self):
207 """Returns true if the dirstate is in the middle of a set of changes.
224 """Returns true if the dirstate is in the middle of a set of changes.
208
225
209 This returns True for any kind of change.
226 This returns True for any kind of change.
210 """
227 """
211 return self._changing_level > 0
228 return self._changing_level > 0
212
229
213 def pendingparentchange(self):
230 def pendingparentchange(self):
231 return self.is_changing_parent()
232
233 def is_changing_parent(self):
214 """Returns true if the dirstate is in the middle of a set of changes
234 """Returns true if the dirstate is in the middle of a set of changes
215 that modify the dirstate parent.
235 that modify the dirstate parent.
216 """
236 """
217 self._ui.deprecwarn(b"dirstate.is_changing_parents", b"6.5")
237 self._ui.deprecwarn(b"dirstate.is_changing_parents", b"6.5")
218 return self.is_changing_parents
238 return self.is_changing_parents
219
239
220 @property
240 @property
221 def is_changing_parents(self):
241 def is_changing_parents(self):
222 """Returns true if the dirstate is in the middle of a set of changes
242 """Returns true if the dirstate is in the middle of a set of changes
223 that modify the dirstate parent.
243 that modify the dirstate parent.
224 """
244 """
225 return self._changing_level > 0
245 if self._changing_level <= 0:
246 return False
247 return self._change_type == CHANGE_TYPE_PARENTS
226
248
227 @propertycache
249 @propertycache
228 def _map(self):
250 def _map(self):
229 """Return the dirstate contents (see documentation for dirstatemap)."""
251 """Return the dirstate contents (see documentation for dirstatemap)."""
230 self._map = self._mapcls(
252 self._map = self._mapcls(
231 self._ui,
253 self._ui,
232 self._opener,
254 self._opener,
233 self._root,
255 self._root,
234 self._nodeconstants,
256 self._nodeconstants,
235 self._use_dirstate_v2,
257 self._use_dirstate_v2,
236 )
258 )
237 return self._map
259 return self._map
238
260
239 @property
261 @property
240 def _sparsematcher(self):
262 def _sparsematcher(self):
241 """The matcher for the sparse checkout.
263 """The matcher for the sparse checkout.
242
264
243 The working directory may not include every file from a manifest. The
265 The working directory may not include every file from a manifest. The
244 matcher obtained by this property will match a path if it is to be
266 matcher obtained by this property will match a path if it is to be
245 included in the working directory.
267 included in the working directory.
246
268
247 When sparse if disabled, return None.
269 When sparse if disabled, return None.
248 """
270 """
249 if self._sparsematchfn is None:
271 if self._sparsematchfn is None:
250 return None
272 return None
251 # TODO there is potential to cache this property. For now, the matcher
273 # TODO there is potential to cache this property. For now, the matcher
252 # is resolved on every access. (But the called function does use a
274 # is resolved on every access. (But the called function does use a
253 # cache to keep the lookup fast.)
275 # cache to keep the lookup fast.)
254 return self._sparsematchfn()
276 return self._sparsematchfn()
255
277
256 @repocache(b'branch')
278 @repocache(b'branch')
257 def _branch(self):
279 def _branch(self):
258 try:
280 try:
259 return self._opener.read(b"branch").strip() or b"default"
281 return self._opener.read(b"branch").strip() or b"default"
260 except FileNotFoundError:
282 except FileNotFoundError:
261 return b"default"
283 return b"default"
262
284
263 @property
285 @property
264 def _pl(self):
286 def _pl(self):
265 return self._map.parents()
287 return self._map.parents()
266
288
267 def hasdir(self, d):
289 def hasdir(self, d):
268 return self._map.hastrackeddir(d)
290 return self._map.hastrackeddir(d)
269
291
270 @rootcache(b'.hgignore')
292 @rootcache(b'.hgignore')
271 def _ignore(self):
293 def _ignore(self):
272 files = self._ignorefiles()
294 files = self._ignorefiles()
273 if not files:
295 if not files:
274 return matchmod.never()
296 return matchmod.never()
275
297
276 pats = [b'include:%s' % f for f in files]
298 pats = [b'include:%s' % f for f in files]
277 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
299 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
278
300
279 @propertycache
301 @propertycache
280 def _slash(self):
302 def _slash(self):
281 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
303 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
282
304
283 @propertycache
305 @propertycache
284 def _checklink(self):
306 def _checklink(self):
285 return util.checklink(self._root)
307 return util.checklink(self._root)
286
308
287 @propertycache
309 @propertycache
288 def _checkexec(self):
310 def _checkexec(self):
289 return bool(util.checkexec(self._root))
311 return bool(util.checkexec(self._root))
290
312
291 @propertycache
313 @propertycache
292 def _checkcase(self):
314 def _checkcase(self):
293 return not util.fscasesensitive(self._join(b'.hg'))
315 return not util.fscasesensitive(self._join(b'.hg'))
294
316
295 def _join(self, f):
317 def _join(self, f):
296 # much faster than os.path.join()
318 # much faster than os.path.join()
297 # it's safe because f is always a relative path
319 # it's safe because f is always a relative path
298 return self._rootdir + f
320 return self._rootdir + f
299
321
300 def flagfunc(self, buildfallback):
322 def flagfunc(self, buildfallback):
301 """build a callable that returns flags associated with a filename
323 """build a callable that returns flags associated with a filename
302
324
303 The information is extracted from three possible layers:
325 The information is extracted from three possible layers:
304 1. the file system if it supports the information
326 1. the file system if it supports the information
305 2. the "fallback" information stored in the dirstate if any
327 2. the "fallback" information stored in the dirstate if any
306 3. a more expensive mechanism inferring the flags from the parents.
328 3. a more expensive mechanism inferring the flags from the parents.
307 """
329 """
308
330
309 # small hack to cache the result of buildfallback()
331 # small hack to cache the result of buildfallback()
310 fallback_func = []
332 fallback_func = []
311
333
312 def get_flags(x):
334 def get_flags(x):
313 entry = None
335 entry = None
314 fallback_value = None
336 fallback_value = None
315 try:
337 try:
316 st = os.lstat(self._join(x))
338 st = os.lstat(self._join(x))
317 except OSError:
339 except OSError:
318 return b''
340 return b''
319
341
320 if self._checklink:
342 if self._checklink:
321 if util.statislink(st):
343 if util.statislink(st):
322 return b'l'
344 return b'l'
323 else:
345 else:
324 entry = self.get_entry(x)
346 entry = self.get_entry(x)
325 if entry.has_fallback_symlink:
347 if entry.has_fallback_symlink:
326 if entry.fallback_symlink:
348 if entry.fallback_symlink:
327 return b'l'
349 return b'l'
328 else:
350 else:
329 if not fallback_func:
351 if not fallback_func:
330 fallback_func.append(buildfallback())
352 fallback_func.append(buildfallback())
331 fallback_value = fallback_func[0](x)
353 fallback_value = fallback_func[0](x)
332 if b'l' in fallback_value:
354 if b'l' in fallback_value:
333 return b'l'
355 return b'l'
334
356
335 if self._checkexec:
357 if self._checkexec:
336 if util.statisexec(st):
358 if util.statisexec(st):
337 return b'x'
359 return b'x'
338 else:
360 else:
339 if entry is None:
361 if entry is None:
340 entry = self.get_entry(x)
362 entry = self.get_entry(x)
341 if entry.has_fallback_exec:
363 if entry.has_fallback_exec:
342 if entry.fallback_exec:
364 if entry.fallback_exec:
343 return b'x'
365 return b'x'
344 else:
366 else:
345 if fallback_value is None:
367 if fallback_value is None:
346 if not fallback_func:
368 if not fallback_func:
347 fallback_func.append(buildfallback())
369 fallback_func.append(buildfallback())
348 fallback_value = fallback_func[0](x)
370 fallback_value = fallback_func[0](x)
349 if b'x' in fallback_value:
371 if b'x' in fallback_value:
350 return b'x'
372 return b'x'
351 return b''
373 return b''
352
374
353 return get_flags
375 return get_flags
354
376
355 @propertycache
377 @propertycache
356 def _cwd(self):
378 def _cwd(self):
357 # internal config: ui.forcecwd
379 # internal config: ui.forcecwd
358 forcecwd = self._ui.config(b'ui', b'forcecwd')
380 forcecwd = self._ui.config(b'ui', b'forcecwd')
359 if forcecwd:
381 if forcecwd:
360 return forcecwd
382 return forcecwd
361 return encoding.getcwd()
383 return encoding.getcwd()
362
384
363 def getcwd(self):
385 def getcwd(self):
364 """Return the path from which a canonical path is calculated.
386 """Return the path from which a canonical path is calculated.
365
387
366 This path should be used to resolve file patterns or to convert
388 This path should be used to resolve file patterns or to convert
367 canonical paths back to file paths for display. It shouldn't be
389 canonical paths back to file paths for display. It shouldn't be
368 used to get real file paths. Use vfs functions instead.
390 used to get real file paths. Use vfs functions instead.
369 """
391 """
370 cwd = self._cwd
392 cwd = self._cwd
371 if cwd == self._root:
393 if cwd == self._root:
372 return b''
394 return b''
373 # self._root ends with a path separator if self._root is '/' or 'C:\'
395 # self._root ends with a path separator if self._root is '/' or 'C:\'
374 rootsep = self._root
396 rootsep = self._root
375 if not util.endswithsep(rootsep):
397 if not util.endswithsep(rootsep):
376 rootsep += pycompat.ossep
398 rootsep += pycompat.ossep
377 if cwd.startswith(rootsep):
399 if cwd.startswith(rootsep):
378 return cwd[len(rootsep) :]
400 return cwd[len(rootsep) :]
379 else:
401 else:
380 # we're outside the repo. return an absolute path.
402 # we're outside the repo. return an absolute path.
381 return cwd
403 return cwd
382
404
383 def pathto(self, f, cwd=None):
405 def pathto(self, f, cwd=None):
384 if cwd is None:
406 if cwd is None:
385 cwd = self.getcwd()
407 cwd = self.getcwd()
386 path = util.pathto(self._root, cwd, f)
408 path = util.pathto(self._root, cwd, f)
387 if self._slash:
409 if self._slash:
388 return util.pconvert(path)
410 return util.pconvert(path)
389 return path
411 return path
390
412
391 def get_entry(self, path):
413 def get_entry(self, path):
392 """return a DirstateItem for the associated path"""
414 """return a DirstateItem for the associated path"""
393 entry = self._map.get(path)
415 entry = self._map.get(path)
394 if entry is None:
416 if entry is None:
395 return DirstateItem()
417 return DirstateItem()
396 return entry
418 return entry
397
419
398 def __contains__(self, key):
420 def __contains__(self, key):
399 return key in self._map
421 return key in self._map
400
422
401 def __iter__(self):
423 def __iter__(self):
402 return iter(sorted(self._map))
424 return iter(sorted(self._map))
403
425
404 def items(self):
426 def items(self):
405 return self._map.items()
427 return self._map.items()
406
428
407 iteritems = items
429 iteritems = items
408
430
409 def parents(self):
431 def parents(self):
410 return [self._validate(p) for p in self._pl]
432 return [self._validate(p) for p in self._pl]
411
433
412 def p1(self):
434 def p1(self):
413 return self._validate(self._pl[0])
435 return self._validate(self._pl[0])
414
436
415 def p2(self):
437 def p2(self):
416 return self._validate(self._pl[1])
438 return self._validate(self._pl[1])
417
439
418 @property
440 @property
419 def in_merge(self):
441 def in_merge(self):
420 """True if a merge is in progress"""
442 """True if a merge is in progress"""
421 return self._pl[1] != self._nodeconstants.nullid
443 return self._pl[1] != self._nodeconstants.nullid
422
444
423 def branch(self):
445 def branch(self):
424 return encoding.tolocal(self._branch)
446 return encoding.tolocal(self._branch)
425
447
426 def setparents(self, p1, p2=None):
448 def setparents(self, p1, p2=None):
427 """Set dirstate parents to p1 and p2.
449 """Set dirstate parents to p1 and p2.
428
450
429 When moving from two parents to one, "merged" entries a
451 When moving from two parents to one, "merged" entries a
430 adjusted to normal and previous copy records discarded and
452 adjusted to normal and previous copy records discarded and
431 returned by the call.
453 returned by the call.
432
454
433 See localrepo.setparents()
455 See localrepo.setparents()
434 """
456 """
435 if p2 is None:
457 if p2 is None:
436 p2 = self._nodeconstants.nullid
458 p2 = self._nodeconstants.nullid
437 if self._changing_level == 0:
459 if self._changing_level == 0:
438 raise ValueError(
460 raise ValueError(
439 b"cannot set dirstate parent outside of "
461 b"cannot set dirstate parent outside of "
440 b"dirstate.changing_parents context manager"
462 b"dirstate.changing_parents context manager"
441 )
463 )
442
464
443 self._dirty = True
465 self._dirty = True
444 oldp2 = self._pl[1]
466 oldp2 = self._pl[1]
445 if self._origpl is None:
467 if self._origpl is None:
446 self._origpl = self._pl
468 self._origpl = self._pl
447 nullid = self._nodeconstants.nullid
469 nullid = self._nodeconstants.nullid
448 # True if we need to fold p2 related state back to a linear case
470 # True if we need to fold p2 related state back to a linear case
449 fold_p2 = oldp2 != nullid and p2 == nullid
471 fold_p2 = oldp2 != nullid and p2 == nullid
450 return self._map.setparents(p1, p2, fold_p2=fold_p2)
472 return self._map.setparents(p1, p2, fold_p2=fold_p2)
451
473
452 def setbranch(self, branch):
474 def setbranch(self, branch):
453 self.__class__._branch.set(self, encoding.fromlocal(branch))
475 self.__class__._branch.set(self, encoding.fromlocal(branch))
454 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
476 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
455 try:
477 try:
456 f.write(self._branch + b'\n')
478 f.write(self._branch + b'\n')
457 f.close()
479 f.close()
458
480
459 # make sure filecache has the correct stat info for _branch after
481 # make sure filecache has the correct stat info for _branch after
460 # replacing the underlying file
482 # replacing the underlying file
461 ce = self._filecache[b'_branch']
483 ce = self._filecache[b'_branch']
462 if ce:
484 if ce:
463 ce.refresh()
485 ce.refresh()
464 except: # re-raises
486 except: # re-raises
465 f.discard()
487 f.discard()
466 raise
488 raise
467
489
468 def invalidate(self):
490 def invalidate(self):
469 """Causes the next access to reread the dirstate.
491 """Causes the next access to reread the dirstate.
470
492
471 This is different from localrepo.invalidatedirstate() because it always
493 This is different from localrepo.invalidatedirstate() because it always
472 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
494 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
473 check whether the dirstate has changed before rereading it."""
495 check whether the dirstate has changed before rereading it."""
474
496
475 for a in ("_map", "_branch", "_ignore"):
497 for a in ("_map", "_branch", "_ignore"):
476 if a in self.__dict__:
498 if a in self.__dict__:
477 delattr(self, a)
499 delattr(self, a)
478 self._dirty = False
500 self._dirty = False
479 self._dirty_tracked_set = False
501 self._dirty_tracked_set = False
480 self._invalidated_context = self._changing_level > 0
502 self._invalidated_context = self._changing_level > 0
481 self._origpl = None
503 self._origpl = None
482
504
483 def copy(self, source, dest):
505 def copy(self, source, dest):
484 """Mark dest as a copy of source. Unmark dest if source is None."""
506 """Mark dest as a copy of source. Unmark dest if source is None."""
485 if source == dest:
507 if source == dest:
486 return
508 return
487 self._dirty = True
509 self._dirty = True
488 if source is not None:
510 if source is not None:
489 self._check_sparse(source)
511 self._check_sparse(source)
490 self._map.copymap[dest] = source
512 self._map.copymap[dest] = source
491 else:
513 else:
492 self._map.copymap.pop(dest, None)
514 self._map.copymap.pop(dest, None)
493
515
494 def copied(self, file):
516 def copied(self, file):
495 return self._map.copymap.get(file, None)
517 return self._map.copymap.get(file, None)
496
518
497 def copies(self):
519 def copies(self):
498 return self._map.copymap
520 return self._map.copymap
499
521
500 @requires_not_changing_parents
522 @requires_not_changing_parents
501 def set_tracked(self, filename, reset_copy=False):
523 def set_tracked(self, filename, reset_copy=False):
502 """a "public" method for generic code to mark a file as tracked
524 """a "public" method for generic code to mark a file as tracked
503
525
504 This function is to be called outside of "update/merge" case. For
526 This function is to be called outside of "update/merge" case. For
505 example by a command like `hg add X`.
527 example by a command like `hg add X`.
506
528
507 if reset_copy is set, any existing copy information will be dropped.
529 if reset_copy is set, any existing copy information will be dropped.
508
530
509 return True the file was previously untracked, False otherwise.
531 return True the file was previously untracked, False otherwise.
510 """
532 """
511 self._dirty = True
533 self._dirty = True
512 entry = self._map.get(filename)
534 entry = self._map.get(filename)
513 if entry is None or not entry.tracked:
535 if entry is None or not entry.tracked:
514 self._check_new_tracked_filename(filename)
536 self._check_new_tracked_filename(filename)
515 pre_tracked = self._map.set_tracked(filename)
537 pre_tracked = self._map.set_tracked(filename)
516 if reset_copy:
538 if reset_copy:
517 self._map.copymap.pop(filename, None)
539 self._map.copymap.pop(filename, None)
518 if pre_tracked:
540 if pre_tracked:
519 self._dirty_tracked_set = True
541 self._dirty_tracked_set = True
520 return pre_tracked
542 return pre_tracked
521
543
522 @requires_not_changing_parents
544 @requires_not_changing_parents
523 def set_untracked(self, filename):
545 def set_untracked(self, filename):
524 """a "public" method for generic code to mark a file as untracked
546 """a "public" method for generic code to mark a file as untracked
525
547
526 This function is to be called outside of "update/merge" case. For
548 This function is to be called outside of "update/merge" case. For
527 example by a command like `hg remove X`.
549 example by a command like `hg remove X`.
528
550
529 return True the file was previously tracked, False otherwise.
551 return True the file was previously tracked, False otherwise.
530 """
552 """
531 ret = self._map.set_untracked(filename)
553 ret = self._map.set_untracked(filename)
532 if ret:
554 if ret:
533 self._dirty = True
555 self._dirty = True
534 self._dirty_tracked_set = True
556 self._dirty_tracked_set = True
535 return ret
557 return ret
536
558
537 @requires_not_changing_parents
559 @requires_not_changing_parents
538 def set_clean(self, filename, parentfiledata):
560 def set_clean(self, filename, parentfiledata):
539 """record that the current state of the file on disk is known to be clean"""
561 """record that the current state of the file on disk is known to be clean"""
540 self._dirty = True
562 self._dirty = True
541 if not self._map[filename].tracked:
563 if not self._map[filename].tracked:
542 self._check_new_tracked_filename(filename)
564 self._check_new_tracked_filename(filename)
543 (mode, size, mtime) = parentfiledata
565 (mode, size, mtime) = parentfiledata
544 self._map.set_clean(filename, mode, size, mtime)
566 self._map.set_clean(filename, mode, size, mtime)
545
567
546 @requires_not_changing_parents
568 @requires_not_changing_parents
547 def set_possibly_dirty(self, filename):
569 def set_possibly_dirty(self, filename):
548 """record that the current state of the file on disk is unknown"""
570 """record that the current state of the file on disk is unknown"""
549 self._dirty = True
571 self._dirty = True
550 self._map.set_possibly_dirty(filename)
572 self._map.set_possibly_dirty(filename)
551
573
552 @requires_changing_parents
574 @requires_changing_parents
553 def update_file_p1(
575 def update_file_p1(
554 self,
576 self,
555 filename,
577 filename,
556 p1_tracked,
578 p1_tracked,
557 ):
579 ):
558 """Set a file as tracked in the parent (or not)
580 """Set a file as tracked in the parent (or not)
559
581
560 This is to be called when adjust the dirstate to a new parent after an history
582 This is to be called when adjust the dirstate to a new parent after an history
561 rewriting operation.
583 rewriting operation.
562
584
563 It should not be called during a merge (p2 != nullid) and only within
585 It should not be called during a merge (p2 != nullid) and only within
564 a `with dirstate.changing_parents(repo):` context.
586 a `with dirstate.changing_parents(repo):` context.
565 """
587 """
566 if self.in_merge:
588 if self.in_merge:
567 msg = b'update_file_reference should not be called when merging'
589 msg = b'update_file_reference should not be called when merging'
568 raise error.ProgrammingError(msg)
590 raise error.ProgrammingError(msg)
569 entry = self._map.get(filename)
591 entry = self._map.get(filename)
570 if entry is None:
592 if entry is None:
571 wc_tracked = False
593 wc_tracked = False
572 else:
594 else:
573 wc_tracked = entry.tracked
595 wc_tracked = entry.tracked
574 if not (p1_tracked or wc_tracked):
596 if not (p1_tracked or wc_tracked):
575 # the file is no longer relevant to anyone
597 # the file is no longer relevant to anyone
576 if self._map.get(filename) is not None:
598 if self._map.get(filename) is not None:
577 self._map.reset_state(filename)
599 self._map.reset_state(filename)
578 self._dirty = True
600 self._dirty = True
579 elif (not p1_tracked) and wc_tracked:
601 elif (not p1_tracked) and wc_tracked:
580 if entry is not None and entry.added:
602 if entry is not None and entry.added:
581 return # avoid dropping copy information (maybe?)
603 return # avoid dropping copy information (maybe?)
582
604
583 self._map.reset_state(
605 self._map.reset_state(
584 filename,
606 filename,
585 wc_tracked,
607 wc_tracked,
586 p1_tracked,
608 p1_tracked,
587 # the underlying reference might have changed, we will have to
609 # the underlying reference might have changed, we will have to
588 # check it.
610 # check it.
589 has_meaningful_mtime=False,
611 has_meaningful_mtime=False,
590 )
612 )
591
613
592 @requires_changing_parents
614 @requires_changing_parents
593 def update_file(
615 def update_file(
594 self,
616 self,
595 filename,
617 filename,
596 wc_tracked,
618 wc_tracked,
597 p1_tracked,
619 p1_tracked,
598 p2_info=False,
620 p2_info=False,
599 possibly_dirty=False,
621 possibly_dirty=False,
600 parentfiledata=None,
622 parentfiledata=None,
601 ):
623 ):
602 """update the information about a file in the dirstate
624 """update the information about a file in the dirstate
603
625
604 This is to be called when the direstates parent changes to keep track
626 This is to be called when the direstates parent changes to keep track
605 of what is the file situation in regards to the working copy and its parent.
627 of what is the file situation in regards to the working copy and its parent.
606
628
607 This function must be called within a `dirstate.changing_parents` context.
629 This function must be called within a `dirstate.changing_parents` context.
608
630
609 note: the API is at an early stage and we might need to adjust it
631 note: the API is at an early stage and we might need to adjust it
610 depending of what information ends up being relevant and useful to
632 depending of what information ends up being relevant and useful to
611 other processing.
633 other processing.
612 """
634 """
613 self._update_file(
635 self._update_file(
614 filename=filename,
636 filename=filename,
615 wc_tracked=wc_tracked,
637 wc_tracked=wc_tracked,
616 p1_tracked=p1_tracked,
638 p1_tracked=p1_tracked,
617 p2_info=p2_info,
639 p2_info=p2_info,
618 possibly_dirty=possibly_dirty,
640 possibly_dirty=possibly_dirty,
619 parentfiledata=parentfiledata,
641 parentfiledata=parentfiledata,
620 )
642 )
621
643
622 def hacky_extension_update_file(self, *args, **kwargs):
644 def hacky_extension_update_file(self, *args, **kwargs):
623 """NEVER USE THIS, YOU DO NOT NEED IT
645 """NEVER USE THIS, YOU DO NOT NEED IT
624
646
625 This function is a variant of "update_file" to be called by a small set
647 This function is a variant of "update_file" to be called by a small set
626 of extensions, it also adjust the internal state of file, but can be
648 of extensions, it also adjust the internal state of file, but can be
627 called outside an `changing_parents` context.
649 called outside an `changing_parents` context.
628
650
629 A very small number of extension meddle with the working copy content
651 A very small number of extension meddle with the working copy content
630 in a way that requires to adjust the dirstate accordingly. At the time
652 in a way that requires to adjust the dirstate accordingly. At the time
631 this command is written they are :
653 this command is written they are :
632 - keyword,
654 - keyword,
633 - largefile,
655 - largefile,
634 PLEASE DO NOT GROW THIS LIST ANY FURTHER.
656 PLEASE DO NOT GROW THIS LIST ANY FURTHER.
635
657
636 This function could probably be replaced by more semantic one (like
658 This function could probably be replaced by more semantic one (like
637 "adjust expected size" or "always revalidate file content", etc)
659 "adjust expected size" or "always revalidate file content", etc)
638 however at the time where this is writen, this is too much of a detour
660 however at the time where this is writen, this is too much of a detour
639 to be considered.
661 to be considered.
640 """
662 """
641 self._update_file(
663 self._update_file(
642 *args,
664 *args,
643 **kwargs,
665 **kwargs,
644 )
666 )
645
667
646 def _update_file(
668 def _update_file(
647 self,
669 self,
648 filename,
670 filename,
649 wc_tracked,
671 wc_tracked,
650 p1_tracked,
672 p1_tracked,
651 p2_info=False,
673 p2_info=False,
652 possibly_dirty=False,
674 possibly_dirty=False,
653 parentfiledata=None,
675 parentfiledata=None,
654 ):
676 ):
655
677
656 # note: I do not think we need to double check name clash here since we
678 # note: I do not think we need to double check name clash here since we
657 # are in a update/merge case that should already have taken care of
679 # are in a update/merge case that should already have taken care of
658 # this. The test agrees
680 # this. The test agrees
659
681
660 self._dirty = True
682 self._dirty = True
661 old_entry = self._map.get(filename)
683 old_entry = self._map.get(filename)
662 if old_entry is None:
684 if old_entry is None:
663 prev_tracked = False
685 prev_tracked = False
664 else:
686 else:
665 prev_tracked = old_entry.tracked
687 prev_tracked = old_entry.tracked
666 if prev_tracked != wc_tracked:
688 if prev_tracked != wc_tracked:
667 self._dirty_tracked_set = True
689 self._dirty_tracked_set = True
668
690
669 self._map.reset_state(
691 self._map.reset_state(
670 filename,
692 filename,
671 wc_tracked,
693 wc_tracked,
672 p1_tracked,
694 p1_tracked,
673 p2_info=p2_info,
695 p2_info=p2_info,
674 has_meaningful_mtime=not possibly_dirty,
696 has_meaningful_mtime=not possibly_dirty,
675 parentfiledata=parentfiledata,
697 parentfiledata=parentfiledata,
676 )
698 )
677
699
678 def _check_new_tracked_filename(self, filename):
700 def _check_new_tracked_filename(self, filename):
679 scmutil.checkfilename(filename)
701 scmutil.checkfilename(filename)
680 if self._map.hastrackeddir(filename):
702 if self._map.hastrackeddir(filename):
681 msg = _(b'directory %r already in dirstate')
703 msg = _(b'directory %r already in dirstate')
682 msg %= pycompat.bytestr(filename)
704 msg %= pycompat.bytestr(filename)
683 raise error.Abort(msg)
705 raise error.Abort(msg)
684 # shadows
706 # shadows
685 for d in pathutil.finddirs(filename):
707 for d in pathutil.finddirs(filename):
686 if self._map.hastrackeddir(d):
708 if self._map.hastrackeddir(d):
687 break
709 break
688 entry = self._map.get(d)
710 entry = self._map.get(d)
689 if entry is not None and not entry.removed:
711 if entry is not None and not entry.removed:
690 msg = _(b'file %r in dirstate clashes with %r')
712 msg = _(b'file %r in dirstate clashes with %r')
691 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
713 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
692 raise error.Abort(msg)
714 raise error.Abort(msg)
693 self._check_sparse(filename)
715 self._check_sparse(filename)
694
716
695 def _check_sparse(self, filename):
717 def _check_sparse(self, filename):
696 """Check that a filename is inside the sparse profile"""
718 """Check that a filename is inside the sparse profile"""
697 sparsematch = self._sparsematcher
719 sparsematch = self._sparsematcher
698 if sparsematch is not None and not sparsematch.always():
720 if sparsematch is not None and not sparsematch.always():
699 if not sparsematch(filename):
721 if not sparsematch(filename):
700 msg = _(b"cannot add '%s' - it is outside the sparse checkout")
722 msg = _(b"cannot add '%s' - it is outside the sparse checkout")
701 hint = _(
723 hint = _(
702 b'include file with `hg debugsparse --include <pattern>` or use '
724 b'include file with `hg debugsparse --include <pattern>` or use '
703 b'`hg add -s <file>` to include file directory while adding'
725 b'`hg add -s <file>` to include file directory while adding'
704 )
726 )
705 raise error.Abort(msg % filename, hint=hint)
727 raise error.Abort(msg % filename, hint=hint)
706
728
707 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
729 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
708 if exists is None:
730 if exists is None:
709 exists = os.path.lexists(os.path.join(self._root, path))
731 exists = os.path.lexists(os.path.join(self._root, path))
710 if not exists:
732 if not exists:
711 # Maybe a path component exists
733 # Maybe a path component exists
712 if not ignoremissing and b'/' in path:
734 if not ignoremissing and b'/' in path:
713 d, f = path.rsplit(b'/', 1)
735 d, f = path.rsplit(b'/', 1)
714 d = self._normalize(d, False, ignoremissing, None)
736 d = self._normalize(d, False, ignoremissing, None)
715 folded = d + b"/" + f
737 folded = d + b"/" + f
716 else:
738 else:
717 # No path components, preserve original case
739 # No path components, preserve original case
718 folded = path
740 folded = path
719 else:
741 else:
720 # recursively normalize leading directory components
742 # recursively normalize leading directory components
721 # against dirstate
743 # against dirstate
722 if b'/' in normed:
744 if b'/' in normed:
723 d, f = normed.rsplit(b'/', 1)
745 d, f = normed.rsplit(b'/', 1)
724 d = self._normalize(d, False, ignoremissing, True)
746 d = self._normalize(d, False, ignoremissing, True)
725 r = self._root + b"/" + d
747 r = self._root + b"/" + d
726 folded = d + b"/" + util.fspath(f, r)
748 folded = d + b"/" + util.fspath(f, r)
727 else:
749 else:
728 folded = util.fspath(normed, self._root)
750 folded = util.fspath(normed, self._root)
729 storemap[normed] = folded
751 storemap[normed] = folded
730
752
731 return folded
753 return folded
732
754
733 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
755 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
734 normed = util.normcase(path)
756 normed = util.normcase(path)
735 folded = self._map.filefoldmap.get(normed, None)
757 folded = self._map.filefoldmap.get(normed, None)
736 if folded is None:
758 if folded is None:
737 if isknown:
759 if isknown:
738 folded = path
760 folded = path
739 else:
761 else:
740 folded = self._discoverpath(
762 folded = self._discoverpath(
741 path, normed, ignoremissing, exists, self._map.filefoldmap
763 path, normed, ignoremissing, exists, self._map.filefoldmap
742 )
764 )
743 return folded
765 return folded
744
766
745 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
767 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
746 normed = util.normcase(path)
768 normed = util.normcase(path)
747 folded = self._map.filefoldmap.get(normed, None)
769 folded = self._map.filefoldmap.get(normed, None)
748 if folded is None:
770 if folded is None:
749 folded = self._map.dirfoldmap.get(normed, None)
771 folded = self._map.dirfoldmap.get(normed, None)
750 if folded is None:
772 if folded is None:
751 if isknown:
773 if isknown:
752 folded = path
774 folded = path
753 else:
775 else:
754 # store discovered result in dirfoldmap so that future
776 # store discovered result in dirfoldmap so that future
755 # normalizefile calls don't start matching directories
777 # normalizefile calls don't start matching directories
756 folded = self._discoverpath(
778 folded = self._discoverpath(
757 path, normed, ignoremissing, exists, self._map.dirfoldmap
779 path, normed, ignoremissing, exists, self._map.dirfoldmap
758 )
780 )
759 return folded
781 return folded
760
782
761 def normalize(self, path, isknown=False, ignoremissing=False):
783 def normalize(self, path, isknown=False, ignoremissing=False):
762 """
784 """
763 normalize the case of a pathname when on a casefolding filesystem
785 normalize the case of a pathname when on a casefolding filesystem
764
786
765 isknown specifies whether the filename came from walking the
787 isknown specifies whether the filename came from walking the
766 disk, to avoid extra filesystem access.
788 disk, to avoid extra filesystem access.
767
789
768 If ignoremissing is True, missing path are returned
790 If ignoremissing is True, missing path are returned
769 unchanged. Otherwise, we try harder to normalize possibly
791 unchanged. Otherwise, we try harder to normalize possibly
770 existing path components.
792 existing path components.
771
793
772 The normalized case is determined based on the following precedence:
794 The normalized case is determined based on the following precedence:
773
795
774 - version of name already stored in the dirstate
796 - version of name already stored in the dirstate
775 - version of name stored on disk
797 - version of name stored on disk
776 - version provided via command arguments
798 - version provided via command arguments
777 """
799 """
778
800
779 if self._checkcase:
801 if self._checkcase:
780 return self._normalize(path, isknown, ignoremissing)
802 return self._normalize(path, isknown, ignoremissing)
781 return path
803 return path
782
804
783 def clear(self):
805 def clear(self):
784 self._map.clear()
806 self._map.clear()
785 self._dirty = True
807 self._dirty = True
786
808
787 def rebuild(self, parent, allfiles, changedfiles=None):
809 def rebuild(self, parent, allfiles, changedfiles=None):
788 matcher = self._sparsematcher
810 matcher = self._sparsematcher
789 if matcher is not None and not matcher.always():
811 if matcher is not None and not matcher.always():
790 # should not add non-matching files
812 # should not add non-matching files
791 allfiles = [f for f in allfiles if matcher(f)]
813 allfiles = [f for f in allfiles if matcher(f)]
792 if changedfiles:
814 if changedfiles:
793 changedfiles = [f for f in changedfiles if matcher(f)]
815 changedfiles = [f for f in changedfiles if matcher(f)]
794
816
795 if changedfiles is not None:
817 if changedfiles is not None:
796 # these files will be deleted from the dirstate when they are
818 # these files will be deleted from the dirstate when they are
797 # not found to be in allfiles
819 # not found to be in allfiles
798 dirstatefilestoremove = {f for f in self if not matcher(f)}
820 dirstatefilestoremove = {f for f in self if not matcher(f)}
799 changedfiles = dirstatefilestoremove.union(changedfiles)
821 changedfiles = dirstatefilestoremove.union(changedfiles)
800
822
801 if changedfiles is None:
823 if changedfiles is None:
802 # Rebuild entire dirstate
824 # Rebuild entire dirstate
803 to_lookup = allfiles
825 to_lookup = allfiles
804 to_drop = []
826 to_drop = []
805 self.clear()
827 self.clear()
806 elif len(changedfiles) < 10:
828 elif len(changedfiles) < 10:
807 # Avoid turning allfiles into a set, which can be expensive if it's
829 # Avoid turning allfiles into a set, which can be expensive if it's
808 # large.
830 # large.
809 to_lookup = []
831 to_lookup = []
810 to_drop = []
832 to_drop = []
811 for f in changedfiles:
833 for f in changedfiles:
812 if f in allfiles:
834 if f in allfiles:
813 to_lookup.append(f)
835 to_lookup.append(f)
814 else:
836 else:
815 to_drop.append(f)
837 to_drop.append(f)
816 else:
838 else:
817 changedfilesset = set(changedfiles)
839 changedfilesset = set(changedfiles)
818 to_lookup = changedfilesset & set(allfiles)
840 to_lookup = changedfilesset & set(allfiles)
819 to_drop = changedfilesset - to_lookup
841 to_drop = changedfilesset - to_lookup
820
842
821 if self._origpl is None:
843 if self._origpl is None:
822 self._origpl = self._pl
844 self._origpl = self._pl
823 self._map.setparents(parent, self._nodeconstants.nullid)
845 self._map.setparents(parent, self._nodeconstants.nullid)
824
846
825 for f in to_lookup:
847 for f in to_lookup:
826 if self.in_merge:
848 if self.in_merge:
827 self.set_tracked(f)
849 self.set_tracked(f)
828 else:
850 else:
829 self._map.reset_state(
851 self._map.reset_state(
830 f,
852 f,
831 wc_tracked=True,
853 wc_tracked=True,
832 p1_tracked=True,
854 p1_tracked=True,
833 )
855 )
834 for f in to_drop:
856 for f in to_drop:
835 self._map.reset_state(f)
857 self._map.reset_state(f)
836
858
837 self._dirty = True
859 self._dirty = True
838
860
839 def identity(self):
861 def identity(self):
840 """Return identity of dirstate itself to detect changing in storage
862 """Return identity of dirstate itself to detect changing in storage
841
863
842 If identity of previous dirstate is equal to this, writing
864 If identity of previous dirstate is equal to this, writing
843 changes based on the former dirstate out can keep consistency.
865 changes based on the former dirstate out can keep consistency.
844 """
866 """
845 return self._map.identity
867 return self._map.identity
846
868
847 def write(self, tr):
869 def write(self, tr):
848 if not self._dirty:
870 if not self._dirty:
849 return
871 return
850
872
851 write_key = self._use_tracked_hint and self._dirty_tracked_set
873 write_key = self._use_tracked_hint and self._dirty_tracked_set
852 if tr:
874 if tr:
853 # delay writing in-memory changes out
875 # delay writing in-memory changes out
854 tr.addfilegenerator(
876 tr.addfilegenerator(
855 b'dirstate-1-main',
877 b'dirstate-1-main',
856 (self._filename,),
878 (self._filename,),
857 lambda f: self._writedirstate(tr, f),
879 lambda f: self._writedirstate(tr, f),
858 location=b'plain',
880 location=b'plain',
859 post_finalize=True,
881 post_finalize=True,
860 )
882 )
861 if write_key:
883 if write_key:
862 tr.addfilegenerator(
884 tr.addfilegenerator(
863 b'dirstate-2-key-post',
885 b'dirstate-2-key-post',
864 (self._filename_th,),
886 (self._filename_th,),
865 lambda f: self._write_tracked_hint(tr, f),
887 lambda f: self._write_tracked_hint(tr, f),
866 location=b'plain',
888 location=b'plain',
867 post_finalize=True,
889 post_finalize=True,
868 )
890 )
869 return
891 return
870
892
871 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
893 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
872 with file(self._filename) as f:
894 with file(self._filename) as f:
873 self._writedirstate(tr, f)
895 self._writedirstate(tr, f)
874 if write_key:
896 if write_key:
875 # we update the key-file after writing to make sure reader have a
897 # we update the key-file after writing to make sure reader have a
876 # key that match the newly written content
898 # key that match the newly written content
877 with file(self._filename_th) as f:
899 with file(self._filename_th) as f:
878 self._write_tracked_hint(tr, f)
900 self._write_tracked_hint(tr, f)
879
901
880 def delete_tracked_hint(self):
902 def delete_tracked_hint(self):
881 """remove the tracked_hint file
903 """remove the tracked_hint file
882
904
883 To be used by format downgrades operation"""
905 To be used by format downgrades operation"""
884 self._opener.unlink(self._filename_th)
906 self._opener.unlink(self._filename_th)
885 self._use_tracked_hint = False
907 self._use_tracked_hint = False
886
908
887 def addparentchangecallback(self, category, callback):
909 def addparentchangecallback(self, category, callback):
888 """add a callback to be called when the wd parents are changed
910 """add a callback to be called when the wd parents are changed
889
911
890 Callback will be called with the following arguments:
912 Callback will be called with the following arguments:
891 dirstate, (oldp1, oldp2), (newp1, newp2)
913 dirstate, (oldp1, oldp2), (newp1, newp2)
892
914
893 Category is a unique identifier to allow overwriting an old callback
915 Category is a unique identifier to allow overwriting an old callback
894 with a newer callback.
916 with a newer callback.
895 """
917 """
896 self._plchangecallbacks[category] = callback
918 self._plchangecallbacks[category] = callback
897
919
898 def _writedirstate(self, tr, st):
920 def _writedirstate(self, tr, st):
899 # notify callbacks about parents change
921 # notify callbacks about parents change
900 if self._origpl is not None and self._origpl != self._pl:
922 if self._origpl is not None and self._origpl != self._pl:
901 for c, callback in sorted(self._plchangecallbacks.items()):
923 for c, callback in sorted(self._plchangecallbacks.items()):
902 callback(self, self._origpl, self._pl)
924 callback(self, self._origpl, self._pl)
903 self._origpl = None
925 self._origpl = None
904 self._map.write(tr, st)
926 self._map.write(tr, st)
905 self._dirty = False
927 self._dirty = False
906 self._dirty_tracked_set = False
928 self._dirty_tracked_set = False
907
929
908 def _write_tracked_hint(self, tr, f):
930 def _write_tracked_hint(self, tr, f):
909 key = node.hex(uuid.uuid4().bytes)
931 key = node.hex(uuid.uuid4().bytes)
910 f.write(b"1\n%s\n" % key) # 1 is the format version
932 f.write(b"1\n%s\n" % key) # 1 is the format version
911
933
912 def _dirignore(self, f):
934 def _dirignore(self, f):
913 if self._ignore(f):
935 if self._ignore(f):
914 return True
936 return True
915 for p in pathutil.finddirs(f):
937 for p in pathutil.finddirs(f):
916 if self._ignore(p):
938 if self._ignore(p):
917 return True
939 return True
918 return False
940 return False
919
941
920 def _ignorefiles(self):
942 def _ignorefiles(self):
921 files = []
943 files = []
922 if os.path.exists(self._join(b'.hgignore')):
944 if os.path.exists(self._join(b'.hgignore')):
923 files.append(self._join(b'.hgignore'))
945 files.append(self._join(b'.hgignore'))
924 for name, path in self._ui.configitems(b"ui"):
946 for name, path in self._ui.configitems(b"ui"):
925 if name == b'ignore' or name.startswith(b'ignore.'):
947 if name == b'ignore' or name.startswith(b'ignore.'):
926 # we need to use os.path.join here rather than self._join
948 # we need to use os.path.join here rather than self._join
927 # because path is arbitrary and user-specified
949 # because path is arbitrary and user-specified
928 files.append(os.path.join(self._rootdir, util.expandpath(path)))
950 files.append(os.path.join(self._rootdir, util.expandpath(path)))
929 return files
951 return files
930
952
931 def _ignorefileandline(self, f):
953 def _ignorefileandline(self, f):
932 files = collections.deque(self._ignorefiles())
954 files = collections.deque(self._ignorefiles())
933 visited = set()
955 visited = set()
934 while files:
956 while files:
935 i = files.popleft()
957 i = files.popleft()
936 patterns = matchmod.readpatternfile(
958 patterns = matchmod.readpatternfile(
937 i, self._ui.warn, sourceinfo=True
959 i, self._ui.warn, sourceinfo=True
938 )
960 )
939 for pattern, lineno, line in patterns:
961 for pattern, lineno, line in patterns:
940 kind, p = matchmod._patsplit(pattern, b'glob')
962 kind, p = matchmod._patsplit(pattern, b'glob')
941 if kind == b"subinclude":
963 if kind == b"subinclude":
942 if p not in visited:
964 if p not in visited:
943 files.append(p)
965 files.append(p)
944 continue
966 continue
945 m = matchmod.match(
967 m = matchmod.match(
946 self._root, b'', [], [pattern], warn=self._ui.warn
968 self._root, b'', [], [pattern], warn=self._ui.warn
947 )
969 )
948 if m(f):
970 if m(f):
949 return (i, lineno, line)
971 return (i, lineno, line)
950 visited.add(i)
972 visited.add(i)
951 return (None, -1, b"")
973 return (None, -1, b"")
952
974
953 def _walkexplicit(self, match, subrepos):
975 def _walkexplicit(self, match, subrepos):
954 """Get stat data about the files explicitly specified by match.
976 """Get stat data about the files explicitly specified by match.
955
977
956 Return a triple (results, dirsfound, dirsnotfound).
978 Return a triple (results, dirsfound, dirsnotfound).
957 - results is a mapping from filename to stat result. It also contains
979 - results is a mapping from filename to stat result. It also contains
958 listings mapping subrepos and .hg to None.
980 listings mapping subrepos and .hg to None.
959 - dirsfound is a list of files found to be directories.
981 - dirsfound is a list of files found to be directories.
960 - dirsnotfound is a list of files that the dirstate thinks are
982 - dirsnotfound is a list of files that the dirstate thinks are
961 directories and that were not found."""
983 directories and that were not found."""
962
984
963 def badtype(mode):
985 def badtype(mode):
964 kind = _(b'unknown')
986 kind = _(b'unknown')
965 if stat.S_ISCHR(mode):
987 if stat.S_ISCHR(mode):
966 kind = _(b'character device')
988 kind = _(b'character device')
967 elif stat.S_ISBLK(mode):
989 elif stat.S_ISBLK(mode):
968 kind = _(b'block device')
990 kind = _(b'block device')
969 elif stat.S_ISFIFO(mode):
991 elif stat.S_ISFIFO(mode):
970 kind = _(b'fifo')
992 kind = _(b'fifo')
971 elif stat.S_ISSOCK(mode):
993 elif stat.S_ISSOCK(mode):
972 kind = _(b'socket')
994 kind = _(b'socket')
973 elif stat.S_ISDIR(mode):
995 elif stat.S_ISDIR(mode):
974 kind = _(b'directory')
996 kind = _(b'directory')
975 return _(b'unsupported file type (type is %s)') % kind
997 return _(b'unsupported file type (type is %s)') % kind
976
998
977 badfn = match.bad
999 badfn = match.bad
978 dmap = self._map
1000 dmap = self._map
979 lstat = os.lstat
1001 lstat = os.lstat
980 getkind = stat.S_IFMT
1002 getkind = stat.S_IFMT
981 dirkind = stat.S_IFDIR
1003 dirkind = stat.S_IFDIR
982 regkind = stat.S_IFREG
1004 regkind = stat.S_IFREG
983 lnkkind = stat.S_IFLNK
1005 lnkkind = stat.S_IFLNK
984 join = self._join
1006 join = self._join
985 dirsfound = []
1007 dirsfound = []
986 foundadd = dirsfound.append
1008 foundadd = dirsfound.append
987 dirsnotfound = []
1009 dirsnotfound = []
988 notfoundadd = dirsnotfound.append
1010 notfoundadd = dirsnotfound.append
989
1011
990 if not match.isexact() and self._checkcase:
1012 if not match.isexact() and self._checkcase:
991 normalize = self._normalize
1013 normalize = self._normalize
992 else:
1014 else:
993 normalize = None
1015 normalize = None
994
1016
995 files = sorted(match.files())
1017 files = sorted(match.files())
996 subrepos.sort()
1018 subrepos.sort()
997 i, j = 0, 0
1019 i, j = 0, 0
998 while i < len(files) and j < len(subrepos):
1020 while i < len(files) and j < len(subrepos):
999 subpath = subrepos[j] + b"/"
1021 subpath = subrepos[j] + b"/"
1000 if files[i] < subpath:
1022 if files[i] < subpath:
1001 i += 1
1023 i += 1
1002 continue
1024 continue
1003 while i < len(files) and files[i].startswith(subpath):
1025 while i < len(files) and files[i].startswith(subpath):
1004 del files[i]
1026 del files[i]
1005 j += 1
1027 j += 1
1006
1028
1007 if not files or b'' in files:
1029 if not files or b'' in files:
1008 files = [b'']
1030 files = [b'']
1009 # constructing the foldmap is expensive, so don't do it for the
1031 # constructing the foldmap is expensive, so don't do it for the
1010 # common case where files is ['']
1032 # common case where files is ['']
1011 normalize = None
1033 normalize = None
1012 results = dict.fromkeys(subrepos)
1034 results = dict.fromkeys(subrepos)
1013 results[b'.hg'] = None
1035 results[b'.hg'] = None
1014
1036
1015 for ff in files:
1037 for ff in files:
1016 if normalize:
1038 if normalize:
1017 nf = normalize(ff, False, True)
1039 nf = normalize(ff, False, True)
1018 else:
1040 else:
1019 nf = ff
1041 nf = ff
1020 if nf in results:
1042 if nf in results:
1021 continue
1043 continue
1022
1044
1023 try:
1045 try:
1024 st = lstat(join(nf))
1046 st = lstat(join(nf))
1025 kind = getkind(st.st_mode)
1047 kind = getkind(st.st_mode)
1026 if kind == dirkind:
1048 if kind == dirkind:
1027 if nf in dmap:
1049 if nf in dmap:
1028 # file replaced by dir on disk but still in dirstate
1050 # file replaced by dir on disk but still in dirstate
1029 results[nf] = None
1051 results[nf] = None
1030 foundadd((nf, ff))
1052 foundadd((nf, ff))
1031 elif kind == regkind or kind == lnkkind:
1053 elif kind == regkind or kind == lnkkind:
1032 results[nf] = st
1054 results[nf] = st
1033 else:
1055 else:
1034 badfn(ff, badtype(kind))
1056 badfn(ff, badtype(kind))
1035 if nf in dmap:
1057 if nf in dmap:
1036 results[nf] = None
1058 results[nf] = None
1037 except (OSError) as inst:
1059 except (OSError) as inst:
1038 # nf not found on disk - it is dirstate only
1060 # nf not found on disk - it is dirstate only
1039 if nf in dmap: # does it exactly match a missing file?
1061 if nf in dmap: # does it exactly match a missing file?
1040 results[nf] = None
1062 results[nf] = None
1041 else: # does it match a missing directory?
1063 else: # does it match a missing directory?
1042 if self._map.hasdir(nf):
1064 if self._map.hasdir(nf):
1043 notfoundadd(nf)
1065 notfoundadd(nf)
1044 else:
1066 else:
1045 badfn(ff, encoding.strtolocal(inst.strerror))
1067 badfn(ff, encoding.strtolocal(inst.strerror))
1046
1068
1047 # match.files() may contain explicitly-specified paths that shouldn't
1069 # match.files() may contain explicitly-specified paths that shouldn't
1048 # be taken; drop them from the list of files found. dirsfound/notfound
1070 # be taken; drop them from the list of files found. dirsfound/notfound
1049 # aren't filtered here because they will be tested later.
1071 # aren't filtered here because they will be tested later.
1050 if match.anypats():
1072 if match.anypats():
1051 for f in list(results):
1073 for f in list(results):
1052 if f == b'.hg' or f in subrepos:
1074 if f == b'.hg' or f in subrepos:
1053 # keep sentinel to disable further out-of-repo walks
1075 # keep sentinel to disable further out-of-repo walks
1054 continue
1076 continue
1055 if not match(f):
1077 if not match(f):
1056 del results[f]
1078 del results[f]
1057
1079
1058 # Case insensitive filesystems cannot rely on lstat() failing to detect
1080 # Case insensitive filesystems cannot rely on lstat() failing to detect
1059 # a case-only rename. Prune the stat object for any file that does not
1081 # a case-only rename. Prune the stat object for any file that does not
1060 # match the case in the filesystem, if there are multiple files that
1082 # match the case in the filesystem, if there are multiple files that
1061 # normalize to the same path.
1083 # normalize to the same path.
1062 if match.isexact() and self._checkcase:
1084 if match.isexact() and self._checkcase:
1063 normed = {}
1085 normed = {}
1064
1086
1065 for f, st in results.items():
1087 for f, st in results.items():
1066 if st is None:
1088 if st is None:
1067 continue
1089 continue
1068
1090
1069 nc = util.normcase(f)
1091 nc = util.normcase(f)
1070 paths = normed.get(nc)
1092 paths = normed.get(nc)
1071
1093
1072 if paths is None:
1094 if paths is None:
1073 paths = set()
1095 paths = set()
1074 normed[nc] = paths
1096 normed[nc] = paths
1075
1097
1076 paths.add(f)
1098 paths.add(f)
1077
1099
1078 for norm, paths in normed.items():
1100 for norm, paths in normed.items():
1079 if len(paths) > 1:
1101 if len(paths) > 1:
1080 for path in paths:
1102 for path in paths:
1081 folded = self._discoverpath(
1103 folded = self._discoverpath(
1082 path, norm, True, None, self._map.dirfoldmap
1104 path, norm, True, None, self._map.dirfoldmap
1083 )
1105 )
1084 if path != folded:
1106 if path != folded:
1085 results[path] = None
1107 results[path] = None
1086
1108
1087 return results, dirsfound, dirsnotfound
1109 return results, dirsfound, dirsnotfound
1088
1110
1089 def walk(self, match, subrepos, unknown, ignored, full=True):
1111 def walk(self, match, subrepos, unknown, ignored, full=True):
1090 """
1112 """
1091 Walk recursively through the directory tree, finding all files
1113 Walk recursively through the directory tree, finding all files
1092 matched by match.
1114 matched by match.
1093
1115
1094 If full is False, maybe skip some known-clean files.
1116 If full is False, maybe skip some known-clean files.
1095
1117
1096 Return a dict mapping filename to stat-like object (either
1118 Return a dict mapping filename to stat-like object (either
1097 mercurial.osutil.stat instance or return value of os.stat()).
1119 mercurial.osutil.stat instance or return value of os.stat()).
1098
1120
1099 """
1121 """
1100 # full is a flag that extensions that hook into walk can use -- this
1122 # full is a flag that extensions that hook into walk can use -- this
1101 # implementation doesn't use it at all. This satisfies the contract
1123 # implementation doesn't use it at all. This satisfies the contract
1102 # because we only guarantee a "maybe".
1124 # because we only guarantee a "maybe".
1103
1125
1104 if ignored:
1126 if ignored:
1105 ignore = util.never
1127 ignore = util.never
1106 dirignore = util.never
1128 dirignore = util.never
1107 elif unknown:
1129 elif unknown:
1108 ignore = self._ignore
1130 ignore = self._ignore
1109 dirignore = self._dirignore
1131 dirignore = self._dirignore
1110 else:
1132 else:
1111 # if not unknown and not ignored, drop dir recursion and step 2
1133 # if not unknown and not ignored, drop dir recursion and step 2
1112 ignore = util.always
1134 ignore = util.always
1113 dirignore = util.always
1135 dirignore = util.always
1114
1136
1115 if self._sparsematchfn is not None:
1137 if self._sparsematchfn is not None:
1116 em = matchmod.exact(match.files())
1138 em = matchmod.exact(match.files())
1117 sm = matchmod.unionmatcher([self._sparsematcher, em])
1139 sm = matchmod.unionmatcher([self._sparsematcher, em])
1118 match = matchmod.intersectmatchers(match, sm)
1140 match = matchmod.intersectmatchers(match, sm)
1119
1141
1120 matchfn = match.matchfn
1142 matchfn = match.matchfn
1121 matchalways = match.always()
1143 matchalways = match.always()
1122 matchtdir = match.traversedir
1144 matchtdir = match.traversedir
1123 dmap = self._map
1145 dmap = self._map
1124 listdir = util.listdir
1146 listdir = util.listdir
1125 lstat = os.lstat
1147 lstat = os.lstat
1126 dirkind = stat.S_IFDIR
1148 dirkind = stat.S_IFDIR
1127 regkind = stat.S_IFREG
1149 regkind = stat.S_IFREG
1128 lnkkind = stat.S_IFLNK
1150 lnkkind = stat.S_IFLNK
1129 join = self._join
1151 join = self._join
1130
1152
1131 exact = skipstep3 = False
1153 exact = skipstep3 = False
1132 if match.isexact(): # match.exact
1154 if match.isexact(): # match.exact
1133 exact = True
1155 exact = True
1134 dirignore = util.always # skip step 2
1156 dirignore = util.always # skip step 2
1135 elif match.prefix(): # match.match, no patterns
1157 elif match.prefix(): # match.match, no patterns
1136 skipstep3 = True
1158 skipstep3 = True
1137
1159
1138 if not exact and self._checkcase:
1160 if not exact and self._checkcase:
1139 normalize = self._normalize
1161 normalize = self._normalize
1140 normalizefile = self._normalizefile
1162 normalizefile = self._normalizefile
1141 skipstep3 = False
1163 skipstep3 = False
1142 else:
1164 else:
1143 normalize = self._normalize
1165 normalize = self._normalize
1144 normalizefile = None
1166 normalizefile = None
1145
1167
1146 # step 1: find all explicit files
1168 # step 1: find all explicit files
1147 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1169 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1148 if matchtdir:
1170 if matchtdir:
1149 for d in work:
1171 for d in work:
1150 matchtdir(d[0])
1172 matchtdir(d[0])
1151 for d in dirsnotfound:
1173 for d in dirsnotfound:
1152 matchtdir(d)
1174 matchtdir(d)
1153
1175
1154 skipstep3 = skipstep3 and not (work or dirsnotfound)
1176 skipstep3 = skipstep3 and not (work or dirsnotfound)
1155 work = [d for d in work if not dirignore(d[0])]
1177 work = [d for d in work if not dirignore(d[0])]
1156
1178
1157 # step 2: visit subdirectories
1179 # step 2: visit subdirectories
1158 def traverse(work, alreadynormed):
1180 def traverse(work, alreadynormed):
1159 wadd = work.append
1181 wadd = work.append
1160 while work:
1182 while work:
1161 tracing.counter('dirstate.walk work', len(work))
1183 tracing.counter('dirstate.walk work', len(work))
1162 nd = work.pop()
1184 nd = work.pop()
1163 visitentries = match.visitchildrenset(nd)
1185 visitentries = match.visitchildrenset(nd)
1164 if not visitentries:
1186 if not visitentries:
1165 continue
1187 continue
1166 if visitentries == b'this' or visitentries == b'all':
1188 if visitentries == b'this' or visitentries == b'all':
1167 visitentries = None
1189 visitentries = None
1168 skip = None
1190 skip = None
1169 if nd != b'':
1191 if nd != b'':
1170 skip = b'.hg'
1192 skip = b'.hg'
1171 try:
1193 try:
1172 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1194 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1173 entries = listdir(join(nd), stat=True, skip=skip)
1195 entries = listdir(join(nd), stat=True, skip=skip)
1174 except (PermissionError, FileNotFoundError) as inst:
1196 except (PermissionError, FileNotFoundError) as inst:
1175 match.bad(
1197 match.bad(
1176 self.pathto(nd), encoding.strtolocal(inst.strerror)
1198 self.pathto(nd), encoding.strtolocal(inst.strerror)
1177 )
1199 )
1178 continue
1200 continue
1179 for f, kind, st in entries:
1201 for f, kind, st in entries:
1180 # Some matchers may return files in the visitentries set,
1202 # Some matchers may return files in the visitentries set,
1181 # instead of 'this', if the matcher explicitly mentions them
1203 # instead of 'this', if the matcher explicitly mentions them
1182 # and is not an exactmatcher. This is acceptable; we do not
1204 # and is not an exactmatcher. This is acceptable; we do not
1183 # make any hard assumptions about file-or-directory below
1205 # make any hard assumptions about file-or-directory below
1184 # based on the presence of `f` in visitentries. If
1206 # based on the presence of `f` in visitentries. If
1185 # visitchildrenset returned a set, we can always skip the
1207 # visitchildrenset returned a set, we can always skip the
1186 # entries *not* in the set it provided regardless of whether
1208 # entries *not* in the set it provided regardless of whether
1187 # they're actually a file or a directory.
1209 # they're actually a file or a directory.
1188 if visitentries and f not in visitentries:
1210 if visitentries and f not in visitentries:
1189 continue
1211 continue
1190 if normalizefile:
1212 if normalizefile:
1191 # even though f might be a directory, we're only
1213 # even though f might be a directory, we're only
1192 # interested in comparing it to files currently in the
1214 # interested in comparing it to files currently in the
1193 # dmap -- therefore normalizefile is enough
1215 # dmap -- therefore normalizefile is enough
1194 nf = normalizefile(
1216 nf = normalizefile(
1195 nd and (nd + b"/" + f) or f, True, True
1217 nd and (nd + b"/" + f) or f, True, True
1196 )
1218 )
1197 else:
1219 else:
1198 nf = nd and (nd + b"/" + f) or f
1220 nf = nd and (nd + b"/" + f) or f
1199 if nf not in results:
1221 if nf not in results:
1200 if kind == dirkind:
1222 if kind == dirkind:
1201 if not ignore(nf):
1223 if not ignore(nf):
1202 if matchtdir:
1224 if matchtdir:
1203 matchtdir(nf)
1225 matchtdir(nf)
1204 wadd(nf)
1226 wadd(nf)
1205 if nf in dmap and (matchalways or matchfn(nf)):
1227 if nf in dmap and (matchalways or matchfn(nf)):
1206 results[nf] = None
1228 results[nf] = None
1207 elif kind == regkind or kind == lnkkind:
1229 elif kind == regkind or kind == lnkkind:
1208 if nf in dmap:
1230 if nf in dmap:
1209 if matchalways or matchfn(nf):
1231 if matchalways or matchfn(nf):
1210 results[nf] = st
1232 results[nf] = st
1211 elif (matchalways or matchfn(nf)) and not ignore(
1233 elif (matchalways or matchfn(nf)) and not ignore(
1212 nf
1234 nf
1213 ):
1235 ):
1214 # unknown file -- normalize if necessary
1236 # unknown file -- normalize if necessary
1215 if not alreadynormed:
1237 if not alreadynormed:
1216 nf = normalize(nf, False, True)
1238 nf = normalize(nf, False, True)
1217 results[nf] = st
1239 results[nf] = st
1218 elif nf in dmap and (matchalways or matchfn(nf)):
1240 elif nf in dmap and (matchalways or matchfn(nf)):
1219 results[nf] = None
1241 results[nf] = None
1220
1242
1221 for nd, d in work:
1243 for nd, d in work:
1222 # alreadynormed means that processwork doesn't have to do any
1244 # alreadynormed means that processwork doesn't have to do any
1223 # expensive directory normalization
1245 # expensive directory normalization
1224 alreadynormed = not normalize or nd == d
1246 alreadynormed = not normalize or nd == d
1225 traverse([d], alreadynormed)
1247 traverse([d], alreadynormed)
1226
1248
1227 for s in subrepos:
1249 for s in subrepos:
1228 del results[s]
1250 del results[s]
1229 del results[b'.hg']
1251 del results[b'.hg']
1230
1252
1231 # step 3: visit remaining files from dmap
1253 # step 3: visit remaining files from dmap
1232 if not skipstep3 and not exact:
1254 if not skipstep3 and not exact:
1233 # If a dmap file is not in results yet, it was either
1255 # If a dmap file is not in results yet, it was either
1234 # a) not matching matchfn b) ignored, c) missing, or d) under a
1256 # a) not matching matchfn b) ignored, c) missing, or d) under a
1235 # symlink directory.
1257 # symlink directory.
1236 if not results and matchalways:
1258 if not results and matchalways:
1237 visit = [f for f in dmap]
1259 visit = [f for f in dmap]
1238 else:
1260 else:
1239 visit = [f for f in dmap if f not in results and matchfn(f)]
1261 visit = [f for f in dmap if f not in results and matchfn(f)]
1240 visit.sort()
1262 visit.sort()
1241
1263
1242 if unknown:
1264 if unknown:
1243 # unknown == True means we walked all dirs under the roots
1265 # unknown == True means we walked all dirs under the roots
1244 # that wasn't ignored, and everything that matched was stat'ed
1266 # that wasn't ignored, and everything that matched was stat'ed
1245 # and is already in results.
1267 # and is already in results.
1246 # The rest must thus be ignored or under a symlink.
1268 # The rest must thus be ignored or under a symlink.
1247 audit_path = pathutil.pathauditor(self._root, cached=True)
1269 audit_path = pathutil.pathauditor(self._root, cached=True)
1248
1270
1249 for nf in iter(visit):
1271 for nf in iter(visit):
1250 # If a stat for the same file was already added with a
1272 # If a stat for the same file was already added with a
1251 # different case, don't add one for this, since that would
1273 # different case, don't add one for this, since that would
1252 # make it appear as if the file exists under both names
1274 # make it appear as if the file exists under both names
1253 # on disk.
1275 # on disk.
1254 if (
1276 if (
1255 normalizefile
1277 normalizefile
1256 and normalizefile(nf, True, True) in results
1278 and normalizefile(nf, True, True) in results
1257 ):
1279 ):
1258 results[nf] = None
1280 results[nf] = None
1259 # Report ignored items in the dmap as long as they are not
1281 # Report ignored items in the dmap as long as they are not
1260 # under a symlink directory.
1282 # under a symlink directory.
1261 elif audit_path.check(nf):
1283 elif audit_path.check(nf):
1262 try:
1284 try:
1263 results[nf] = lstat(join(nf))
1285 results[nf] = lstat(join(nf))
1264 # file was just ignored, no links, and exists
1286 # file was just ignored, no links, and exists
1265 except OSError:
1287 except OSError:
1266 # file doesn't exist
1288 # file doesn't exist
1267 results[nf] = None
1289 results[nf] = None
1268 else:
1290 else:
1269 # It's either missing or under a symlink directory
1291 # It's either missing or under a symlink directory
1270 # which we in this case report as missing
1292 # which we in this case report as missing
1271 results[nf] = None
1293 results[nf] = None
1272 else:
1294 else:
1273 # We may not have walked the full directory tree above,
1295 # We may not have walked the full directory tree above,
1274 # so stat and check everything we missed.
1296 # so stat and check everything we missed.
1275 iv = iter(visit)
1297 iv = iter(visit)
1276 for st in util.statfiles([join(i) for i in visit]):
1298 for st in util.statfiles([join(i) for i in visit]):
1277 results[next(iv)] = st
1299 results[next(iv)] = st
1278 return results
1300 return results
1279
1301
1280 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1302 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1281 if self._sparsematchfn is not None:
1303 if self._sparsematchfn is not None:
1282 em = matchmod.exact(matcher.files())
1304 em = matchmod.exact(matcher.files())
1283 sm = matchmod.unionmatcher([self._sparsematcher, em])
1305 sm = matchmod.unionmatcher([self._sparsematcher, em])
1284 matcher = matchmod.intersectmatchers(matcher, sm)
1306 matcher = matchmod.intersectmatchers(matcher, sm)
1285 # Force Rayon (Rust parallelism library) to respect the number of
1307 # Force Rayon (Rust parallelism library) to respect the number of
1286 # workers. This is a temporary workaround until Rust code knows
1308 # workers. This is a temporary workaround until Rust code knows
1287 # how to read the config file.
1309 # how to read the config file.
1288 numcpus = self._ui.configint(b"worker", b"numcpus")
1310 numcpus = self._ui.configint(b"worker", b"numcpus")
1289 if numcpus is not None:
1311 if numcpus is not None:
1290 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1312 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1291
1313
1292 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1314 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1293 if not workers_enabled:
1315 if not workers_enabled:
1294 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1316 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1295
1317
1296 (
1318 (
1297 lookup,
1319 lookup,
1298 modified,
1320 modified,
1299 added,
1321 added,
1300 removed,
1322 removed,
1301 deleted,
1323 deleted,
1302 clean,
1324 clean,
1303 ignored,
1325 ignored,
1304 unknown,
1326 unknown,
1305 warnings,
1327 warnings,
1306 bad,
1328 bad,
1307 traversed,
1329 traversed,
1308 dirty,
1330 dirty,
1309 ) = rustmod.status(
1331 ) = rustmod.status(
1310 self._map._map,
1332 self._map._map,
1311 matcher,
1333 matcher,
1312 self._rootdir,
1334 self._rootdir,
1313 self._ignorefiles(),
1335 self._ignorefiles(),
1314 self._checkexec,
1336 self._checkexec,
1315 bool(list_clean),
1337 bool(list_clean),
1316 bool(list_ignored),
1338 bool(list_ignored),
1317 bool(list_unknown),
1339 bool(list_unknown),
1318 bool(matcher.traversedir),
1340 bool(matcher.traversedir),
1319 )
1341 )
1320
1342
1321 self._dirty |= dirty
1343 self._dirty |= dirty
1322
1344
1323 if matcher.traversedir:
1345 if matcher.traversedir:
1324 for dir in traversed:
1346 for dir in traversed:
1325 matcher.traversedir(dir)
1347 matcher.traversedir(dir)
1326
1348
1327 if self._ui.warn:
1349 if self._ui.warn:
1328 for item in warnings:
1350 for item in warnings:
1329 if isinstance(item, tuple):
1351 if isinstance(item, tuple):
1330 file_path, syntax = item
1352 file_path, syntax = item
1331 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1353 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1332 file_path,
1354 file_path,
1333 syntax,
1355 syntax,
1334 )
1356 )
1335 self._ui.warn(msg)
1357 self._ui.warn(msg)
1336 else:
1358 else:
1337 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1359 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1338 self._ui.warn(
1360 self._ui.warn(
1339 msg
1361 msg
1340 % (
1362 % (
1341 pathutil.canonpath(
1363 pathutil.canonpath(
1342 self._rootdir, self._rootdir, item
1364 self._rootdir, self._rootdir, item
1343 ),
1365 ),
1344 b"No such file or directory",
1366 b"No such file or directory",
1345 )
1367 )
1346 )
1368 )
1347
1369
1348 for fn, message in bad:
1370 for fn, message in bad:
1349 matcher.bad(fn, encoding.strtolocal(message))
1371 matcher.bad(fn, encoding.strtolocal(message))
1350
1372
1351 status = scmutil.status(
1373 status = scmutil.status(
1352 modified=modified,
1374 modified=modified,
1353 added=added,
1375 added=added,
1354 removed=removed,
1376 removed=removed,
1355 deleted=deleted,
1377 deleted=deleted,
1356 unknown=unknown,
1378 unknown=unknown,
1357 ignored=ignored,
1379 ignored=ignored,
1358 clean=clean,
1380 clean=clean,
1359 )
1381 )
1360 return (lookup, status)
1382 return (lookup, status)
1361
1383
1362 def status(self, match, subrepos, ignored, clean, unknown):
1384 def status(self, match, subrepos, ignored, clean, unknown):
1363 """Determine the status of the working copy relative to the
1385 """Determine the status of the working copy relative to the
1364 dirstate and return a pair of (unsure, status), where status is of type
1386 dirstate and return a pair of (unsure, status), where status is of type
1365 scmutil.status and:
1387 scmutil.status and:
1366
1388
1367 unsure:
1389 unsure:
1368 files that might have been modified since the dirstate was
1390 files that might have been modified since the dirstate was
1369 written, but need to be read to be sure (size is the same
1391 written, but need to be read to be sure (size is the same
1370 but mtime differs)
1392 but mtime differs)
1371 status.modified:
1393 status.modified:
1372 files that have definitely been modified since the dirstate
1394 files that have definitely been modified since the dirstate
1373 was written (different size or mode)
1395 was written (different size or mode)
1374 status.clean:
1396 status.clean:
1375 files that have definitely not been modified since the
1397 files that have definitely not been modified since the
1376 dirstate was written
1398 dirstate was written
1377 """
1399 """
1378 listignored, listclean, listunknown = ignored, clean, unknown
1400 listignored, listclean, listunknown = ignored, clean, unknown
1379 lookup, modified, added, unknown, ignored = [], [], [], [], []
1401 lookup, modified, added, unknown, ignored = [], [], [], [], []
1380 removed, deleted, clean = [], [], []
1402 removed, deleted, clean = [], [], []
1381
1403
1382 dmap = self._map
1404 dmap = self._map
1383 dmap.preload()
1405 dmap.preload()
1384
1406
1385 use_rust = True
1407 use_rust = True
1386
1408
1387 allowed_matchers = (
1409 allowed_matchers = (
1388 matchmod.alwaysmatcher,
1410 matchmod.alwaysmatcher,
1389 matchmod.differencematcher,
1411 matchmod.differencematcher,
1390 matchmod.exactmatcher,
1412 matchmod.exactmatcher,
1391 matchmod.includematcher,
1413 matchmod.includematcher,
1392 matchmod.intersectionmatcher,
1414 matchmod.intersectionmatcher,
1393 matchmod.nevermatcher,
1415 matchmod.nevermatcher,
1394 matchmod.unionmatcher,
1416 matchmod.unionmatcher,
1395 )
1417 )
1396
1418
1397 if rustmod is None:
1419 if rustmod is None:
1398 use_rust = False
1420 use_rust = False
1399 elif self._checkcase:
1421 elif self._checkcase:
1400 # Case-insensitive filesystems are not handled yet
1422 # Case-insensitive filesystems are not handled yet
1401 use_rust = False
1423 use_rust = False
1402 elif subrepos:
1424 elif subrepos:
1403 use_rust = False
1425 use_rust = False
1404 elif not isinstance(match, allowed_matchers):
1426 elif not isinstance(match, allowed_matchers):
1405 # Some matchers have yet to be implemented
1427 # Some matchers have yet to be implemented
1406 use_rust = False
1428 use_rust = False
1407
1429
1408 # Get the time from the filesystem so we can disambiguate files that
1430 # Get the time from the filesystem so we can disambiguate files that
1409 # appear modified in the present or future.
1431 # appear modified in the present or future.
1410 try:
1432 try:
1411 mtime_boundary = timestamp.get_fs_now(self._opener)
1433 mtime_boundary = timestamp.get_fs_now(self._opener)
1412 except OSError:
1434 except OSError:
1413 # In largefiles or readonly context
1435 # In largefiles or readonly context
1414 mtime_boundary = None
1436 mtime_boundary = None
1415
1437
1416 if use_rust:
1438 if use_rust:
1417 try:
1439 try:
1418 res = self._rust_status(
1440 res = self._rust_status(
1419 match, listclean, listignored, listunknown
1441 match, listclean, listignored, listunknown
1420 )
1442 )
1421 return res + (mtime_boundary,)
1443 return res + (mtime_boundary,)
1422 except rustmod.FallbackError:
1444 except rustmod.FallbackError:
1423 pass
1445 pass
1424
1446
1425 def noop(f):
1447 def noop(f):
1426 pass
1448 pass
1427
1449
1428 dcontains = dmap.__contains__
1450 dcontains = dmap.__contains__
1429 dget = dmap.__getitem__
1451 dget = dmap.__getitem__
1430 ladd = lookup.append # aka "unsure"
1452 ladd = lookup.append # aka "unsure"
1431 madd = modified.append
1453 madd = modified.append
1432 aadd = added.append
1454 aadd = added.append
1433 uadd = unknown.append if listunknown else noop
1455 uadd = unknown.append if listunknown else noop
1434 iadd = ignored.append if listignored else noop
1456 iadd = ignored.append if listignored else noop
1435 radd = removed.append
1457 radd = removed.append
1436 dadd = deleted.append
1458 dadd = deleted.append
1437 cadd = clean.append if listclean else noop
1459 cadd = clean.append if listclean else noop
1438 mexact = match.exact
1460 mexact = match.exact
1439 dirignore = self._dirignore
1461 dirignore = self._dirignore
1440 checkexec = self._checkexec
1462 checkexec = self._checkexec
1441 checklink = self._checklink
1463 checklink = self._checklink
1442 copymap = self._map.copymap
1464 copymap = self._map.copymap
1443
1465
1444 # We need to do full walks when either
1466 # We need to do full walks when either
1445 # - we're listing all clean files, or
1467 # - we're listing all clean files, or
1446 # - match.traversedir does something, because match.traversedir should
1468 # - match.traversedir does something, because match.traversedir should
1447 # be called for every dir in the working dir
1469 # be called for every dir in the working dir
1448 full = listclean or match.traversedir is not None
1470 full = listclean or match.traversedir is not None
1449 for fn, st in self.walk(
1471 for fn, st in self.walk(
1450 match, subrepos, listunknown, listignored, full=full
1472 match, subrepos, listunknown, listignored, full=full
1451 ).items():
1473 ).items():
1452 if not dcontains(fn):
1474 if not dcontains(fn):
1453 if (listignored or mexact(fn)) and dirignore(fn):
1475 if (listignored or mexact(fn)) and dirignore(fn):
1454 if listignored:
1476 if listignored:
1455 iadd(fn)
1477 iadd(fn)
1456 else:
1478 else:
1457 uadd(fn)
1479 uadd(fn)
1458 continue
1480 continue
1459
1481
1460 t = dget(fn)
1482 t = dget(fn)
1461 mode = t.mode
1483 mode = t.mode
1462 size = t.size
1484 size = t.size
1463
1485
1464 if not st and t.tracked:
1486 if not st and t.tracked:
1465 dadd(fn)
1487 dadd(fn)
1466 elif t.p2_info:
1488 elif t.p2_info:
1467 madd(fn)
1489 madd(fn)
1468 elif t.added:
1490 elif t.added:
1469 aadd(fn)
1491 aadd(fn)
1470 elif t.removed:
1492 elif t.removed:
1471 radd(fn)
1493 radd(fn)
1472 elif t.tracked:
1494 elif t.tracked:
1473 if not checklink and t.has_fallback_symlink:
1495 if not checklink and t.has_fallback_symlink:
1474 # If the file system does not support symlink, the mode
1496 # If the file system does not support symlink, the mode
1475 # might not be correctly stored in the dirstate, so do not
1497 # might not be correctly stored in the dirstate, so do not
1476 # trust it.
1498 # trust it.
1477 ladd(fn)
1499 ladd(fn)
1478 elif not checkexec and t.has_fallback_exec:
1500 elif not checkexec and t.has_fallback_exec:
1479 # If the file system does not support exec bits, the mode
1501 # If the file system does not support exec bits, the mode
1480 # might not be correctly stored in the dirstate, so do not
1502 # might not be correctly stored in the dirstate, so do not
1481 # trust it.
1503 # trust it.
1482 ladd(fn)
1504 ladd(fn)
1483 elif (
1505 elif (
1484 size >= 0
1506 size >= 0
1485 and (
1507 and (
1486 (size != st.st_size and size != st.st_size & _rangemask)
1508 (size != st.st_size and size != st.st_size & _rangemask)
1487 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1509 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1488 )
1510 )
1489 or fn in copymap
1511 or fn in copymap
1490 ):
1512 ):
1491 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1513 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1492 # issue6456: Size returned may be longer due to
1514 # issue6456: Size returned may be longer due to
1493 # encryption on EXT-4 fscrypt, undecided.
1515 # encryption on EXT-4 fscrypt, undecided.
1494 ladd(fn)
1516 ladd(fn)
1495 else:
1517 else:
1496 madd(fn)
1518 madd(fn)
1497 elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
1519 elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
1498 # There might be a change in the future if for example the
1520 # There might be a change in the future if for example the
1499 # internal clock is off, but this is a case where the issues
1521 # internal clock is off, but this is a case where the issues
1500 # the user would face would be a lot worse and there is
1522 # the user would face would be a lot worse and there is
1501 # nothing we can really do.
1523 # nothing we can really do.
1502 ladd(fn)
1524 ladd(fn)
1503 elif listclean:
1525 elif listclean:
1504 cadd(fn)
1526 cadd(fn)
1505 status = scmutil.status(
1527 status = scmutil.status(
1506 modified, added, removed, deleted, unknown, ignored, clean
1528 modified, added, removed, deleted, unknown, ignored, clean
1507 )
1529 )
1508 return (lookup, status, mtime_boundary)
1530 return (lookup, status, mtime_boundary)
1509
1531
1510 def matches(self, match):
1532 def matches(self, match):
1511 """
1533 """
1512 return files in the dirstate (in whatever state) filtered by match
1534 return files in the dirstate (in whatever state) filtered by match
1513 """
1535 """
1514 dmap = self._map
1536 dmap = self._map
1515 if rustmod is not None:
1537 if rustmod is not None:
1516 dmap = self._map._map
1538 dmap = self._map._map
1517
1539
1518 if match.always():
1540 if match.always():
1519 return dmap.keys()
1541 return dmap.keys()
1520 files = match.files()
1542 files = match.files()
1521 if match.isexact():
1543 if match.isexact():
1522 # fast path -- filter the other way around, since typically files is
1544 # fast path -- filter the other way around, since typically files is
1523 # much smaller than dmap
1545 # much smaller than dmap
1524 return [f for f in files if f in dmap]
1546 return [f for f in files if f in dmap]
1525 if match.prefix() and all(fn in dmap for fn in files):
1547 if match.prefix() and all(fn in dmap for fn in files):
1526 # fast path -- all the values are known to be files, so just return
1548 # fast path -- all the values are known to be files, so just return
1527 # that
1549 # that
1528 return list(files)
1550 return list(files)
1529 return [f for f in dmap if match(f)]
1551 return [f for f in dmap if match(f)]
1530
1552
1531 def _actualfilename(self, tr):
1553 def _actualfilename(self, tr):
1532 if tr:
1554 if tr:
1533 return self._pendingfilename
1555 return self._pendingfilename
1534 else:
1556 else:
1535 return self._filename
1557 return self._filename
1536
1558
1537 def data_backup_filename(self, backupname):
1559 def data_backup_filename(self, backupname):
1538 if not self._use_dirstate_v2:
1560 if not self._use_dirstate_v2:
1539 return None
1561 return None
1540 return backupname + b'.v2-data'
1562 return backupname + b'.v2-data'
1541
1563
1542 def _new_backup_data_filename(self, backupname):
1564 def _new_backup_data_filename(self, backupname):
1543 """return a filename to backup a data-file or None"""
1565 """return a filename to backup a data-file or None"""
1544 if not self._use_dirstate_v2:
1566 if not self._use_dirstate_v2:
1545 return None
1567 return None
1546 if self._map.docket.uuid is None:
1568 if self._map.docket.uuid is None:
1547 # not created yet, nothing to backup
1569 # not created yet, nothing to backup
1548 return None
1570 return None
1549 data_filename = self._map.docket.data_filename()
1571 data_filename = self._map.docket.data_filename()
1550 return data_filename, self.data_backup_filename(backupname)
1572 return data_filename, self.data_backup_filename(backupname)
1551
1573
1552 def backup_data_file(self, backupname):
1574 def backup_data_file(self, backupname):
1553 if not self._use_dirstate_v2:
1575 if not self._use_dirstate_v2:
1554 return None
1576 return None
1555 docket = docketmod.DirstateDocket.parse(
1577 docket = docketmod.DirstateDocket.parse(
1556 self._opener.read(backupname),
1578 self._opener.read(backupname),
1557 self._nodeconstants,
1579 self._nodeconstants,
1558 )
1580 )
1559 return self.data_backup_filename(backupname), docket.data_filename()
1581 return self.data_backup_filename(backupname), docket.data_filename()
1560
1582
    def savebackup(self, tr, backupname):
        '''Save current dirstate into backup file

        Flushes any dirty in-memory state to the live dirstate file first,
        then snapshots that file (and, for dirstate-v2, its data file) under
        ``backupname`` via hardlinks.

        tr: the current transaction, or None outside a transaction
        backupname: destination name for the backup, relative to the opener
        '''
        filename = self._actualfilename(tr)
        # backing a file up onto itself would clobber it
        assert backupname != filename

        # use '_writedirstate' instead of 'write' to write changes certainly,
        # because the latter omits writing out if transaction is running.
        # output file will be used to create backup of dirstate at this point.
        if self._dirty:
            self._writedirstate(
                tr,
                self._opener(filename, b"w", atomictemp=True, checkambig=True),
            )

        if tr:
            # ensure that subsequent tr.writepending returns True for
            # changes written out above, even if dirstate is never
            # changed after this
            tr.addfilegenerator(
                b'dirstate-1-main',
                (self._filename,),
                lambda f: self._writedirstate(tr, f),
                location=b'plain',
                post_finalize=True,
            )

        # drop any stale backup before creating the new one
        self._opener.tryunlink(backupname)
        if self._opener.exists(filename):
            # hardlink backup is okay because _writedirstate is always called
            # with an "atomictemp=True" file.
            util.copyfile(
                self._opener.join(filename),
                self._opener.join(backupname),
                hardlink=True,
            )
            # for dirstate-v2, the docket points at a separate data file that
            # must be backed up alongside it (None for dirstate-v1)
            data_pair = self._new_backup_data_filename(backupname)
            if data_pair is not None:
                data_filename, bck_data_filename = data_pair
                util.copyfile(
                    self._opener.join(data_filename),
                    self._opener.join(bck_data_filename),
                    hardlink=True,
                )
                if tr is not None:
                    # ensure that pending file written above is unlinked at
                    # failure, even if tr.writepending isn't invoked until the
                    # end of this transaction
                    tr.registertmp(bck_data_filename, location=b'plain')
1609
1631
    def restorebackup(self, tr, backupname):
        '''Restore dirstate by backup file

        Moves the backup created by ``savebackup`` back into place (or, if no
        backup exists, removes the current dirstate files), discarding any
        in-memory dirstate changes.

        tr: the current transaction, or None outside a transaction
        backupname: name of the backup to restore, relative to the opener
        '''
        # this "invalidate()" prevents "wlock.release()" from writing
        # changes of dirstate out after restoring from backup file
        self.invalidate()
        o = self._opener
        if not o.exists(backupname):
            # there was no file backup, delete existing files
            filename = self._actualfilename(tr)
            data_file = None
            if self._use_dirstate_v2:
                # dirstate-v2 keeps its entries in a separate data file
                data_file = self._map.docket.data_filename()
            if o.exists(filename):
                o.unlink(filename)
            if data_file is not None and o.exists(data_file):
                o.unlink(data_file)
            return
        filename = self._actualfilename(tr)
        data_pair = self.backup_data_file(backupname)
        if o.exists(filename) and util.samefile(
            o.join(backupname), o.join(filename)
        ):
            # backup was taken as a hardlink of the live file and the file was
            # never rewritten since: dropping the backup name is enough
            o.unlink(backupname)
        else:
            # checkambig guards against mtime ambiguity on the restored file
            o.rename(backupname, filename, checkambig=True)

        if data_pair is not None:
            # same hardlink-vs-rename logic for the dirstate-v2 data file
            data_backup, target = data_pair
            if o.exists(target) and util.samefile(
                o.join(data_backup), o.join(target)
            ):
                o.unlink(data_backup)
            else:
                o.rename(data_backup, target, checkambig=True)
1644
1666
1645 def clearbackup(self, tr, backupname):
1667 def clearbackup(self, tr, backupname):
1646 '''Clear backup file'''
1668 '''Clear backup file'''
1647 o = self._opener
1669 o = self._opener
1648 if o.exists(backupname):
1670 if o.exists(backupname):
1649 data_backup = self.backup_data_file(backupname)
1671 data_backup = self.backup_data_file(backupname)
1650 o.unlink(backupname)
1672 o.unlink(backupname)
1651 if data_backup is not None:
1673 if data_backup is not None:
1652 o.unlink(data_backup[0])
1674 o.unlink(data_backup[0])
1653
1675
1654 def verify(self, m1, m2, p1, narrow_matcher=None):
1676 def verify(self, m1, m2, p1, narrow_matcher=None):
1655 """
1677 """
1656 check the dirstate contents against the parent manifest and yield errors
1678 check the dirstate contents against the parent manifest and yield errors
1657 """
1679 """
1658 missing_from_p1 = _(
1680 missing_from_p1 = _(
1659 b"%s marked as tracked in p1 (%s) but not in manifest1\n"
1681 b"%s marked as tracked in p1 (%s) but not in manifest1\n"
1660 )
1682 )
1661 unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
1683 unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
1662 missing_from_ps = _(
1684 missing_from_ps = _(
1663 b"%s marked as modified, but not in either manifest\n"
1685 b"%s marked as modified, but not in either manifest\n"
1664 )
1686 )
1665 missing_from_ds = _(
1687 missing_from_ds = _(
1666 b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
1688 b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
1667 )
1689 )
1668 for f, entry in self.items():
1690 for f, entry in self.items():
1669 if entry.p1_tracked:
1691 if entry.p1_tracked:
1670 if entry.modified and f not in m1 and f not in m2:
1692 if entry.modified and f not in m1 and f not in m2:
1671 yield missing_from_ps % f
1693 yield missing_from_ps % f
1672 elif f not in m1:
1694 elif f not in m1:
1673 yield missing_from_p1 % (f, node.short(p1))
1695 yield missing_from_p1 % (f, node.short(p1))
1674 if entry.added and f in m1:
1696 if entry.added and f in m1:
1675 yield unexpected_in_p1 % f
1697 yield unexpected_in_p1 % f
1676 for f in m1:
1698 for f in m1:
1677 if narrow_matcher is not None and not narrow_matcher(f):
1699 if narrow_matcher is not None and not narrow_matcher(f):
1678 continue
1700 continue
1679 entry = self.get_entry(f)
1701 entry = self.get_entry(f)
1680 if not entry.p1_tracked:
1702 if not entry.p1_tracked:
1681 yield missing_from_ds % (f, node.short(p1))
1703 yield missing_from_ds % (f, node.short(p1))
General Comments 0
You need to be logged in to leave comments. Login now