##// END OF EJS Templates
dirstate: enforce the use of `changing_files` context to change tracking...
marmoute -
r50955:4f758b51 default
parent child Browse files
Show More
@@ -1,1722 +1,1735 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import collections
9 import collections
10 import contextlib
10 import contextlib
11 import os
11 import os
12 import stat
12 import stat
13 import uuid
13 import uuid
14
14
15 from .i18n import _
15 from .i18n import _
16 from .pycompat import delattr
16 from .pycompat import delattr
17
17
18 from hgdemandimport import tracing
18 from hgdemandimport import tracing
19
19
20 from . import (
20 from . import (
21 dirstatemap,
21 dirstatemap,
22 encoding,
22 encoding,
23 error,
23 error,
24 match as matchmod,
24 match as matchmod,
25 node,
25 node,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 util,
30 util,
31 )
31 )
32
32
33 from .dirstateutils import (
33 from .dirstateutils import (
34 docket as docketmod,
34 docket as docketmod,
35 timestamp,
35 timestamp,
36 )
36 )
37
37
38 from .interfaces import (
38 from .interfaces import (
39 dirstate as intdirstate,
39 dirstate as intdirstate,
40 util as interfaceutil,
40 util as interfaceutil,
41 )
41 )
42
42
# Load the optional native accelerator modules; each falls back to a pure
# Python implementation when the compiled version is unavailable.
parsers = policy.importmod('parsers')
rustmod = policy.importrust('dirstate')

# True when the fast Rust implementation of dirstate-v2 is usable.
HAS_FAST_DIRSTATE_V2 = rustmod is not None

# Local aliases for frequently used helpers from sibling modules.
propertycache = util.propertycache
filecache = scmutil.filecache
_rangemask = dirstatemap.rangemask

DirstateItem = dirstatemap.DirstateItem
53
53
54
54
class repocache(filecache):
    """A filecache whose tracked files live inside the ``.hg`` directory."""

    def join(self, obj, fname):
        # Resolve `fname` through the repository's .hg/ opener.
        return obj._opener.join(fname)
60
60
61
61
class rootcache(filecache):
    """A filecache whose tracked files live in the repository root."""

    def join(self, obj, fname):
        # Resolve `fname` relative to the working-directory root.
        return obj._join(fname)
67
67
68
68
def requires_changing_parents(func):
    """Decorator: assert that ``func`` runs inside a `changing_parents` context.

    Raises error.ProgrammingError when the method is called outside such a
    context, or after the dirstate was invalidated while a context is still
    open.
    """

    def wrap(self, *args, **kwargs):
        if not self.is_changing_parents:
            msg = 'calling `%s` outside of a changing_parents context'
            msg %= func.__name__
            raise error.ProgrammingError(msg)
        if self._invalidated_context:
            msg = 'calling `%s` after the dirstate was invalidated'
            # Bug fix: the %s placeholder was previously never substituted,
            # so the raised message contained a literal "%s".
            msg %= func.__name__
            raise error.ProgrammingError(msg)
        return func(self, *args, **kwargs)

    return wrap
81
81
82
82
def requires_changing_files(func):
    """Decorator: assert that ``func`` runs inside a `changing_files` context."""

    def wrap(self, *args, **kwargs):
        if not self.is_changing_files:
            raise error.ProgrammingError(
                'calling `%s` outside of a `changing_files`' % func.__name__
            )
        return func(self, *args, **kwargs)

    return wrap
92
93
def requires_not_changing_parents(func):
    """Decorator: forbid calling ``func`` from inside a `changing_parents` context."""

    def wrap(self, *args, **kwargs):
        if self.is_changing_parents:
            raise error.ProgrammingError(
                'calling `%s` inside of a changing_parents context'
                % func.__name__
            )
        return func(self, *args, **kwargs)

    return wrap
92
103
93
104
# Identifiers for the two mutually exclusive kinds of dirstate-changing
# contexts (see dirstate._changing); stored in dirstate._change_type.
CHANGE_TYPE_PARENTS = "parents"
CHANGE_TYPE_FILES = "files"
96
107
97
108
98 @interfaceutil.implementer(intdirstate.idirstate)
109 @interfaceutil.implementer(intdirstate.idirstate)
99 class dirstate:
110 class dirstate:
100 def __init__(
111 def __init__(
101 self,
112 self,
102 opener,
113 opener,
103 ui,
114 ui,
104 root,
115 root,
105 validate,
116 validate,
106 sparsematchfn,
117 sparsematchfn,
107 nodeconstants,
118 nodeconstants,
108 use_dirstate_v2,
119 use_dirstate_v2,
109 use_tracked_hint=False,
120 use_tracked_hint=False,
110 ):
121 ):
111 """Create a new dirstate object.
122 """Create a new dirstate object.
112
123
113 opener is an open()-like callable that can be used to open the
124 opener is an open()-like callable that can be used to open the
114 dirstate file; root is the root of the directory tracked by
125 dirstate file; root is the root of the directory tracked by
115 the dirstate.
126 the dirstate.
116 """
127 """
117 self._use_dirstate_v2 = use_dirstate_v2
128 self._use_dirstate_v2 = use_dirstate_v2
118 self._use_tracked_hint = use_tracked_hint
129 self._use_tracked_hint = use_tracked_hint
119 self._nodeconstants = nodeconstants
130 self._nodeconstants = nodeconstants
120 self._opener = opener
131 self._opener = opener
121 self._validate = validate
132 self._validate = validate
122 self._root = root
133 self._root = root
123 # Either build a sparse-matcher or None if sparse is disabled
134 # Either build a sparse-matcher or None if sparse is disabled
124 self._sparsematchfn = sparsematchfn
135 self._sparsematchfn = sparsematchfn
125 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
136 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
126 # UNC path pointing to root share (issue4557)
137 # UNC path pointing to root share (issue4557)
127 self._rootdir = pathutil.normasprefix(root)
138 self._rootdir = pathutil.normasprefix(root)
128 # True is any internal state may be different
139 # True is any internal state may be different
129 self._dirty = False
140 self._dirty = False
130 # True if the set of tracked file may be different
141 # True if the set of tracked file may be different
131 self._dirty_tracked_set = False
142 self._dirty_tracked_set = False
132 self._ui = ui
143 self._ui = ui
133 self._filecache = {}
144 self._filecache = {}
134 # nesting level of `changing_parents` context
145 # nesting level of `changing_parents` context
135 self._changing_level = 0
146 self._changing_level = 0
136 # the change currently underway
147 # the change currently underway
137 self._change_type = None
148 self._change_type = None
138 # True if the current dirstate changing operations have been
149 # True if the current dirstate changing operations have been
139 # invalidated (used to make sure all nested contexts have been exited)
150 # invalidated (used to make sure all nested contexts have been exited)
140 self._invalidated_context = False
151 self._invalidated_context = False
141 self._filename = b'dirstate'
152 self._filename = b'dirstate'
142 self._filename_th = b'dirstate-tracked-hint'
153 self._filename_th = b'dirstate-tracked-hint'
143 self._pendingfilename = b'%s.pending' % self._filename
154 self._pendingfilename = b'%s.pending' % self._filename
144 self._plchangecallbacks = {}
155 self._plchangecallbacks = {}
145 self._origpl = None
156 self._origpl = None
146 self._mapcls = dirstatemap.dirstatemap
157 self._mapcls = dirstatemap.dirstatemap
147 # Access and cache cwd early, so we don't access it for the first time
158 # Access and cache cwd early, so we don't access it for the first time
148 # after a working-copy update caused it to not exist (accessing it then
159 # after a working-copy update caused it to not exist (accessing it then
149 # raises an exception).
160 # raises an exception).
150 self._cwd
161 self._cwd
151
162
152 def prefetch_parents(self):
163 def prefetch_parents(self):
153 """make sure the parents are loaded
164 """make sure the parents are loaded
154
165
155 Used to avoid a race condition.
166 Used to avoid a race condition.
156 """
167 """
157 self._pl
168 self._pl
158
169
159 @contextlib.contextmanager
170 @contextlib.contextmanager
160 def _changing(self, repo, change_type):
171 def _changing(self, repo, change_type):
161 if repo.currentwlock() is None:
172 if repo.currentwlock() is None:
162 msg = b"trying to change the dirstate without holding the wlock"
173 msg = b"trying to change the dirstate without holding the wlock"
163 raise error.ProgrammingError(msg)
174 raise error.ProgrammingError(msg)
164 if self._invalidated_context:
175 if self._invalidated_context:
165 msg = "trying to use an invalidated dirstate before it has reset"
176 msg = "trying to use an invalidated dirstate before it has reset"
166 raise error.ProgrammingError(msg)
177 raise error.ProgrammingError(msg)
167
178
168 # different type of change are mutually exclusive
179 # different type of change are mutually exclusive
169 if self._change_type is None:
180 if self._change_type is None:
170 assert self._changing_level == 0
181 assert self._changing_level == 0
171 self._change_type = change_type
182 self._change_type = change_type
172 elif self._change_type != change_type:
183 elif self._change_type != change_type:
173 msg = (
184 msg = (
174 'trying to open "%s" dirstate-changing context while a "%s" is'
185 'trying to open "%s" dirstate-changing context while a "%s" is'
175 ' already open'
186 ' already open'
176 )
187 )
177 msg %= (change_type, self._change_type)
188 msg %= (change_type, self._change_type)
178 raise error.ProgrammingError(msg)
189 raise error.ProgrammingError(msg)
179 self._changing_level += 1
190 self._changing_level += 1
180 try:
191 try:
181 yield
192 yield
182 except Exception:
193 except Exception:
183 self.invalidate()
194 self.invalidate()
184 raise
195 raise
185 finally:
196 finally:
186 if self._changing_level > 0:
197 if self._changing_level > 0:
187 if self._invalidated_context:
198 if self._invalidated_context:
188 # make sure we invalidate anything an upper context might
199 # make sure we invalidate anything an upper context might
189 # have changed.
200 # have changed.
190 self.invalidate()
201 self.invalidate()
191 self._changing_level -= 1
202 self._changing_level -= 1
192 # The invalidation is complete once we exit the final context
203 # The invalidation is complete once we exit the final context
193 # manager
204 # manager
194 if self._changing_level <= 0:
205 if self._changing_level <= 0:
195 self._change_type = None
206 self._change_type = None
196 assert self._changing_level == 0
207 assert self._changing_level == 0
197 if self._invalidated_context:
208 if self._invalidated_context:
198 self._invalidated_context = False
209 self._invalidated_context = False
199 else:
210 else:
200 # When an exception occured, `_invalidated_context`
211 # When an exception occured, `_invalidated_context`
201 # would have been set to True by the `invalidate`
212 # would have been set to True by the `invalidate`
202 # call earlier.
213 # call earlier.
203 #
214 #
204 # We don't have more straightforward code, because the
215 # We don't have more straightforward code, because the
205 # Exception catching (and the associated `invalidate`
216 # Exception catching (and the associated `invalidate`
206 # calling) might have been called by a nested context
217 # calling) might have been called by a nested context
207 # instead of the top level one.
218 # instead of the top level one.
208 tr = repo.currenttransaction()
219 tr = repo.currenttransaction()
209 if tr is not None:
220 if tr is not None:
210 abort_cb = lambda tr: self.invalidate()
221 abort_cb = lambda tr: self.invalidate()
211 tr.addabort(b'dirstate', abort_cb)
222 tr.addabort(b'dirstate', abort_cb)
212 self.write(tr)
223 self.write(tr)
213
224
214 @contextlib.contextmanager
225 @contextlib.contextmanager
215 def changing_parents(self, repo):
226 def changing_parents(self, repo):
216 with self._changing(repo, CHANGE_TYPE_PARENTS) as c:
227 with self._changing(repo, CHANGE_TYPE_PARENTS) as c:
217 yield c
228 yield c
218
229
219 @contextlib.contextmanager
230 @contextlib.contextmanager
220 def changing_files(self, repo):
231 def changing_files(self, repo):
221 with self._changing(repo, CHANGE_TYPE_FILES) as c:
232 with self._changing(repo, CHANGE_TYPE_FILES) as c:
222 yield c
233 yield c
223
234
224 # here to help migration to the new code
235 # here to help migration to the new code
225 def parentchange(self):
236 def parentchange(self):
226 msg = (
237 msg = (
227 "Mercurial 6.4 and later requires call to "
238 "Mercurial 6.4 and later requires call to "
228 "`dirstate.changing_parents(repo)`"
239 "`dirstate.changing_parents(repo)`"
229 )
240 )
230 raise error.ProgrammingError(msg)
241 raise error.ProgrammingError(msg)
231
242
232 @property
243 @property
233 def is_changing_any(self):
244 def is_changing_any(self):
234 """Returns true if the dirstate is in the middle of a set of changes.
245 """Returns true if the dirstate is in the middle of a set of changes.
235
246
236 This returns True for any kind of change.
247 This returns True for any kind of change.
237 """
248 """
238 return self._changing_level > 0
249 return self._changing_level > 0
239
250
240 def pendingparentchange(self):
251 def pendingparentchange(self):
241 return self.is_changing_parent()
252 return self.is_changing_parent()
242
253
243 def is_changing_parent(self):
254 def is_changing_parent(self):
244 """Returns true if the dirstate is in the middle of a set of changes
255 """Returns true if the dirstate is in the middle of a set of changes
245 that modify the dirstate parent.
256 that modify the dirstate parent.
246 """
257 """
247 self._ui.deprecwarn(b"dirstate.is_changing_parents", b"6.5")
258 self._ui.deprecwarn(b"dirstate.is_changing_parents", b"6.5")
248 return self.is_changing_parents
259 return self.is_changing_parents
249
260
250 @property
261 @property
251 def is_changing_parents(self):
262 def is_changing_parents(self):
252 """Returns true if the dirstate is in the middle of a set of changes
263 """Returns true if the dirstate is in the middle of a set of changes
253 that modify the dirstate parent.
264 that modify the dirstate parent.
254 """
265 """
255 if self._changing_level <= 0:
266 if self._changing_level <= 0:
256 return False
267 return False
257 return self._change_type == CHANGE_TYPE_PARENTS
268 return self._change_type == CHANGE_TYPE_PARENTS
258
269
259 @property
270 @property
260 def is_changing_files(self):
271 def is_changing_files(self):
261 """Returns true if the dirstate is in the middle of a set of changes
272 """Returns true if the dirstate is in the middle of a set of changes
262 that modify the files tracked or their sources.
273 that modify the files tracked or their sources.
263 """
274 """
264 if self._changing_level <= 0:
275 if self._changing_level <= 0:
265 return False
276 return False
266 return self._change_type == CHANGE_TYPE_FILES
277 return self._change_type == CHANGE_TYPE_FILES
267
278
268 @propertycache
279 @propertycache
269 def _map(self):
280 def _map(self):
270 """Return the dirstate contents (see documentation for dirstatemap)."""
281 """Return the dirstate contents (see documentation for dirstatemap)."""
271 self._map = self._mapcls(
282 self._map = self._mapcls(
272 self._ui,
283 self._ui,
273 self._opener,
284 self._opener,
274 self._root,
285 self._root,
275 self._nodeconstants,
286 self._nodeconstants,
276 self._use_dirstate_v2,
287 self._use_dirstate_v2,
277 )
288 )
278 return self._map
289 return self._map
279
290
280 @property
291 @property
281 def _sparsematcher(self):
292 def _sparsematcher(self):
282 """The matcher for the sparse checkout.
293 """The matcher for the sparse checkout.
283
294
284 The working directory may not include every file from a manifest. The
295 The working directory may not include every file from a manifest. The
285 matcher obtained by this property will match a path if it is to be
296 matcher obtained by this property will match a path if it is to be
286 included in the working directory.
297 included in the working directory.
287
298
288 When sparse if disabled, return None.
299 When sparse if disabled, return None.
289 """
300 """
290 if self._sparsematchfn is None:
301 if self._sparsematchfn is None:
291 return None
302 return None
292 # TODO there is potential to cache this property. For now, the matcher
303 # TODO there is potential to cache this property. For now, the matcher
293 # is resolved on every access. (But the called function does use a
304 # is resolved on every access. (But the called function does use a
294 # cache to keep the lookup fast.)
305 # cache to keep the lookup fast.)
295 return self._sparsematchfn()
306 return self._sparsematchfn()
296
307
297 @repocache(b'branch')
308 @repocache(b'branch')
298 def _branch(self):
309 def _branch(self):
299 try:
310 try:
300 return self._opener.read(b"branch").strip() or b"default"
311 return self._opener.read(b"branch").strip() or b"default"
301 except FileNotFoundError:
312 except FileNotFoundError:
302 return b"default"
313 return b"default"
303
314
304 @property
315 @property
305 def _pl(self):
316 def _pl(self):
306 return self._map.parents()
317 return self._map.parents()
307
318
308 def hasdir(self, d):
319 def hasdir(self, d):
309 return self._map.hastrackeddir(d)
320 return self._map.hastrackeddir(d)
310
321
311 @rootcache(b'.hgignore')
322 @rootcache(b'.hgignore')
312 def _ignore(self):
323 def _ignore(self):
313 files = self._ignorefiles()
324 files = self._ignorefiles()
314 if not files:
325 if not files:
315 return matchmod.never()
326 return matchmod.never()
316
327
317 pats = [b'include:%s' % f for f in files]
328 pats = [b'include:%s' % f for f in files]
318 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
329 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
319
330
320 @propertycache
331 @propertycache
321 def _slash(self):
332 def _slash(self):
322 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
333 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
323
334
324 @propertycache
335 @propertycache
325 def _checklink(self):
336 def _checklink(self):
326 return util.checklink(self._root)
337 return util.checklink(self._root)
327
338
328 @propertycache
339 @propertycache
329 def _checkexec(self):
340 def _checkexec(self):
330 return bool(util.checkexec(self._root))
341 return bool(util.checkexec(self._root))
331
342
332 @propertycache
343 @propertycache
333 def _checkcase(self):
344 def _checkcase(self):
334 return not util.fscasesensitive(self._join(b'.hg'))
345 return not util.fscasesensitive(self._join(b'.hg'))
335
346
336 def _join(self, f):
347 def _join(self, f):
337 # much faster than os.path.join()
348 # much faster than os.path.join()
338 # it's safe because f is always a relative path
349 # it's safe because f is always a relative path
339 return self._rootdir + f
350 return self._rootdir + f
340
351
341 def flagfunc(self, buildfallback):
352 def flagfunc(self, buildfallback):
342 """build a callable that returns flags associated with a filename
353 """build a callable that returns flags associated with a filename
343
354
344 The information is extracted from three possible layers:
355 The information is extracted from three possible layers:
345 1. the file system if it supports the information
356 1. the file system if it supports the information
346 2. the "fallback" information stored in the dirstate if any
357 2. the "fallback" information stored in the dirstate if any
347 3. a more expensive mechanism inferring the flags from the parents.
358 3. a more expensive mechanism inferring the flags from the parents.
348 """
359 """
349
360
350 # small hack to cache the result of buildfallback()
361 # small hack to cache the result of buildfallback()
351 fallback_func = []
362 fallback_func = []
352
363
353 def get_flags(x):
364 def get_flags(x):
354 entry = None
365 entry = None
355 fallback_value = None
366 fallback_value = None
356 try:
367 try:
357 st = os.lstat(self._join(x))
368 st = os.lstat(self._join(x))
358 except OSError:
369 except OSError:
359 return b''
370 return b''
360
371
361 if self._checklink:
372 if self._checklink:
362 if util.statislink(st):
373 if util.statislink(st):
363 return b'l'
374 return b'l'
364 else:
375 else:
365 entry = self.get_entry(x)
376 entry = self.get_entry(x)
366 if entry.has_fallback_symlink:
377 if entry.has_fallback_symlink:
367 if entry.fallback_symlink:
378 if entry.fallback_symlink:
368 return b'l'
379 return b'l'
369 else:
380 else:
370 if not fallback_func:
381 if not fallback_func:
371 fallback_func.append(buildfallback())
382 fallback_func.append(buildfallback())
372 fallback_value = fallback_func[0](x)
383 fallback_value = fallback_func[0](x)
373 if b'l' in fallback_value:
384 if b'l' in fallback_value:
374 return b'l'
385 return b'l'
375
386
376 if self._checkexec:
387 if self._checkexec:
377 if util.statisexec(st):
388 if util.statisexec(st):
378 return b'x'
389 return b'x'
379 else:
390 else:
380 if entry is None:
391 if entry is None:
381 entry = self.get_entry(x)
392 entry = self.get_entry(x)
382 if entry.has_fallback_exec:
393 if entry.has_fallback_exec:
383 if entry.fallback_exec:
394 if entry.fallback_exec:
384 return b'x'
395 return b'x'
385 else:
396 else:
386 if fallback_value is None:
397 if fallback_value is None:
387 if not fallback_func:
398 if not fallback_func:
388 fallback_func.append(buildfallback())
399 fallback_func.append(buildfallback())
389 fallback_value = fallback_func[0](x)
400 fallback_value = fallback_func[0](x)
390 if b'x' in fallback_value:
401 if b'x' in fallback_value:
391 return b'x'
402 return b'x'
392 return b''
403 return b''
393
404
394 return get_flags
405 return get_flags
395
406
396 @propertycache
407 @propertycache
397 def _cwd(self):
408 def _cwd(self):
398 # internal config: ui.forcecwd
409 # internal config: ui.forcecwd
399 forcecwd = self._ui.config(b'ui', b'forcecwd')
410 forcecwd = self._ui.config(b'ui', b'forcecwd')
400 if forcecwd:
411 if forcecwd:
401 return forcecwd
412 return forcecwd
402 return encoding.getcwd()
413 return encoding.getcwd()
403
414
404 def getcwd(self):
415 def getcwd(self):
405 """Return the path from which a canonical path is calculated.
416 """Return the path from which a canonical path is calculated.
406
417
407 This path should be used to resolve file patterns or to convert
418 This path should be used to resolve file patterns or to convert
408 canonical paths back to file paths for display. It shouldn't be
419 canonical paths back to file paths for display. It shouldn't be
409 used to get real file paths. Use vfs functions instead.
420 used to get real file paths. Use vfs functions instead.
410 """
421 """
411 cwd = self._cwd
422 cwd = self._cwd
412 if cwd == self._root:
423 if cwd == self._root:
413 return b''
424 return b''
414 # self._root ends with a path separator if self._root is '/' or 'C:\'
425 # self._root ends with a path separator if self._root is '/' or 'C:\'
415 rootsep = self._root
426 rootsep = self._root
416 if not util.endswithsep(rootsep):
427 if not util.endswithsep(rootsep):
417 rootsep += pycompat.ossep
428 rootsep += pycompat.ossep
418 if cwd.startswith(rootsep):
429 if cwd.startswith(rootsep):
419 return cwd[len(rootsep) :]
430 return cwd[len(rootsep) :]
420 else:
431 else:
421 # we're outside the repo. return an absolute path.
432 # we're outside the repo. return an absolute path.
422 return cwd
433 return cwd
423
434
424 def pathto(self, f, cwd=None):
435 def pathto(self, f, cwd=None):
425 if cwd is None:
436 if cwd is None:
426 cwd = self.getcwd()
437 cwd = self.getcwd()
427 path = util.pathto(self._root, cwd, f)
438 path = util.pathto(self._root, cwd, f)
428 if self._slash:
439 if self._slash:
429 return util.pconvert(path)
440 return util.pconvert(path)
430 return path
441 return path
431
442
432 def get_entry(self, path):
443 def get_entry(self, path):
433 """return a DirstateItem for the associated path"""
444 """return a DirstateItem for the associated path"""
434 entry = self._map.get(path)
445 entry = self._map.get(path)
435 if entry is None:
446 if entry is None:
436 return DirstateItem()
447 return DirstateItem()
437 return entry
448 return entry
438
449
439 def __contains__(self, key):
450 def __contains__(self, key):
440 return key in self._map
451 return key in self._map
441
452
442 def __iter__(self):
453 def __iter__(self):
443 return iter(sorted(self._map))
454 return iter(sorted(self._map))
444
455
445 def items(self):
456 def items(self):
446 return self._map.items()
457 return self._map.items()
447
458
448 iteritems = items
459 iteritems = items
449
460
450 def parents(self):
461 def parents(self):
451 return [self._validate(p) for p in self._pl]
462 return [self._validate(p) for p in self._pl]
452
463
453 def p1(self):
464 def p1(self):
454 return self._validate(self._pl[0])
465 return self._validate(self._pl[0])
455
466
456 def p2(self):
467 def p2(self):
457 return self._validate(self._pl[1])
468 return self._validate(self._pl[1])
458
469
459 @property
470 @property
460 def in_merge(self):
471 def in_merge(self):
461 """True if a merge is in progress"""
472 """True if a merge is in progress"""
462 return self._pl[1] != self._nodeconstants.nullid
473 return self._pl[1] != self._nodeconstants.nullid
463
474
464 def branch(self):
475 def branch(self):
465 return encoding.tolocal(self._branch)
476 return encoding.tolocal(self._branch)
466
477
467 def setparents(self, p1, p2=None):
478 def setparents(self, p1, p2=None):
468 """Set dirstate parents to p1 and p2.
479 """Set dirstate parents to p1 and p2.
469
480
470 When moving from two parents to one, "merged" entries a
481 When moving from two parents to one, "merged" entries a
471 adjusted to normal and previous copy records discarded and
482 adjusted to normal and previous copy records discarded and
472 returned by the call.
483 returned by the call.
473
484
474 See localrepo.setparents()
485 See localrepo.setparents()
475 """
486 """
476 if p2 is None:
487 if p2 is None:
477 p2 = self._nodeconstants.nullid
488 p2 = self._nodeconstants.nullid
478 if self._changing_level == 0:
489 if self._changing_level == 0:
479 raise ValueError(
490 raise ValueError(
480 b"cannot set dirstate parent outside of "
491 b"cannot set dirstate parent outside of "
481 b"dirstate.changing_parents context manager"
492 b"dirstate.changing_parents context manager"
482 )
493 )
483
494
484 self._dirty = True
495 self._dirty = True
485 oldp2 = self._pl[1]
496 oldp2 = self._pl[1]
486 if self._origpl is None:
497 if self._origpl is None:
487 self._origpl = self._pl
498 self._origpl = self._pl
488 nullid = self._nodeconstants.nullid
499 nullid = self._nodeconstants.nullid
489 # True if we need to fold p2 related state back to a linear case
500 # True if we need to fold p2 related state back to a linear case
490 fold_p2 = oldp2 != nullid and p2 == nullid
501 fold_p2 = oldp2 != nullid and p2 == nullid
491 return self._map.setparents(p1, p2, fold_p2=fold_p2)
502 return self._map.setparents(p1, p2, fold_p2=fold_p2)
492
503
493 def setbranch(self, branch):
504 def setbranch(self, branch):
494 self.__class__._branch.set(self, encoding.fromlocal(branch))
505 self.__class__._branch.set(self, encoding.fromlocal(branch))
495 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
506 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
496 try:
507 try:
497 f.write(self._branch + b'\n')
508 f.write(self._branch + b'\n')
498 f.close()
509 f.close()
499
510
500 # make sure filecache has the correct stat info for _branch after
511 # make sure filecache has the correct stat info for _branch after
501 # replacing the underlying file
512 # replacing the underlying file
502 ce = self._filecache[b'_branch']
513 ce = self._filecache[b'_branch']
503 if ce:
514 if ce:
504 ce.refresh()
515 ce.refresh()
505 except: # re-raises
516 except: # re-raises
506 f.discard()
517 f.discard()
507 raise
518 raise
508
519
509 def invalidate(self):
520 def invalidate(self):
510 """Causes the next access to reread the dirstate.
521 """Causes the next access to reread the dirstate.
511
522
512 This is different from localrepo.invalidatedirstate() because it always
523 This is different from localrepo.invalidatedirstate() because it always
513 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
524 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
514 check whether the dirstate has changed before rereading it."""
525 check whether the dirstate has changed before rereading it."""
515
526
516 for a in ("_map", "_branch", "_ignore"):
527 for a in ("_map", "_branch", "_ignore"):
517 if a in self.__dict__:
528 if a in self.__dict__:
518 delattr(self, a)
529 delattr(self, a)
519 self._dirty = False
530 self._dirty = False
520 self._dirty_tracked_set = False
531 self._dirty_tracked_set = False
521 self._invalidated_context = self._changing_level > 0
532 self._invalidated_context = self._changing_level > 0
522 self._origpl = None
533 self._origpl = None
523
534
524 def copy(self, source, dest):
535 def copy(self, source, dest):
525 """Mark dest as a copy of source. Unmark dest if source is None."""
536 """Mark dest as a copy of source. Unmark dest if source is None."""
526 if source == dest:
537 if source == dest:
527 return
538 return
528 self._dirty = True
539 self._dirty = True
529 if source is not None:
540 if source is not None:
530 self._check_sparse(source)
541 self._check_sparse(source)
531 self._map.copymap[dest] = source
542 self._map.copymap[dest] = source
532 else:
543 else:
533 self._map.copymap.pop(dest, None)
544 self._map.copymap.pop(dest, None)
534
545
535 def copied(self, file):
546 def copied(self, file):
536 return self._map.copymap.get(file, None)
547 return self._map.copymap.get(file, None)
537
548
538 def copies(self):
549 def copies(self):
539 return self._map.copymap
550 return self._map.copymap
540
551
541 @requires_not_changing_parents
552 @requires_not_changing_parents
553 @requires_changing_files
542 def set_tracked(self, filename, reset_copy=False):
554 def set_tracked(self, filename, reset_copy=False):
543 """a "public" method for generic code to mark a file as tracked
555 """a "public" method for generic code to mark a file as tracked
544
556
545 This function is to be called outside of "update/merge" case. For
557 This function is to be called outside of "update/merge" case. For
546 example by a command like `hg add X`.
558 example by a command like `hg add X`.
547
559
548 if reset_copy is set, any existing copy information will be dropped.
560 if reset_copy is set, any existing copy information will be dropped.
549
561
550 return True the file was previously untracked, False otherwise.
562 return True the file was previously untracked, False otherwise.
551 """
563 """
552 self._dirty = True
564 self._dirty = True
553 entry = self._map.get(filename)
565 entry = self._map.get(filename)
554 if entry is None or not entry.tracked:
566 if entry is None or not entry.tracked:
555 self._check_new_tracked_filename(filename)
567 self._check_new_tracked_filename(filename)
556 pre_tracked = self._map.set_tracked(filename)
568 pre_tracked = self._map.set_tracked(filename)
557 if reset_copy:
569 if reset_copy:
558 self._map.copymap.pop(filename, None)
570 self._map.copymap.pop(filename, None)
559 if pre_tracked:
571 if pre_tracked:
560 self._dirty_tracked_set = True
572 self._dirty_tracked_set = True
561 return pre_tracked
573 return pre_tracked
562
574
563 @requires_not_changing_parents
575 @requires_not_changing_parents
576 @requires_changing_files
564 def set_untracked(self, filename):
577 def set_untracked(self, filename):
565 """a "public" method for generic code to mark a file as untracked
578 """a "public" method for generic code to mark a file as untracked
566
579
567 This function is to be called outside of "update/merge" case. For
580 This function is to be called outside of "update/merge" case. For
568 example by a command like `hg remove X`.
581 example by a command like `hg remove X`.
569
582
570 return True the file was previously tracked, False otherwise.
583 return True the file was previously tracked, False otherwise.
571 """
584 """
572 ret = self._map.set_untracked(filename)
585 ret = self._map.set_untracked(filename)
573 if ret:
586 if ret:
574 self._dirty = True
587 self._dirty = True
575 self._dirty_tracked_set = True
588 self._dirty_tracked_set = True
576 return ret
589 return ret
577
590
578 @requires_not_changing_parents
591 @requires_not_changing_parents
579 def set_clean(self, filename, parentfiledata):
592 def set_clean(self, filename, parentfiledata):
580 """record that the current state of the file on disk is known to be clean"""
593 """record that the current state of the file on disk is known to be clean"""
581 self._dirty = True
594 self._dirty = True
582 if not self._map[filename].tracked:
595 if not self._map[filename].tracked:
583 self._check_new_tracked_filename(filename)
596 self._check_new_tracked_filename(filename)
584 (mode, size, mtime) = parentfiledata
597 (mode, size, mtime) = parentfiledata
585 self._map.set_clean(filename, mode, size, mtime)
598 self._map.set_clean(filename, mode, size, mtime)
586
599
587 @requires_not_changing_parents
600 @requires_not_changing_parents
588 def set_possibly_dirty(self, filename):
601 def set_possibly_dirty(self, filename):
589 """record that the current state of the file on disk is unknown"""
602 """record that the current state of the file on disk is unknown"""
590 self._dirty = True
603 self._dirty = True
591 self._map.set_possibly_dirty(filename)
604 self._map.set_possibly_dirty(filename)
592
605
593 @requires_changing_parents
606 @requires_changing_parents
594 def update_file_p1(
607 def update_file_p1(
595 self,
608 self,
596 filename,
609 filename,
597 p1_tracked,
610 p1_tracked,
598 ):
611 ):
599 """Set a file as tracked in the parent (or not)
612 """Set a file as tracked in the parent (or not)
600
613
601 This is to be called when adjust the dirstate to a new parent after an history
614 This is to be called when adjust the dirstate to a new parent after an history
602 rewriting operation.
615 rewriting operation.
603
616
604 It should not be called during a merge (p2 != nullid) and only within
617 It should not be called during a merge (p2 != nullid) and only within
605 a `with dirstate.changing_parents(repo):` context.
618 a `with dirstate.changing_parents(repo):` context.
606 """
619 """
607 if self.in_merge:
620 if self.in_merge:
608 msg = b'update_file_reference should not be called when merging'
621 msg = b'update_file_reference should not be called when merging'
609 raise error.ProgrammingError(msg)
622 raise error.ProgrammingError(msg)
610 entry = self._map.get(filename)
623 entry = self._map.get(filename)
611 if entry is None:
624 if entry is None:
612 wc_tracked = False
625 wc_tracked = False
613 else:
626 else:
614 wc_tracked = entry.tracked
627 wc_tracked = entry.tracked
615 if not (p1_tracked or wc_tracked):
628 if not (p1_tracked or wc_tracked):
616 # the file is no longer relevant to anyone
629 # the file is no longer relevant to anyone
617 if self._map.get(filename) is not None:
630 if self._map.get(filename) is not None:
618 self._map.reset_state(filename)
631 self._map.reset_state(filename)
619 self._dirty = True
632 self._dirty = True
620 elif (not p1_tracked) and wc_tracked:
633 elif (not p1_tracked) and wc_tracked:
621 if entry is not None and entry.added:
634 if entry is not None and entry.added:
622 return # avoid dropping copy information (maybe?)
635 return # avoid dropping copy information (maybe?)
623
636
624 self._map.reset_state(
637 self._map.reset_state(
625 filename,
638 filename,
626 wc_tracked,
639 wc_tracked,
627 p1_tracked,
640 p1_tracked,
628 # the underlying reference might have changed, we will have to
641 # the underlying reference might have changed, we will have to
629 # check it.
642 # check it.
630 has_meaningful_mtime=False,
643 has_meaningful_mtime=False,
631 )
644 )
632
645
633 @requires_changing_parents
646 @requires_changing_parents
634 def update_file(
647 def update_file(
635 self,
648 self,
636 filename,
649 filename,
637 wc_tracked,
650 wc_tracked,
638 p1_tracked,
651 p1_tracked,
639 p2_info=False,
652 p2_info=False,
640 possibly_dirty=False,
653 possibly_dirty=False,
641 parentfiledata=None,
654 parentfiledata=None,
642 ):
655 ):
643 """update the information about a file in the dirstate
656 """update the information about a file in the dirstate
644
657
645 This is to be called when the direstates parent changes to keep track
658 This is to be called when the direstates parent changes to keep track
646 of what is the file situation in regards to the working copy and its parent.
659 of what is the file situation in regards to the working copy and its parent.
647
660
648 This function must be called within a `dirstate.changing_parents` context.
661 This function must be called within a `dirstate.changing_parents` context.
649
662
650 note: the API is at an early stage and we might need to adjust it
663 note: the API is at an early stage and we might need to adjust it
651 depending of what information ends up being relevant and useful to
664 depending of what information ends up being relevant and useful to
652 other processing.
665 other processing.
653 """
666 """
654 self._update_file(
667 self._update_file(
655 filename=filename,
668 filename=filename,
656 wc_tracked=wc_tracked,
669 wc_tracked=wc_tracked,
657 p1_tracked=p1_tracked,
670 p1_tracked=p1_tracked,
658 p2_info=p2_info,
671 p2_info=p2_info,
659 possibly_dirty=possibly_dirty,
672 possibly_dirty=possibly_dirty,
660 parentfiledata=parentfiledata,
673 parentfiledata=parentfiledata,
661 )
674 )
662
675
663 def hacky_extension_update_file(self, *args, **kwargs):
676 def hacky_extension_update_file(self, *args, **kwargs):
664 """NEVER USE THIS, YOU DO NOT NEED IT
677 """NEVER USE THIS, YOU DO NOT NEED IT
665
678
666 This function is a variant of "update_file" to be called by a small set
679 This function is a variant of "update_file" to be called by a small set
667 of extensions, it also adjust the internal state of file, but can be
680 of extensions, it also adjust the internal state of file, but can be
668 called outside an `changing_parents` context.
681 called outside an `changing_parents` context.
669
682
670 A very small number of extension meddle with the working copy content
683 A very small number of extension meddle with the working copy content
671 in a way that requires to adjust the dirstate accordingly. At the time
684 in a way that requires to adjust the dirstate accordingly. At the time
672 this command is written they are :
685 this command is written they are :
673 - keyword,
686 - keyword,
674 - largefile,
687 - largefile,
675 PLEASE DO NOT GROW THIS LIST ANY FURTHER.
688 PLEASE DO NOT GROW THIS LIST ANY FURTHER.
676
689
677 This function could probably be replaced by more semantic one (like
690 This function could probably be replaced by more semantic one (like
678 "adjust expected size" or "always revalidate file content", etc)
691 "adjust expected size" or "always revalidate file content", etc)
679 however at the time where this is writen, this is too much of a detour
692 however at the time where this is writen, this is too much of a detour
680 to be considered.
693 to be considered.
681 """
694 """
682 self._update_file(
695 self._update_file(
683 *args,
696 *args,
684 **kwargs,
697 **kwargs,
685 )
698 )
686
699
687 def _update_file(
700 def _update_file(
688 self,
701 self,
689 filename,
702 filename,
690 wc_tracked,
703 wc_tracked,
691 p1_tracked,
704 p1_tracked,
692 p2_info=False,
705 p2_info=False,
693 possibly_dirty=False,
706 possibly_dirty=False,
694 parentfiledata=None,
707 parentfiledata=None,
695 ):
708 ):
696
709
697 # note: I do not think we need to double check name clash here since we
710 # note: I do not think we need to double check name clash here since we
698 # are in a update/merge case that should already have taken care of
711 # are in a update/merge case that should already have taken care of
699 # this. The test agrees
712 # this. The test agrees
700
713
701 self._dirty = True
714 self._dirty = True
702 old_entry = self._map.get(filename)
715 old_entry = self._map.get(filename)
703 if old_entry is None:
716 if old_entry is None:
704 prev_tracked = False
717 prev_tracked = False
705 else:
718 else:
706 prev_tracked = old_entry.tracked
719 prev_tracked = old_entry.tracked
707 if prev_tracked != wc_tracked:
720 if prev_tracked != wc_tracked:
708 self._dirty_tracked_set = True
721 self._dirty_tracked_set = True
709
722
710 self._map.reset_state(
723 self._map.reset_state(
711 filename,
724 filename,
712 wc_tracked,
725 wc_tracked,
713 p1_tracked,
726 p1_tracked,
714 p2_info=p2_info,
727 p2_info=p2_info,
715 has_meaningful_mtime=not possibly_dirty,
728 has_meaningful_mtime=not possibly_dirty,
716 parentfiledata=parentfiledata,
729 parentfiledata=parentfiledata,
717 )
730 )
718
731
719 def _check_new_tracked_filename(self, filename):
732 def _check_new_tracked_filename(self, filename):
720 scmutil.checkfilename(filename)
733 scmutil.checkfilename(filename)
721 if self._map.hastrackeddir(filename):
734 if self._map.hastrackeddir(filename):
722 msg = _(b'directory %r already in dirstate')
735 msg = _(b'directory %r already in dirstate')
723 msg %= pycompat.bytestr(filename)
736 msg %= pycompat.bytestr(filename)
724 raise error.Abort(msg)
737 raise error.Abort(msg)
725 # shadows
738 # shadows
726 for d in pathutil.finddirs(filename):
739 for d in pathutil.finddirs(filename):
727 if self._map.hastrackeddir(d):
740 if self._map.hastrackeddir(d):
728 break
741 break
729 entry = self._map.get(d)
742 entry = self._map.get(d)
730 if entry is not None and not entry.removed:
743 if entry is not None and not entry.removed:
731 msg = _(b'file %r in dirstate clashes with %r')
744 msg = _(b'file %r in dirstate clashes with %r')
732 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
745 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
733 raise error.Abort(msg)
746 raise error.Abort(msg)
734 self._check_sparse(filename)
747 self._check_sparse(filename)
735
748
736 def _check_sparse(self, filename):
749 def _check_sparse(self, filename):
737 """Check that a filename is inside the sparse profile"""
750 """Check that a filename is inside the sparse profile"""
738 sparsematch = self._sparsematcher
751 sparsematch = self._sparsematcher
739 if sparsematch is not None and not sparsematch.always():
752 if sparsematch is not None and not sparsematch.always():
740 if not sparsematch(filename):
753 if not sparsematch(filename):
741 msg = _(b"cannot add '%s' - it is outside the sparse checkout")
754 msg = _(b"cannot add '%s' - it is outside the sparse checkout")
742 hint = _(
755 hint = _(
743 b'include file with `hg debugsparse --include <pattern>` or use '
756 b'include file with `hg debugsparse --include <pattern>` or use '
744 b'`hg add -s <file>` to include file directory while adding'
757 b'`hg add -s <file>` to include file directory while adding'
745 )
758 )
746 raise error.Abort(msg % filename, hint=hint)
759 raise error.Abort(msg % filename, hint=hint)
747
760
748 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
761 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
749 if exists is None:
762 if exists is None:
750 exists = os.path.lexists(os.path.join(self._root, path))
763 exists = os.path.lexists(os.path.join(self._root, path))
751 if not exists:
764 if not exists:
752 # Maybe a path component exists
765 # Maybe a path component exists
753 if not ignoremissing and b'/' in path:
766 if not ignoremissing and b'/' in path:
754 d, f = path.rsplit(b'/', 1)
767 d, f = path.rsplit(b'/', 1)
755 d = self._normalize(d, False, ignoremissing, None)
768 d = self._normalize(d, False, ignoremissing, None)
756 folded = d + b"/" + f
769 folded = d + b"/" + f
757 else:
770 else:
758 # No path components, preserve original case
771 # No path components, preserve original case
759 folded = path
772 folded = path
760 else:
773 else:
761 # recursively normalize leading directory components
774 # recursively normalize leading directory components
762 # against dirstate
775 # against dirstate
763 if b'/' in normed:
776 if b'/' in normed:
764 d, f = normed.rsplit(b'/', 1)
777 d, f = normed.rsplit(b'/', 1)
765 d = self._normalize(d, False, ignoremissing, True)
778 d = self._normalize(d, False, ignoremissing, True)
766 r = self._root + b"/" + d
779 r = self._root + b"/" + d
767 folded = d + b"/" + util.fspath(f, r)
780 folded = d + b"/" + util.fspath(f, r)
768 else:
781 else:
769 folded = util.fspath(normed, self._root)
782 folded = util.fspath(normed, self._root)
770 storemap[normed] = folded
783 storemap[normed] = folded
771
784
772 return folded
785 return folded
773
786
774 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
787 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
775 normed = util.normcase(path)
788 normed = util.normcase(path)
776 folded = self._map.filefoldmap.get(normed, None)
789 folded = self._map.filefoldmap.get(normed, None)
777 if folded is None:
790 if folded is None:
778 if isknown:
791 if isknown:
779 folded = path
792 folded = path
780 else:
793 else:
781 folded = self._discoverpath(
794 folded = self._discoverpath(
782 path, normed, ignoremissing, exists, self._map.filefoldmap
795 path, normed, ignoremissing, exists, self._map.filefoldmap
783 )
796 )
784 return folded
797 return folded
785
798
786 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
799 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
787 normed = util.normcase(path)
800 normed = util.normcase(path)
788 folded = self._map.filefoldmap.get(normed, None)
801 folded = self._map.filefoldmap.get(normed, None)
789 if folded is None:
802 if folded is None:
790 folded = self._map.dirfoldmap.get(normed, None)
803 folded = self._map.dirfoldmap.get(normed, None)
791 if folded is None:
804 if folded is None:
792 if isknown:
805 if isknown:
793 folded = path
806 folded = path
794 else:
807 else:
795 # store discovered result in dirfoldmap so that future
808 # store discovered result in dirfoldmap so that future
796 # normalizefile calls don't start matching directories
809 # normalizefile calls don't start matching directories
797 folded = self._discoverpath(
810 folded = self._discoverpath(
798 path, normed, ignoremissing, exists, self._map.dirfoldmap
811 path, normed, ignoremissing, exists, self._map.dirfoldmap
799 )
812 )
800 return folded
813 return folded
801
814
802 def normalize(self, path, isknown=False, ignoremissing=False):
815 def normalize(self, path, isknown=False, ignoremissing=False):
803 """
816 """
804 normalize the case of a pathname when on a casefolding filesystem
817 normalize the case of a pathname when on a casefolding filesystem
805
818
806 isknown specifies whether the filename came from walking the
819 isknown specifies whether the filename came from walking the
807 disk, to avoid extra filesystem access.
820 disk, to avoid extra filesystem access.
808
821
809 If ignoremissing is True, missing path are returned
822 If ignoremissing is True, missing path are returned
810 unchanged. Otherwise, we try harder to normalize possibly
823 unchanged. Otherwise, we try harder to normalize possibly
811 existing path components.
824 existing path components.
812
825
813 The normalized case is determined based on the following precedence:
826 The normalized case is determined based on the following precedence:
814
827
815 - version of name already stored in the dirstate
828 - version of name already stored in the dirstate
816 - version of name stored on disk
829 - version of name stored on disk
817 - version provided via command arguments
830 - version provided via command arguments
818 """
831 """
819
832
820 if self._checkcase:
833 if self._checkcase:
821 return self._normalize(path, isknown, ignoremissing)
834 return self._normalize(path, isknown, ignoremissing)
822 return path
835 return path
823
836
824 def clear(self):
837 def clear(self):
825 self._map.clear()
838 self._map.clear()
826 self._dirty = True
839 self._dirty = True
827
840
828 def rebuild(self, parent, allfiles, changedfiles=None):
841 def rebuild(self, parent, allfiles, changedfiles=None):
829 matcher = self._sparsematcher
842 matcher = self._sparsematcher
830 if matcher is not None and not matcher.always():
843 if matcher is not None and not matcher.always():
831 # should not add non-matching files
844 # should not add non-matching files
832 allfiles = [f for f in allfiles if matcher(f)]
845 allfiles = [f for f in allfiles if matcher(f)]
833 if changedfiles:
846 if changedfiles:
834 changedfiles = [f for f in changedfiles if matcher(f)]
847 changedfiles = [f for f in changedfiles if matcher(f)]
835
848
836 if changedfiles is not None:
849 if changedfiles is not None:
837 # these files will be deleted from the dirstate when they are
850 # these files will be deleted from the dirstate when they are
838 # not found to be in allfiles
851 # not found to be in allfiles
839 dirstatefilestoremove = {f for f in self if not matcher(f)}
852 dirstatefilestoremove = {f for f in self if not matcher(f)}
840 changedfiles = dirstatefilestoremove.union(changedfiles)
853 changedfiles = dirstatefilestoremove.union(changedfiles)
841
854
842 if changedfiles is None:
855 if changedfiles is None:
843 # Rebuild entire dirstate
856 # Rebuild entire dirstate
844 to_lookup = allfiles
857 to_lookup = allfiles
845 to_drop = []
858 to_drop = []
846 self.clear()
859 self.clear()
847 elif len(changedfiles) < 10:
860 elif len(changedfiles) < 10:
848 # Avoid turning allfiles into a set, which can be expensive if it's
861 # Avoid turning allfiles into a set, which can be expensive if it's
849 # large.
862 # large.
850 to_lookup = []
863 to_lookup = []
851 to_drop = []
864 to_drop = []
852 for f in changedfiles:
865 for f in changedfiles:
853 if f in allfiles:
866 if f in allfiles:
854 to_lookup.append(f)
867 to_lookup.append(f)
855 else:
868 else:
856 to_drop.append(f)
869 to_drop.append(f)
857 else:
870 else:
858 changedfilesset = set(changedfiles)
871 changedfilesset = set(changedfiles)
859 to_lookup = changedfilesset & set(allfiles)
872 to_lookup = changedfilesset & set(allfiles)
860 to_drop = changedfilesset - to_lookup
873 to_drop = changedfilesset - to_lookup
861
874
862 if self._origpl is None:
875 if self._origpl is None:
863 self._origpl = self._pl
876 self._origpl = self._pl
864 self._map.setparents(parent, self._nodeconstants.nullid)
877 self._map.setparents(parent, self._nodeconstants.nullid)
865
878
866 for f in to_lookup:
879 for f in to_lookup:
867 if self.in_merge:
880 if self.in_merge:
868 self.set_tracked(f)
881 self.set_tracked(f)
869 else:
882 else:
870 self._map.reset_state(
883 self._map.reset_state(
871 f,
884 f,
872 wc_tracked=True,
885 wc_tracked=True,
873 p1_tracked=True,
886 p1_tracked=True,
874 )
887 )
875 for f in to_drop:
888 for f in to_drop:
876 self._map.reset_state(f)
889 self._map.reset_state(f)
877
890
878 self._dirty = True
891 self._dirty = True
879
892
880 def identity(self):
893 def identity(self):
881 """Return identity of dirstate itself to detect changing in storage
894 """Return identity of dirstate itself to detect changing in storage
882
895
883 If identity of previous dirstate is equal to this, writing
896 If identity of previous dirstate is equal to this, writing
884 changes based on the former dirstate out can keep consistency.
897 changes based on the former dirstate out can keep consistency.
885 """
898 """
886 return self._map.identity
899 return self._map.identity
887
900
888 def write(self, tr):
901 def write(self, tr):
889 if not self._dirty:
902 if not self._dirty:
890 return
903 return
891
904
892 write_key = self._use_tracked_hint and self._dirty_tracked_set
905 write_key = self._use_tracked_hint and self._dirty_tracked_set
893 if tr:
906 if tr:
894 # delay writing in-memory changes out
907 # delay writing in-memory changes out
895 tr.addfilegenerator(
908 tr.addfilegenerator(
896 b'dirstate-1-main',
909 b'dirstate-1-main',
897 (self._filename,),
910 (self._filename,),
898 lambda f: self._writedirstate(tr, f),
911 lambda f: self._writedirstate(tr, f),
899 location=b'plain',
912 location=b'plain',
900 post_finalize=True,
913 post_finalize=True,
901 )
914 )
902 if write_key:
915 if write_key:
903 tr.addfilegenerator(
916 tr.addfilegenerator(
904 b'dirstate-2-key-post',
917 b'dirstate-2-key-post',
905 (self._filename_th,),
918 (self._filename_th,),
906 lambda f: self._write_tracked_hint(tr, f),
919 lambda f: self._write_tracked_hint(tr, f),
907 location=b'plain',
920 location=b'plain',
908 post_finalize=True,
921 post_finalize=True,
909 )
922 )
910 return
923 return
911
924
912 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
925 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
913 with file(self._filename) as f:
926 with file(self._filename) as f:
914 self._writedirstate(tr, f)
927 self._writedirstate(tr, f)
915 if write_key:
928 if write_key:
916 # we update the key-file after writing to make sure reader have a
929 # we update the key-file after writing to make sure reader have a
917 # key that match the newly written content
930 # key that match the newly written content
918 with file(self._filename_th) as f:
931 with file(self._filename_th) as f:
919 self._write_tracked_hint(tr, f)
932 self._write_tracked_hint(tr, f)
920
933
921 def delete_tracked_hint(self):
934 def delete_tracked_hint(self):
922 """remove the tracked_hint file
935 """remove the tracked_hint file
923
936
924 To be used by format downgrades operation"""
937 To be used by format downgrades operation"""
925 self._opener.unlink(self._filename_th)
938 self._opener.unlink(self._filename_th)
926 self._use_tracked_hint = False
939 self._use_tracked_hint = False
927
940
928 def addparentchangecallback(self, category, callback):
941 def addparentchangecallback(self, category, callback):
929 """add a callback to be called when the wd parents are changed
942 """add a callback to be called when the wd parents are changed
930
943
931 Callback will be called with the following arguments:
944 Callback will be called with the following arguments:
932 dirstate, (oldp1, oldp2), (newp1, newp2)
945 dirstate, (oldp1, oldp2), (newp1, newp2)
933
946
934 Category is a unique identifier to allow overwriting an old callback
947 Category is a unique identifier to allow overwriting an old callback
935 with a newer callback.
948 with a newer callback.
936 """
949 """
937 self._plchangecallbacks[category] = callback
950 self._plchangecallbacks[category] = callback
938
951
939 def _writedirstate(self, tr, st):
952 def _writedirstate(self, tr, st):
940 # notify callbacks about parents change
953 # notify callbacks about parents change
941 if self._origpl is not None and self._origpl != self._pl:
954 if self._origpl is not None and self._origpl != self._pl:
942 for c, callback in sorted(self._plchangecallbacks.items()):
955 for c, callback in sorted(self._plchangecallbacks.items()):
943 callback(self, self._origpl, self._pl)
956 callback(self, self._origpl, self._pl)
944 self._origpl = None
957 self._origpl = None
945 self._map.write(tr, st)
958 self._map.write(tr, st)
946 self._dirty = False
959 self._dirty = False
947 self._dirty_tracked_set = False
960 self._dirty_tracked_set = False
948
961
949 def _write_tracked_hint(self, tr, f):
962 def _write_tracked_hint(self, tr, f):
950 key = node.hex(uuid.uuid4().bytes)
963 key = node.hex(uuid.uuid4().bytes)
951 f.write(b"1\n%s\n" % key) # 1 is the format version
964 f.write(b"1\n%s\n" % key) # 1 is the format version
952
965
953 def _dirignore(self, f):
966 def _dirignore(self, f):
954 if self._ignore(f):
967 if self._ignore(f):
955 return True
968 return True
956 for p in pathutil.finddirs(f):
969 for p in pathutil.finddirs(f):
957 if self._ignore(p):
970 if self._ignore(p):
958 return True
971 return True
959 return False
972 return False
960
973
961 def _ignorefiles(self):
974 def _ignorefiles(self):
962 files = []
975 files = []
963 if os.path.exists(self._join(b'.hgignore')):
976 if os.path.exists(self._join(b'.hgignore')):
964 files.append(self._join(b'.hgignore'))
977 files.append(self._join(b'.hgignore'))
965 for name, path in self._ui.configitems(b"ui"):
978 for name, path in self._ui.configitems(b"ui"):
966 if name == b'ignore' or name.startswith(b'ignore.'):
979 if name == b'ignore' or name.startswith(b'ignore.'):
967 # we need to use os.path.join here rather than self._join
980 # we need to use os.path.join here rather than self._join
968 # because path is arbitrary and user-specified
981 # because path is arbitrary and user-specified
969 files.append(os.path.join(self._rootdir, util.expandpath(path)))
982 files.append(os.path.join(self._rootdir, util.expandpath(path)))
970 return files
983 return files
971
984
972 def _ignorefileandline(self, f):
985 def _ignorefileandline(self, f):
973 files = collections.deque(self._ignorefiles())
986 files = collections.deque(self._ignorefiles())
974 visited = set()
987 visited = set()
975 while files:
988 while files:
976 i = files.popleft()
989 i = files.popleft()
977 patterns = matchmod.readpatternfile(
990 patterns = matchmod.readpatternfile(
978 i, self._ui.warn, sourceinfo=True
991 i, self._ui.warn, sourceinfo=True
979 )
992 )
980 for pattern, lineno, line in patterns:
993 for pattern, lineno, line in patterns:
981 kind, p = matchmod._patsplit(pattern, b'glob')
994 kind, p = matchmod._patsplit(pattern, b'glob')
982 if kind == b"subinclude":
995 if kind == b"subinclude":
983 if p not in visited:
996 if p not in visited:
984 files.append(p)
997 files.append(p)
985 continue
998 continue
986 m = matchmod.match(
999 m = matchmod.match(
987 self._root, b'', [], [pattern], warn=self._ui.warn
1000 self._root, b'', [], [pattern], warn=self._ui.warn
988 )
1001 )
989 if m(f):
1002 if m(f):
990 return (i, lineno, line)
1003 return (i, lineno, line)
991 visited.add(i)
1004 visited.add(i)
992 return (None, -1, b"")
1005 return (None, -1, b"")
993
1006
994 def _walkexplicit(self, match, subrepos):
1007 def _walkexplicit(self, match, subrepos):
995 """Get stat data about the files explicitly specified by match.
1008 """Get stat data about the files explicitly specified by match.
996
1009
997 Return a triple (results, dirsfound, dirsnotfound).
1010 Return a triple (results, dirsfound, dirsnotfound).
998 - results is a mapping from filename to stat result. It also contains
1011 - results is a mapping from filename to stat result. It also contains
999 listings mapping subrepos and .hg to None.
1012 listings mapping subrepos and .hg to None.
1000 - dirsfound is a list of files found to be directories.
1013 - dirsfound is a list of files found to be directories.
1001 - dirsnotfound is a list of files that the dirstate thinks are
1014 - dirsnotfound is a list of files that the dirstate thinks are
1002 directories and that were not found."""
1015 directories and that were not found."""
1003
1016
1004 def badtype(mode):
1017 def badtype(mode):
1005 kind = _(b'unknown')
1018 kind = _(b'unknown')
1006 if stat.S_ISCHR(mode):
1019 if stat.S_ISCHR(mode):
1007 kind = _(b'character device')
1020 kind = _(b'character device')
1008 elif stat.S_ISBLK(mode):
1021 elif stat.S_ISBLK(mode):
1009 kind = _(b'block device')
1022 kind = _(b'block device')
1010 elif stat.S_ISFIFO(mode):
1023 elif stat.S_ISFIFO(mode):
1011 kind = _(b'fifo')
1024 kind = _(b'fifo')
1012 elif stat.S_ISSOCK(mode):
1025 elif stat.S_ISSOCK(mode):
1013 kind = _(b'socket')
1026 kind = _(b'socket')
1014 elif stat.S_ISDIR(mode):
1027 elif stat.S_ISDIR(mode):
1015 kind = _(b'directory')
1028 kind = _(b'directory')
1016 return _(b'unsupported file type (type is %s)') % kind
1029 return _(b'unsupported file type (type is %s)') % kind
1017
1030
1018 badfn = match.bad
1031 badfn = match.bad
1019 dmap = self._map
1032 dmap = self._map
1020 lstat = os.lstat
1033 lstat = os.lstat
1021 getkind = stat.S_IFMT
1034 getkind = stat.S_IFMT
1022 dirkind = stat.S_IFDIR
1035 dirkind = stat.S_IFDIR
1023 regkind = stat.S_IFREG
1036 regkind = stat.S_IFREG
1024 lnkkind = stat.S_IFLNK
1037 lnkkind = stat.S_IFLNK
1025 join = self._join
1038 join = self._join
1026 dirsfound = []
1039 dirsfound = []
1027 foundadd = dirsfound.append
1040 foundadd = dirsfound.append
1028 dirsnotfound = []
1041 dirsnotfound = []
1029 notfoundadd = dirsnotfound.append
1042 notfoundadd = dirsnotfound.append
1030
1043
1031 if not match.isexact() and self._checkcase:
1044 if not match.isexact() and self._checkcase:
1032 normalize = self._normalize
1045 normalize = self._normalize
1033 else:
1046 else:
1034 normalize = None
1047 normalize = None
1035
1048
1036 files = sorted(match.files())
1049 files = sorted(match.files())
1037 subrepos.sort()
1050 subrepos.sort()
1038 i, j = 0, 0
1051 i, j = 0, 0
1039 while i < len(files) and j < len(subrepos):
1052 while i < len(files) and j < len(subrepos):
1040 subpath = subrepos[j] + b"/"
1053 subpath = subrepos[j] + b"/"
1041 if files[i] < subpath:
1054 if files[i] < subpath:
1042 i += 1
1055 i += 1
1043 continue
1056 continue
1044 while i < len(files) and files[i].startswith(subpath):
1057 while i < len(files) and files[i].startswith(subpath):
1045 del files[i]
1058 del files[i]
1046 j += 1
1059 j += 1
1047
1060
1048 if not files or b'' in files:
1061 if not files or b'' in files:
1049 files = [b'']
1062 files = [b'']
1050 # constructing the foldmap is expensive, so don't do it for the
1063 # constructing the foldmap is expensive, so don't do it for the
1051 # common case where files is ['']
1064 # common case where files is ['']
1052 normalize = None
1065 normalize = None
1053 results = dict.fromkeys(subrepos)
1066 results = dict.fromkeys(subrepos)
1054 results[b'.hg'] = None
1067 results[b'.hg'] = None
1055
1068
1056 for ff in files:
1069 for ff in files:
1057 if normalize:
1070 if normalize:
1058 nf = normalize(ff, False, True)
1071 nf = normalize(ff, False, True)
1059 else:
1072 else:
1060 nf = ff
1073 nf = ff
1061 if nf in results:
1074 if nf in results:
1062 continue
1075 continue
1063
1076
1064 try:
1077 try:
1065 st = lstat(join(nf))
1078 st = lstat(join(nf))
1066 kind = getkind(st.st_mode)
1079 kind = getkind(st.st_mode)
1067 if kind == dirkind:
1080 if kind == dirkind:
1068 if nf in dmap:
1081 if nf in dmap:
1069 # file replaced by dir on disk but still in dirstate
1082 # file replaced by dir on disk but still in dirstate
1070 results[nf] = None
1083 results[nf] = None
1071 foundadd((nf, ff))
1084 foundadd((nf, ff))
1072 elif kind == regkind or kind == lnkkind:
1085 elif kind == regkind or kind == lnkkind:
1073 results[nf] = st
1086 results[nf] = st
1074 else:
1087 else:
1075 badfn(ff, badtype(kind))
1088 badfn(ff, badtype(kind))
1076 if nf in dmap:
1089 if nf in dmap:
1077 results[nf] = None
1090 results[nf] = None
1078 except (OSError) as inst:
1091 except (OSError) as inst:
1079 # nf not found on disk - it is dirstate only
1092 # nf not found on disk - it is dirstate only
1080 if nf in dmap: # does it exactly match a missing file?
1093 if nf in dmap: # does it exactly match a missing file?
1081 results[nf] = None
1094 results[nf] = None
1082 else: # does it match a missing directory?
1095 else: # does it match a missing directory?
1083 if self._map.hasdir(nf):
1096 if self._map.hasdir(nf):
1084 notfoundadd(nf)
1097 notfoundadd(nf)
1085 else:
1098 else:
1086 badfn(ff, encoding.strtolocal(inst.strerror))
1099 badfn(ff, encoding.strtolocal(inst.strerror))
1087
1100
1088 # match.files() may contain explicitly-specified paths that shouldn't
1101 # match.files() may contain explicitly-specified paths that shouldn't
1089 # be taken; drop them from the list of files found. dirsfound/notfound
1102 # be taken; drop them from the list of files found. dirsfound/notfound
1090 # aren't filtered here because they will be tested later.
1103 # aren't filtered here because they will be tested later.
1091 if match.anypats():
1104 if match.anypats():
1092 for f in list(results):
1105 for f in list(results):
1093 if f == b'.hg' or f in subrepos:
1106 if f == b'.hg' or f in subrepos:
1094 # keep sentinel to disable further out-of-repo walks
1107 # keep sentinel to disable further out-of-repo walks
1095 continue
1108 continue
1096 if not match(f):
1109 if not match(f):
1097 del results[f]
1110 del results[f]
1098
1111
1099 # Case insensitive filesystems cannot rely on lstat() failing to detect
1112 # Case insensitive filesystems cannot rely on lstat() failing to detect
1100 # a case-only rename. Prune the stat object for any file that does not
1113 # a case-only rename. Prune the stat object for any file that does not
1101 # match the case in the filesystem, if there are multiple files that
1114 # match the case in the filesystem, if there are multiple files that
1102 # normalize to the same path.
1115 # normalize to the same path.
1103 if match.isexact() and self._checkcase:
1116 if match.isexact() and self._checkcase:
1104 normed = {}
1117 normed = {}
1105
1118
1106 for f, st in results.items():
1119 for f, st in results.items():
1107 if st is None:
1120 if st is None:
1108 continue
1121 continue
1109
1122
1110 nc = util.normcase(f)
1123 nc = util.normcase(f)
1111 paths = normed.get(nc)
1124 paths = normed.get(nc)
1112
1125
1113 if paths is None:
1126 if paths is None:
1114 paths = set()
1127 paths = set()
1115 normed[nc] = paths
1128 normed[nc] = paths
1116
1129
1117 paths.add(f)
1130 paths.add(f)
1118
1131
1119 for norm, paths in normed.items():
1132 for norm, paths in normed.items():
1120 if len(paths) > 1:
1133 if len(paths) > 1:
1121 for path in paths:
1134 for path in paths:
1122 folded = self._discoverpath(
1135 folded = self._discoverpath(
1123 path, norm, True, None, self._map.dirfoldmap
1136 path, norm, True, None, self._map.dirfoldmap
1124 )
1137 )
1125 if path != folded:
1138 if path != folded:
1126 results[path] = None
1139 results[path] = None
1127
1140
1128 return results, dirsfound, dirsnotfound
1141 return results, dirsfound, dirsnotfound
1129
1142
1130 def walk(self, match, subrepos, unknown, ignored, full=True):
1143 def walk(self, match, subrepos, unknown, ignored, full=True):
1131 """
1144 """
1132 Walk recursively through the directory tree, finding all files
1145 Walk recursively through the directory tree, finding all files
1133 matched by match.
1146 matched by match.
1134
1147
1135 If full is False, maybe skip some known-clean files.
1148 If full is False, maybe skip some known-clean files.
1136
1149
1137 Return a dict mapping filename to stat-like object (either
1150 Return a dict mapping filename to stat-like object (either
1138 mercurial.osutil.stat instance or return value of os.stat()).
1151 mercurial.osutil.stat instance or return value of os.stat()).
1139
1152
1140 """
1153 """
1141 # full is a flag that extensions that hook into walk can use -- this
1154 # full is a flag that extensions that hook into walk can use -- this
1142 # implementation doesn't use it at all. This satisfies the contract
1155 # implementation doesn't use it at all. This satisfies the contract
1143 # because we only guarantee a "maybe".
1156 # because we only guarantee a "maybe".
1144
1157
1145 if ignored:
1158 if ignored:
1146 ignore = util.never
1159 ignore = util.never
1147 dirignore = util.never
1160 dirignore = util.never
1148 elif unknown:
1161 elif unknown:
1149 ignore = self._ignore
1162 ignore = self._ignore
1150 dirignore = self._dirignore
1163 dirignore = self._dirignore
1151 else:
1164 else:
1152 # if not unknown and not ignored, drop dir recursion and step 2
1165 # if not unknown and not ignored, drop dir recursion and step 2
1153 ignore = util.always
1166 ignore = util.always
1154 dirignore = util.always
1167 dirignore = util.always
1155
1168
1156 if self._sparsematchfn is not None:
1169 if self._sparsematchfn is not None:
1157 em = matchmod.exact(match.files())
1170 em = matchmod.exact(match.files())
1158 sm = matchmod.unionmatcher([self._sparsematcher, em])
1171 sm = matchmod.unionmatcher([self._sparsematcher, em])
1159 match = matchmod.intersectmatchers(match, sm)
1172 match = matchmod.intersectmatchers(match, sm)
1160
1173
1161 matchfn = match.matchfn
1174 matchfn = match.matchfn
1162 matchalways = match.always()
1175 matchalways = match.always()
1163 matchtdir = match.traversedir
1176 matchtdir = match.traversedir
1164 dmap = self._map
1177 dmap = self._map
1165 listdir = util.listdir
1178 listdir = util.listdir
1166 lstat = os.lstat
1179 lstat = os.lstat
1167 dirkind = stat.S_IFDIR
1180 dirkind = stat.S_IFDIR
1168 regkind = stat.S_IFREG
1181 regkind = stat.S_IFREG
1169 lnkkind = stat.S_IFLNK
1182 lnkkind = stat.S_IFLNK
1170 join = self._join
1183 join = self._join
1171
1184
1172 exact = skipstep3 = False
1185 exact = skipstep3 = False
1173 if match.isexact(): # match.exact
1186 if match.isexact(): # match.exact
1174 exact = True
1187 exact = True
1175 dirignore = util.always # skip step 2
1188 dirignore = util.always # skip step 2
1176 elif match.prefix(): # match.match, no patterns
1189 elif match.prefix(): # match.match, no patterns
1177 skipstep3 = True
1190 skipstep3 = True
1178
1191
1179 if not exact and self._checkcase:
1192 if not exact and self._checkcase:
1180 normalize = self._normalize
1193 normalize = self._normalize
1181 normalizefile = self._normalizefile
1194 normalizefile = self._normalizefile
1182 skipstep3 = False
1195 skipstep3 = False
1183 else:
1196 else:
1184 normalize = self._normalize
1197 normalize = self._normalize
1185 normalizefile = None
1198 normalizefile = None
1186
1199
1187 # step 1: find all explicit files
1200 # step 1: find all explicit files
1188 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1201 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1189 if matchtdir:
1202 if matchtdir:
1190 for d in work:
1203 for d in work:
1191 matchtdir(d[0])
1204 matchtdir(d[0])
1192 for d in dirsnotfound:
1205 for d in dirsnotfound:
1193 matchtdir(d)
1206 matchtdir(d)
1194
1207
1195 skipstep3 = skipstep3 and not (work or dirsnotfound)
1208 skipstep3 = skipstep3 and not (work or dirsnotfound)
1196 work = [d for d in work if not dirignore(d[0])]
1209 work = [d for d in work if not dirignore(d[0])]
1197
1210
1198 # step 2: visit subdirectories
1211 # step 2: visit subdirectories
1199 def traverse(work, alreadynormed):
1212 def traverse(work, alreadynormed):
1200 wadd = work.append
1213 wadd = work.append
1201 while work:
1214 while work:
1202 tracing.counter('dirstate.walk work', len(work))
1215 tracing.counter('dirstate.walk work', len(work))
1203 nd = work.pop()
1216 nd = work.pop()
1204 visitentries = match.visitchildrenset(nd)
1217 visitentries = match.visitchildrenset(nd)
1205 if not visitentries:
1218 if not visitentries:
1206 continue
1219 continue
1207 if visitentries == b'this' or visitentries == b'all':
1220 if visitentries == b'this' or visitentries == b'all':
1208 visitentries = None
1221 visitentries = None
1209 skip = None
1222 skip = None
1210 if nd != b'':
1223 if nd != b'':
1211 skip = b'.hg'
1224 skip = b'.hg'
1212 try:
1225 try:
1213 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1226 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1214 entries = listdir(join(nd), stat=True, skip=skip)
1227 entries = listdir(join(nd), stat=True, skip=skip)
1215 except (PermissionError, FileNotFoundError) as inst:
1228 except (PermissionError, FileNotFoundError) as inst:
1216 match.bad(
1229 match.bad(
1217 self.pathto(nd), encoding.strtolocal(inst.strerror)
1230 self.pathto(nd), encoding.strtolocal(inst.strerror)
1218 )
1231 )
1219 continue
1232 continue
1220 for f, kind, st in entries:
1233 for f, kind, st in entries:
1221 # Some matchers may return files in the visitentries set,
1234 # Some matchers may return files in the visitentries set,
1222 # instead of 'this', if the matcher explicitly mentions them
1235 # instead of 'this', if the matcher explicitly mentions them
1223 # and is not an exactmatcher. This is acceptable; we do not
1236 # and is not an exactmatcher. This is acceptable; we do not
1224 # make any hard assumptions about file-or-directory below
1237 # make any hard assumptions about file-or-directory below
1225 # based on the presence of `f` in visitentries. If
1238 # based on the presence of `f` in visitentries. If
1226 # visitchildrenset returned a set, we can always skip the
1239 # visitchildrenset returned a set, we can always skip the
1227 # entries *not* in the set it provided regardless of whether
1240 # entries *not* in the set it provided regardless of whether
1228 # they're actually a file or a directory.
1241 # they're actually a file or a directory.
1229 if visitentries and f not in visitentries:
1242 if visitentries and f not in visitentries:
1230 continue
1243 continue
1231 if normalizefile:
1244 if normalizefile:
1232 # even though f might be a directory, we're only
1245 # even though f might be a directory, we're only
1233 # interested in comparing it to files currently in the
1246 # interested in comparing it to files currently in the
1234 # dmap -- therefore normalizefile is enough
1247 # dmap -- therefore normalizefile is enough
1235 nf = normalizefile(
1248 nf = normalizefile(
1236 nd and (nd + b"/" + f) or f, True, True
1249 nd and (nd + b"/" + f) or f, True, True
1237 )
1250 )
1238 else:
1251 else:
1239 nf = nd and (nd + b"/" + f) or f
1252 nf = nd and (nd + b"/" + f) or f
1240 if nf not in results:
1253 if nf not in results:
1241 if kind == dirkind:
1254 if kind == dirkind:
1242 if not ignore(nf):
1255 if not ignore(nf):
1243 if matchtdir:
1256 if matchtdir:
1244 matchtdir(nf)
1257 matchtdir(nf)
1245 wadd(nf)
1258 wadd(nf)
1246 if nf in dmap and (matchalways or matchfn(nf)):
1259 if nf in dmap and (matchalways or matchfn(nf)):
1247 results[nf] = None
1260 results[nf] = None
1248 elif kind == regkind or kind == lnkkind:
1261 elif kind == regkind or kind == lnkkind:
1249 if nf in dmap:
1262 if nf in dmap:
1250 if matchalways or matchfn(nf):
1263 if matchalways or matchfn(nf):
1251 results[nf] = st
1264 results[nf] = st
1252 elif (matchalways or matchfn(nf)) and not ignore(
1265 elif (matchalways or matchfn(nf)) and not ignore(
1253 nf
1266 nf
1254 ):
1267 ):
1255 # unknown file -- normalize if necessary
1268 # unknown file -- normalize if necessary
1256 if not alreadynormed:
1269 if not alreadynormed:
1257 nf = normalize(nf, False, True)
1270 nf = normalize(nf, False, True)
1258 results[nf] = st
1271 results[nf] = st
1259 elif nf in dmap and (matchalways or matchfn(nf)):
1272 elif nf in dmap and (matchalways or matchfn(nf)):
1260 results[nf] = None
1273 results[nf] = None
1261
1274
1262 for nd, d in work:
1275 for nd, d in work:
1263 # alreadynormed means that processwork doesn't have to do any
1276 # alreadynormed means that processwork doesn't have to do any
1264 # expensive directory normalization
1277 # expensive directory normalization
1265 alreadynormed = not normalize or nd == d
1278 alreadynormed = not normalize or nd == d
1266 traverse([d], alreadynormed)
1279 traverse([d], alreadynormed)
1267
1280
1268 for s in subrepos:
1281 for s in subrepos:
1269 del results[s]
1282 del results[s]
1270 del results[b'.hg']
1283 del results[b'.hg']
1271
1284
1272 # step 3: visit remaining files from dmap
1285 # step 3: visit remaining files from dmap
1273 if not skipstep3 and not exact:
1286 if not skipstep3 and not exact:
1274 # If a dmap file is not in results yet, it was either
1287 # If a dmap file is not in results yet, it was either
1275 # a) not matching matchfn b) ignored, c) missing, or d) under a
1288 # a) not matching matchfn b) ignored, c) missing, or d) under a
1276 # symlink directory.
1289 # symlink directory.
1277 if not results and matchalways:
1290 if not results and matchalways:
1278 visit = [f for f in dmap]
1291 visit = [f for f in dmap]
1279 else:
1292 else:
1280 visit = [f for f in dmap if f not in results and matchfn(f)]
1293 visit = [f for f in dmap if f not in results and matchfn(f)]
1281 visit.sort()
1294 visit.sort()
1282
1295
1283 if unknown:
1296 if unknown:
1284 # unknown == True means we walked all dirs under the roots
1297 # unknown == True means we walked all dirs under the roots
1285 # that wasn't ignored, and everything that matched was stat'ed
1298 # that wasn't ignored, and everything that matched was stat'ed
1286 # and is already in results.
1299 # and is already in results.
1287 # The rest must thus be ignored or under a symlink.
1300 # The rest must thus be ignored or under a symlink.
1288 audit_path = pathutil.pathauditor(self._root, cached=True)
1301 audit_path = pathutil.pathauditor(self._root, cached=True)
1289
1302
1290 for nf in iter(visit):
1303 for nf in iter(visit):
1291 # If a stat for the same file was already added with a
1304 # If a stat for the same file was already added with a
1292 # different case, don't add one for this, since that would
1305 # different case, don't add one for this, since that would
1293 # make it appear as if the file exists under both names
1306 # make it appear as if the file exists under both names
1294 # on disk.
1307 # on disk.
1295 if (
1308 if (
1296 normalizefile
1309 normalizefile
1297 and normalizefile(nf, True, True) in results
1310 and normalizefile(nf, True, True) in results
1298 ):
1311 ):
1299 results[nf] = None
1312 results[nf] = None
1300 # Report ignored items in the dmap as long as they are not
1313 # Report ignored items in the dmap as long as they are not
1301 # under a symlink directory.
1314 # under a symlink directory.
1302 elif audit_path.check(nf):
1315 elif audit_path.check(nf):
1303 try:
1316 try:
1304 results[nf] = lstat(join(nf))
1317 results[nf] = lstat(join(nf))
1305 # file was just ignored, no links, and exists
1318 # file was just ignored, no links, and exists
1306 except OSError:
1319 except OSError:
1307 # file doesn't exist
1320 # file doesn't exist
1308 results[nf] = None
1321 results[nf] = None
1309 else:
1322 else:
1310 # It's either missing or under a symlink directory
1323 # It's either missing or under a symlink directory
1311 # which we in this case report as missing
1324 # which we in this case report as missing
1312 results[nf] = None
1325 results[nf] = None
1313 else:
1326 else:
1314 # We may not have walked the full directory tree above,
1327 # We may not have walked the full directory tree above,
1315 # so stat and check everything we missed.
1328 # so stat and check everything we missed.
1316 iv = iter(visit)
1329 iv = iter(visit)
1317 for st in util.statfiles([join(i) for i in visit]):
1330 for st in util.statfiles([join(i) for i in visit]):
1318 results[next(iv)] = st
1331 results[next(iv)] = st
1319 return results
1332 return results
1320
1333
1321 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1334 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1322 if self._sparsematchfn is not None:
1335 if self._sparsematchfn is not None:
1323 em = matchmod.exact(matcher.files())
1336 em = matchmod.exact(matcher.files())
1324 sm = matchmod.unionmatcher([self._sparsematcher, em])
1337 sm = matchmod.unionmatcher([self._sparsematcher, em])
1325 matcher = matchmod.intersectmatchers(matcher, sm)
1338 matcher = matchmod.intersectmatchers(matcher, sm)
1326 # Force Rayon (Rust parallelism library) to respect the number of
1339 # Force Rayon (Rust parallelism library) to respect the number of
1327 # workers. This is a temporary workaround until Rust code knows
1340 # workers. This is a temporary workaround until Rust code knows
1328 # how to read the config file.
1341 # how to read the config file.
1329 numcpus = self._ui.configint(b"worker", b"numcpus")
1342 numcpus = self._ui.configint(b"worker", b"numcpus")
1330 if numcpus is not None:
1343 if numcpus is not None:
1331 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1344 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1332
1345
1333 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1346 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1334 if not workers_enabled:
1347 if not workers_enabled:
1335 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1348 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1336
1349
1337 (
1350 (
1338 lookup,
1351 lookup,
1339 modified,
1352 modified,
1340 added,
1353 added,
1341 removed,
1354 removed,
1342 deleted,
1355 deleted,
1343 clean,
1356 clean,
1344 ignored,
1357 ignored,
1345 unknown,
1358 unknown,
1346 warnings,
1359 warnings,
1347 bad,
1360 bad,
1348 traversed,
1361 traversed,
1349 dirty,
1362 dirty,
1350 ) = rustmod.status(
1363 ) = rustmod.status(
1351 self._map._map,
1364 self._map._map,
1352 matcher,
1365 matcher,
1353 self._rootdir,
1366 self._rootdir,
1354 self._ignorefiles(),
1367 self._ignorefiles(),
1355 self._checkexec,
1368 self._checkexec,
1356 bool(list_clean),
1369 bool(list_clean),
1357 bool(list_ignored),
1370 bool(list_ignored),
1358 bool(list_unknown),
1371 bool(list_unknown),
1359 bool(matcher.traversedir),
1372 bool(matcher.traversedir),
1360 )
1373 )
1361
1374
1362 self._dirty |= dirty
1375 self._dirty |= dirty
1363
1376
1364 if matcher.traversedir:
1377 if matcher.traversedir:
1365 for dir in traversed:
1378 for dir in traversed:
1366 matcher.traversedir(dir)
1379 matcher.traversedir(dir)
1367
1380
1368 if self._ui.warn:
1381 if self._ui.warn:
1369 for item in warnings:
1382 for item in warnings:
1370 if isinstance(item, tuple):
1383 if isinstance(item, tuple):
1371 file_path, syntax = item
1384 file_path, syntax = item
1372 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1385 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1373 file_path,
1386 file_path,
1374 syntax,
1387 syntax,
1375 )
1388 )
1376 self._ui.warn(msg)
1389 self._ui.warn(msg)
1377 else:
1390 else:
1378 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1391 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1379 self._ui.warn(
1392 self._ui.warn(
1380 msg
1393 msg
1381 % (
1394 % (
1382 pathutil.canonpath(
1395 pathutil.canonpath(
1383 self._rootdir, self._rootdir, item
1396 self._rootdir, self._rootdir, item
1384 ),
1397 ),
1385 b"No such file or directory",
1398 b"No such file or directory",
1386 )
1399 )
1387 )
1400 )
1388
1401
1389 for fn, message in bad:
1402 for fn, message in bad:
1390 matcher.bad(fn, encoding.strtolocal(message))
1403 matcher.bad(fn, encoding.strtolocal(message))
1391
1404
1392 status = scmutil.status(
1405 status = scmutil.status(
1393 modified=modified,
1406 modified=modified,
1394 added=added,
1407 added=added,
1395 removed=removed,
1408 removed=removed,
1396 deleted=deleted,
1409 deleted=deleted,
1397 unknown=unknown,
1410 unknown=unknown,
1398 ignored=ignored,
1411 ignored=ignored,
1399 clean=clean,
1412 clean=clean,
1400 )
1413 )
1401 return (lookup, status)
1414 return (lookup, status)
1402
1415
1403 def status(self, match, subrepos, ignored, clean, unknown):
1416 def status(self, match, subrepos, ignored, clean, unknown):
1404 """Determine the status of the working copy relative to the
1417 """Determine the status of the working copy relative to the
1405 dirstate and return a pair of (unsure, status), where status is of type
1418 dirstate and return a pair of (unsure, status), where status is of type
1406 scmutil.status and:
1419 scmutil.status and:
1407
1420
1408 unsure:
1421 unsure:
1409 files that might have been modified since the dirstate was
1422 files that might have been modified since the dirstate was
1410 written, but need to be read to be sure (size is the same
1423 written, but need to be read to be sure (size is the same
1411 but mtime differs)
1424 but mtime differs)
1412 status.modified:
1425 status.modified:
1413 files that have definitely been modified since the dirstate
1426 files that have definitely been modified since the dirstate
1414 was written (different size or mode)
1427 was written (different size or mode)
1415 status.clean:
1428 status.clean:
1416 files that have definitely not been modified since the
1429 files that have definitely not been modified since the
1417 dirstate was written
1430 dirstate was written
1418 """
1431 """
1419 listignored, listclean, listunknown = ignored, clean, unknown
1432 listignored, listclean, listunknown = ignored, clean, unknown
1420 lookup, modified, added, unknown, ignored = [], [], [], [], []
1433 lookup, modified, added, unknown, ignored = [], [], [], [], []
1421 removed, deleted, clean = [], [], []
1434 removed, deleted, clean = [], [], []
1422
1435
1423 dmap = self._map
1436 dmap = self._map
1424 dmap.preload()
1437 dmap.preload()
1425
1438
1426 use_rust = True
1439 use_rust = True
1427
1440
1428 allowed_matchers = (
1441 allowed_matchers = (
1429 matchmod.alwaysmatcher,
1442 matchmod.alwaysmatcher,
1430 matchmod.differencematcher,
1443 matchmod.differencematcher,
1431 matchmod.exactmatcher,
1444 matchmod.exactmatcher,
1432 matchmod.includematcher,
1445 matchmod.includematcher,
1433 matchmod.intersectionmatcher,
1446 matchmod.intersectionmatcher,
1434 matchmod.nevermatcher,
1447 matchmod.nevermatcher,
1435 matchmod.unionmatcher,
1448 matchmod.unionmatcher,
1436 )
1449 )
1437
1450
1438 if rustmod is None:
1451 if rustmod is None:
1439 use_rust = False
1452 use_rust = False
1440 elif self._checkcase:
1453 elif self._checkcase:
1441 # Case-insensitive filesystems are not handled yet
1454 # Case-insensitive filesystems are not handled yet
1442 use_rust = False
1455 use_rust = False
1443 elif subrepos:
1456 elif subrepos:
1444 use_rust = False
1457 use_rust = False
1445 elif not isinstance(match, allowed_matchers):
1458 elif not isinstance(match, allowed_matchers):
1446 # Some matchers have yet to be implemented
1459 # Some matchers have yet to be implemented
1447 use_rust = False
1460 use_rust = False
1448
1461
1449 # Get the time from the filesystem so we can disambiguate files that
1462 # Get the time from the filesystem so we can disambiguate files that
1450 # appear modified in the present or future.
1463 # appear modified in the present or future.
1451 try:
1464 try:
1452 mtime_boundary = timestamp.get_fs_now(self._opener)
1465 mtime_boundary = timestamp.get_fs_now(self._opener)
1453 except OSError:
1466 except OSError:
1454 # In largefiles or readonly context
1467 # In largefiles or readonly context
1455 mtime_boundary = None
1468 mtime_boundary = None
1456
1469
1457 if use_rust:
1470 if use_rust:
1458 try:
1471 try:
1459 res = self._rust_status(
1472 res = self._rust_status(
1460 match, listclean, listignored, listunknown
1473 match, listclean, listignored, listunknown
1461 )
1474 )
1462 return res + (mtime_boundary,)
1475 return res + (mtime_boundary,)
1463 except rustmod.FallbackError:
1476 except rustmod.FallbackError:
1464 pass
1477 pass
1465
1478
1466 def noop(f):
1479 def noop(f):
1467 pass
1480 pass
1468
1481
1469 dcontains = dmap.__contains__
1482 dcontains = dmap.__contains__
1470 dget = dmap.__getitem__
1483 dget = dmap.__getitem__
1471 ladd = lookup.append # aka "unsure"
1484 ladd = lookup.append # aka "unsure"
1472 madd = modified.append
1485 madd = modified.append
1473 aadd = added.append
1486 aadd = added.append
1474 uadd = unknown.append if listunknown else noop
1487 uadd = unknown.append if listunknown else noop
1475 iadd = ignored.append if listignored else noop
1488 iadd = ignored.append if listignored else noop
1476 radd = removed.append
1489 radd = removed.append
1477 dadd = deleted.append
1490 dadd = deleted.append
1478 cadd = clean.append if listclean else noop
1491 cadd = clean.append if listclean else noop
1479 mexact = match.exact
1492 mexact = match.exact
1480 dirignore = self._dirignore
1493 dirignore = self._dirignore
1481 checkexec = self._checkexec
1494 checkexec = self._checkexec
1482 checklink = self._checklink
1495 checklink = self._checklink
1483 copymap = self._map.copymap
1496 copymap = self._map.copymap
1484
1497
1485 # We need to do full walks when either
1498 # We need to do full walks when either
1486 # - we're listing all clean files, or
1499 # - we're listing all clean files, or
1487 # - match.traversedir does something, because match.traversedir should
1500 # - match.traversedir does something, because match.traversedir should
1488 # be called for every dir in the working dir
1501 # be called for every dir in the working dir
1489 full = listclean or match.traversedir is not None
1502 full = listclean or match.traversedir is not None
1490 for fn, st in self.walk(
1503 for fn, st in self.walk(
1491 match, subrepos, listunknown, listignored, full=full
1504 match, subrepos, listunknown, listignored, full=full
1492 ).items():
1505 ).items():
1493 if not dcontains(fn):
1506 if not dcontains(fn):
1494 if (listignored or mexact(fn)) and dirignore(fn):
1507 if (listignored or mexact(fn)) and dirignore(fn):
1495 if listignored:
1508 if listignored:
1496 iadd(fn)
1509 iadd(fn)
1497 else:
1510 else:
1498 uadd(fn)
1511 uadd(fn)
1499 continue
1512 continue
1500
1513
1501 t = dget(fn)
1514 t = dget(fn)
1502 mode = t.mode
1515 mode = t.mode
1503 size = t.size
1516 size = t.size
1504
1517
1505 if not st and t.tracked:
1518 if not st and t.tracked:
1506 dadd(fn)
1519 dadd(fn)
1507 elif t.p2_info:
1520 elif t.p2_info:
1508 madd(fn)
1521 madd(fn)
1509 elif t.added:
1522 elif t.added:
1510 aadd(fn)
1523 aadd(fn)
1511 elif t.removed:
1524 elif t.removed:
1512 radd(fn)
1525 radd(fn)
1513 elif t.tracked:
1526 elif t.tracked:
1514 if not checklink and t.has_fallback_symlink:
1527 if not checklink and t.has_fallback_symlink:
1515 # If the file system does not support symlink, the mode
1528 # If the file system does not support symlink, the mode
1516 # might not be correctly stored in the dirstate, so do not
1529 # might not be correctly stored in the dirstate, so do not
1517 # trust it.
1530 # trust it.
1518 ladd(fn)
1531 ladd(fn)
1519 elif not checkexec and t.has_fallback_exec:
1532 elif not checkexec and t.has_fallback_exec:
1520 # If the file system does not support exec bits, the mode
1533 # If the file system does not support exec bits, the mode
1521 # might not be correctly stored in the dirstate, so do not
1534 # might not be correctly stored in the dirstate, so do not
1522 # trust it.
1535 # trust it.
1523 ladd(fn)
1536 ladd(fn)
1524 elif (
1537 elif (
1525 size >= 0
1538 size >= 0
1526 and (
1539 and (
1527 (size != st.st_size and size != st.st_size & _rangemask)
1540 (size != st.st_size and size != st.st_size & _rangemask)
1528 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1541 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1529 )
1542 )
1530 or fn in copymap
1543 or fn in copymap
1531 ):
1544 ):
1532 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1545 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1533 # issue6456: Size returned may be longer due to
1546 # issue6456: Size returned may be longer due to
1534 # encryption on EXT-4 fscrypt, undecided.
1547 # encryption on EXT-4 fscrypt, undecided.
1535 ladd(fn)
1548 ladd(fn)
1536 else:
1549 else:
1537 madd(fn)
1550 madd(fn)
1538 elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
1551 elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
1539 # There might be a change in the future if for example the
1552 # There might be a change in the future if for example the
1540 # internal clock is off, but this is a case where the issues
1553 # internal clock is off, but this is a case where the issues
1541 # the user would face would be a lot worse and there is
1554 # the user would face would be a lot worse and there is
1542 # nothing we can really do.
1555 # nothing we can really do.
1543 ladd(fn)
1556 ladd(fn)
1544 elif listclean:
1557 elif listclean:
1545 cadd(fn)
1558 cadd(fn)
1546 status = scmutil.status(
1559 status = scmutil.status(
1547 modified, added, removed, deleted, unknown, ignored, clean
1560 modified, added, removed, deleted, unknown, ignored, clean
1548 )
1561 )
1549 return (lookup, status, mtime_boundary)
1562 return (lookup, status, mtime_boundary)
1550
1563
1551 def matches(self, match):
1564 def matches(self, match):
1552 """
1565 """
1553 return files in the dirstate (in whatever state) filtered by match
1566 return files in the dirstate (in whatever state) filtered by match
1554 """
1567 """
1555 dmap = self._map
1568 dmap = self._map
1556 if rustmod is not None:
1569 if rustmod is not None:
1557 dmap = self._map._map
1570 dmap = self._map._map
1558
1571
1559 if match.always():
1572 if match.always():
1560 return dmap.keys()
1573 return dmap.keys()
1561 files = match.files()
1574 files = match.files()
1562 if match.isexact():
1575 if match.isexact():
1563 # fast path -- filter the other way around, since typically files is
1576 # fast path -- filter the other way around, since typically files is
1564 # much smaller than dmap
1577 # much smaller than dmap
1565 return [f for f in files if f in dmap]
1578 return [f for f in files if f in dmap]
1566 if match.prefix() and all(fn in dmap for fn in files):
1579 if match.prefix() and all(fn in dmap for fn in files):
1567 # fast path -- all the values are known to be files, so just return
1580 # fast path -- all the values are known to be files, so just return
1568 # that
1581 # that
1569 return list(files)
1582 return list(files)
1570 return [f for f in dmap if match(f)]
1583 return [f for f in dmap if match(f)]
1571
1584
1572 def _actualfilename(self, tr):
1585 def _actualfilename(self, tr):
1573 if tr:
1586 if tr:
1574 return self._pendingfilename
1587 return self._pendingfilename
1575 else:
1588 else:
1576 return self._filename
1589 return self._filename
1577
1590
1578 def data_backup_filename(self, backupname):
1591 def data_backup_filename(self, backupname):
1579 if not self._use_dirstate_v2:
1592 if not self._use_dirstate_v2:
1580 return None
1593 return None
1581 return backupname + b'.v2-data'
1594 return backupname + b'.v2-data'
1582
1595
1583 def _new_backup_data_filename(self, backupname):
1596 def _new_backup_data_filename(self, backupname):
1584 """return a filename to backup a data-file or None"""
1597 """return a filename to backup a data-file or None"""
1585 if not self._use_dirstate_v2:
1598 if not self._use_dirstate_v2:
1586 return None
1599 return None
1587 if self._map.docket.uuid is None:
1600 if self._map.docket.uuid is None:
1588 # not created yet, nothing to backup
1601 # not created yet, nothing to backup
1589 return None
1602 return None
1590 data_filename = self._map.docket.data_filename()
1603 data_filename = self._map.docket.data_filename()
1591 return data_filename, self.data_backup_filename(backupname)
1604 return data_filename, self.data_backup_filename(backupname)
1592
1605
def backup_data_file(self, backupname):
    """Given an existing docket backup ``backupname``, return the
    (backup_data_filename, current_data_filename) pair.

    Returns None for dirstate-v1, which has no separate data file.
    """
    if not self._use_dirstate_v2:
        return None
    raw_docket = self._opener.read(backupname)
    docket = docketmod.DirstateDocket.parse(raw_docket, self._nodeconstants)
    return (
        self.data_backup_filename(backupname),
        docket.data_filename(),
    )
1601
1614
def savebackup(self, tr, backupname):
    '''Save current dirstate into backup file

    ``tr`` is the transaction currently running, if any; it selects
    whether the pending or the regular dirstate file is backed up.
    ``backupname`` is the opener-relative name to copy the dirstate to.
    '''
    filename = self._actualfilename(tr)
    # backing a file up onto itself would lose the data
    assert backupname != filename

    # use '_writedirstate' instead of 'write' to write changes certainly,
    # because the latter omits writing out if transaction is running.
    # output file will be used to create backup of dirstate at this point.
    if self._dirty:
        self._writedirstate(
            tr,
            self._opener(filename, b"w", atomictemp=True, checkambig=True),
        )

    if tr:
        # ensure that subsequent tr.writepending returns True for
        # changes written out above, even if dirstate is never
        # changed after this
        tr.addfilegenerator(
            b'dirstate-1-main',
            (self._filename,),
            lambda f: self._writedirstate(tr, f),
            location=b'plain',
            post_finalize=True,
        )

    # copy the on-disk dirstate file over to the backup location
    self._opener.tryunlink(backupname)
    if self._opener.exists(filename):
        # hardlink backup is okay because _writedirstate is always called
        # with an "atomictemp=True" file.
        util.copyfile(
            self._opener.join(filename),
            self._opener.join(backupname),
            hardlink=True,
        )
    # dirstate-v2 may keep its data in a separate file that needs
    # backing up as well (see _new_backup_data_filename)
    data_pair = self._new_backup_data_filename(backupname)
    if data_pair is not None:
        data_filename, bck_data_filename = data_pair
        util.copyfile(
            self._opener.join(data_filename),
            self._opener.join(bck_data_filename),
            hardlink=True,
        )
        if tr is not None:
            # ensure that pending file written above is unlinked at
            # failure, even if tr.writepending isn't invoked until the
            # end of this transaction
            tr.registertmp(bck_data_filename, location=b'plain')
1650
1663
def restorebackup(self, tr, backupname):
    '''Restore dirstate by backup file

    ``tr`` is the transaction currently running, if any; it selects
    which dirstate file (pending or regular) is restored over.
    '''
    # this "invalidate()" prevents "wlock.release()" from writing
    # changes of dirstate out after restoring from backup file
    self.invalidate()
    o = self._opener
    if not o.exists(backupname):
        # there was no file backup, delete existing files
        filename = self._actualfilename(tr)
        data_file = None
        if self._use_dirstate_v2:
            data_file = self._map.docket.data_filename()
        if o.exists(filename):
            o.unlink(filename)
        if data_file is not None and o.exists(data_file):
            o.unlink(data_file)
        return
    filename = self._actualfilename(tr)
    # resolve the v2 data backup pair (if any) *before* moving the main
    # backup around, since backup_data_file needs to read ``backupname``
    data_pair = self.backup_data_file(backupname)
    if o.exists(filename) and util.samefile(
        o.join(backupname), o.join(filename)
    ):
        # backup and current file are the same on disk (e.g. a hardlink
        # backup); dropping the extra link is enough
        o.unlink(backupname)
    else:
        o.rename(backupname, filename, checkambig=True)

    if data_pair is not None:
        data_backup, target = data_pair
        if o.exists(target) and util.samefile(
            o.join(data_backup), o.join(target)
        ):
            # same file on disk; dropping the backup link is enough
            o.unlink(data_backup)
        else:
            o.rename(data_backup, target, checkambig=True)
1685
1698
def clearbackup(self, tr, backupname):
    """Clear backup file (and its v2 data backup, if any).

    ``tr`` is unused but kept for interface symmetry with
    ``savebackup``/``restorebackup``.
    """
    opener = self._opener
    if not opener.exists(backupname):
        return
    # resolve the data backup *before* removing the docket backup,
    # since backup_data_file reads ``backupname`` to find it
    data_backup = self.backup_data_file(backupname)
    opener.unlink(backupname)
    if data_backup is not None:
        opener.unlink(data_backup[0])
1694
1707
def verify(self, m1, m2, p1, narrow_matcher=None):
    """Check the dirstate contents against the parent manifests.

    Yield one formatted message per inconsistency found between the
    dirstate and the manifests ``m1``/``m2`` of parent ``p1`` (and its
    sibling). ``narrow_matcher``, when provided, restricts which
    manifest files are checked in the manifest->dirstate direction.
    """
    msg_missing_from_p1 = _(
        b"%s marked as tracked in p1 (%s) but not in manifest1\n"
    )
    msg_unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
    msg_missing_from_ps = _(
        b"%s marked as modified, but not in either manifest\n"
    )
    msg_missing_from_ds = _(
        b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
    )
    # dirstate -> manifest direction
    for filename, entry in self.items():
        if entry.p1_tracked:
            if (
                entry.modified
                and filename not in m1
                and filename not in m2
            ):
                yield msg_missing_from_ps % filename
            elif filename not in m1:
                yield msg_missing_from_p1 % (filename, node.short(p1))
        if entry.added and filename in m1:
            yield msg_unexpected_in_p1 % filename
    # manifest -> dirstate direction
    for filename in m1:
        if narrow_matcher is not None and not narrow_matcher(filename):
            continue
        if not self.get_entry(filename).p1_tracked:
            yield msg_missing_from_ds % (filename, node.short(p1))
General Comments 0
You need to be logged in to leave comments. Login now