##// END OF EJS Templates
dirstate: invalidate changes when parent-change fails...
marmoute -
r50852:96e526fe default
parent child Browse files
Show More
@@ -1,1570 +1,1590 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import collections
9 import collections
10 import contextlib
10 import contextlib
11 import os
11 import os
12 import stat
12 import stat
13 import uuid
13 import uuid
14
14
15 from .i18n import _
15 from .i18n import _
16 from .pycompat import delattr
16 from .pycompat import delattr
17
17
18 from hgdemandimport import tracing
18 from hgdemandimport import tracing
19
19
20 from . import (
20 from . import (
21 dirstatemap,
21 dirstatemap,
22 encoding,
22 encoding,
23 error,
23 error,
24 match as matchmod,
24 match as matchmod,
25 node,
25 node,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 util,
30 util,
31 )
31 )
32
32
33 from .dirstateutils import (
33 from .dirstateutils import (
34 docket as docketmod,
34 docket as docketmod,
35 timestamp,
35 timestamp,
36 )
36 )
37
37
38 from .interfaces import (
38 from .interfaces import (
39 dirstate as intdirstate,
39 dirstate as intdirstate,
40 util as interfaceutil,
40 util as interfaceutil,
41 )
41 )
42
42
# Load the C parser extension and the Rust dirstate support when available;
# both transparently fall back to the pure-Python implementations.
parsers = policy.importmod('parsers')
rustmod = policy.importrust('dirstate')

# True when the fast (Rust) dirstate-v2 implementation can be used.
HAS_FAST_DIRSTATE_V2 = rustmod is not None

# Local aliases for frequently used helpers.
propertycache = util.propertycache
filecache = scmutil.filecache
_rangemask = dirstatemap.rangemask

DirstateItem = dirstatemap.DirstateItem
53
53
54
54
class repocache(filecache):
    """filecache for files in .hg/"""

    def join(self, obj, fname):
        # resolve fname relative to the .hg/ directory via the repo opener
        return obj._opener.join(fname)
60
60
61
61
class rootcache(filecache):
    """filecache for files in the repository root"""

    def join(self, obj, fname):
        # resolve fname relative to the working-directory root
        return obj._join(fname)
67
67
68
68
def requires_parents_change(func):
    """Decorator restricting `func` to an active `parentchange` context.

    The wrapped method raises ProgrammingError when called outside of a
    `parentchange` context, or after the dirstate has been invalidated
    while such a context was active.
    """

    def wrap(self, *args, **kwargs):
        if not self.pendingparentchange():
            msg = 'calling `%s` outside of a parentchange context'
            msg %= func.__name__
            raise error.ProgrammingError(msg)
        if self._invalidated_context:
            # bug fix: the function name was never interpolated, so the
            # error carried a literal `%s`; format it like the branch above
            msg = 'calling `%s` after the dirstate was invalidated'
            msg %= func.__name__
            raise error.ProgrammingError(msg)
        return func(self, *args, **kwargs)

    return wrap
78
81
79
82
def requires_no_parents_change(func):
    """Decorator refusing to run `func` while a parentchange is pending."""

    def wrap(self, *args, **kwargs):
        if self.pendingparentchange():
            raise error.ProgrammingError(
                'calling `%s` inside of a parentchange context' % func.__name__
            )
        return func(self, *args, **kwargs)

    return wrap
89
92
90
93
91 @interfaceutil.implementer(intdirstate.idirstate)
94 @interfaceutil.implementer(intdirstate.idirstate)
92 class dirstate:
95 class dirstate:
    def __init__(
        self,
        opener,
        ui,
        root,
        validate,
        sparsematchfn,
        nodeconstants,
        use_dirstate_v2,
        use_tracked_hint=False,
    ):
        """Create a new dirstate object.

        opener is an open()-like callable that can be used to open the
        dirstate file; root is the root of the directory tracked by
        the dirstate.
        """
        self._use_dirstate_v2 = use_dirstate_v2
        self._use_tracked_hint = use_tracked_hint
        self._nodeconstants = nodeconstants
        self._opener = opener
        self._validate = validate
        self._root = root
        # Either build a sparse-matcher or None if sparse is disabled
        self._sparsematchfn = sparsematchfn
        # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
        # UNC path pointing to root share (issue4557)
        self._rootdir = pathutil.normasprefix(root)
        # True if any internal state may be different
        self._dirty = False
        # True if the set of tracked file may be different
        self._dirty_tracked_set = False
        self._ui = ui
        self._filecache = {}
        # nesting level of `parentchange` context
        self._parentwriters = 0
        # True if the current dirstate changing operations have been
        # invalidated (used to make sure all nested contexts have been exited)
        self._invalidated_context = False
        self._filename = b'dirstate'
        self._filename_th = b'dirstate-tracked-hint'
        self._pendingfilename = b'%s.pending' % self._filename
        self._plchangecallbacks = {}
        self._origpl = None
        self._mapcls = dirstatemap.dirstatemap
        # Access and cache cwd early, so we don't access it for the first time
        # after a working-copy update caused it to not exist (accessing it then
        # raises an exception).
        self._cwd
138
145
    def prefetch_parents(self):
        """make sure the parents are loaded

        Used to avoid a race condition.
        """
        # merely touching the property is enough to populate the cache
        self._pl
145
152
    @contextlib.contextmanager
    def parentchange(self):
        """Context manager for handling dirstate parents.

        If an exception occurs in the scope of the context manager,
        the incoherent dirstate won't be written when wlock is
        released.
        """
        if self._invalidated_context:
            # a previous (possibly nested) parent-change failed; refuse to
            # start a new one until every context has unwound
            msg = "trying to use an invalidated dirstate before it has reset"
            raise error.ProgrammingError(msg)
        self._parentwriters += 1
        try:
            yield
        except Exception:
            # drop the half-updated in-memory state instead of risking a
            # write of an incoherent dirstate
            self.invalidate()
            raise
        finally:
            if self._parentwriters > 0:
                if self._invalidated_context:
                    # make sure we invalidate anything an upper context might
                    # have changed.
                    self.invalidate()
                self._parentwriters -= 1
                # The invalidation is complete once we exit the final context
                # manager
                if self._parentwriters <= 0:
                    assert self._parentwriters == 0
                    self._invalidated_context = False
162
182
    def pendingparentchange(self):
        """Returns true if the dirstate is in the middle of a set of changes
        that modify the dirstate parent.
        """
        # non-zero while at least one `parentchange` context is active
        return self._parentwriters > 0
168
188
    @propertycache
    def _map(self):
        """Return the dirstate contents (see documentation for dirstatemap)."""
        # assigning to self._map replaces the propertycache placeholder, so
        # later accesses hit the instance attribute directly
        self._map = self._mapcls(
            self._ui,
            self._opener,
            self._root,
            self._nodeconstants,
            self._use_dirstate_v2,
        )
        return self._map
180
200
181 @property
201 @property
182 def _sparsematcher(self):
202 def _sparsematcher(self):
183 """The matcher for the sparse checkout.
203 """The matcher for the sparse checkout.
184
204
185 The working directory may not include every file from a manifest. The
205 The working directory may not include every file from a manifest. The
186 matcher obtained by this property will match a path if it is to be
206 matcher obtained by this property will match a path if it is to be
187 included in the working directory.
207 included in the working directory.
188
208
189 When sparse if disabled, return None.
209 When sparse if disabled, return None.
190 """
210 """
191 if self._sparsematchfn is None:
211 if self._sparsematchfn is None:
192 return None
212 return None
193 # TODO there is potential to cache this property. For now, the matcher
213 # TODO there is potential to cache this property. For now, the matcher
194 # is resolved on every access. (But the called function does use a
214 # is resolved on every access. (But the called function does use a
195 # cache to keep the lookup fast.)
215 # cache to keep the lookup fast.)
196 return self._sparsematchfn()
216 return self._sparsematchfn()
197
217
198 @repocache(b'branch')
218 @repocache(b'branch')
199 def _branch(self):
219 def _branch(self):
200 try:
220 try:
201 return self._opener.read(b"branch").strip() or b"default"
221 return self._opener.read(b"branch").strip() or b"default"
202 except FileNotFoundError:
222 except FileNotFoundError:
203 return b"default"
223 return b"default"
204
224
    @property
    def _pl(self):
        # (p1, p2) nodeids of the working directory parents
        return self._map.parents()
208
228
    def hasdir(self, d):
        # True if `d` is a directory containing at least one tracked file
        return self._map.hastrackeddir(d)
211
231
    @rootcache(b'.hgignore')
    def _ignore(self):
        """Return the matcher built from every configured ignore file."""
        files = self._ignorefiles()
        if not files:
            # nothing configured: a matcher that never matches
            return matchmod.never()

        pats = [b'include:%s' % f for f in files]
        return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
220
240
    @propertycache
    def _slash(self):
        # True when paths should be displayed with '/' even though the
        # native separator differs (ui.slash option)
        return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
224
244
    @propertycache
    def _checklink(self):
        # True if the working-directory filesystem supports symlinks
        return util.checklink(self._root)
228
248
    @propertycache
    def _checkexec(self):
        # True if the working-directory filesystem supports the exec bit
        return bool(util.checkexec(self._root))
232
252
    @propertycache
    def _checkcase(self):
        # True when the filesystem is case-insensitive (probed on .hg)
        return not util.fscasesensitive(self._join(b'.hg'))
236
256
    def _join(self, f):
        """Return the absolute path of `f` inside the working directory."""
        # much faster than os.path.join()
        # it's safe because f is always a relative path
        return self._rootdir + f
241
261
    def flagfunc(self, buildfallback):
        """build a callable that returns flags associated with a filename

        The information is extracted from three possible layers:
        1. the file system if it supports the information
        2. the "fallback" information stored in the dirstate if any
        3. a more expensive mechanism inferring the flags from the parents.
        """

        # small hack to cache the result of buildfallback()
        fallback_func = []

        def get_flags(x):
            entry = None
            fallback_value = None
            try:
                st = os.lstat(self._join(x))
            except OSError:
                # missing or unreadable file: no flags
                return b''

            if self._checklink:
                # layer 1: trust the filesystem
                if util.statislink(st):
                    return b'l'
            else:
                # layer 2: fallback bits recorded in the dirstate entry
                entry = self.get_entry(x)
                if entry.has_fallback_symlink:
                    if entry.fallback_symlink:
                        return b'l'
                else:
                    # layer 3: infer from the parents (built lazily, once)
                    if not fallback_func:
                        fallback_func.append(buildfallback())
                    fallback_value = fallback_func[0](x)
                    if b'l' in fallback_value:
                        return b'l'

            if self._checkexec:
                if util.statisexec(st):
                    return b'x'
            else:
                if entry is None:
                    entry = self.get_entry(x)
                if entry.has_fallback_exec:
                    if entry.fallback_exec:
                        return b'x'
                else:
                    if fallback_value is None:
                        if not fallback_func:
                            fallback_func.append(buildfallback())
                        fallback_value = fallback_func[0](x)
                    if b'x' in fallback_value:
                        return b'x'
            return b''

        return get_flags
296
316
    @propertycache
    def _cwd(self):
        # internal config: ui.forcecwd
        forcecwd = self._ui.config(b'ui', b'forcecwd')
        if forcecwd:
            return forcecwd
        return encoding.getcwd()
304
324
305 def getcwd(self):
325 def getcwd(self):
306 """Return the path from which a canonical path is calculated.
326 """Return the path from which a canonical path is calculated.
307
327
308 This path should be used to resolve file patterns or to convert
328 This path should be used to resolve file patterns or to convert
309 canonical paths back to file paths for display. It shouldn't be
329 canonical paths back to file paths for display. It shouldn't be
310 used to get real file paths. Use vfs functions instead.
330 used to get real file paths. Use vfs functions instead.
311 """
331 """
312 cwd = self._cwd
332 cwd = self._cwd
313 if cwd == self._root:
333 if cwd == self._root:
314 return b''
334 return b''
315 # self._root ends with a path separator if self._root is '/' or 'C:\'
335 # self._root ends with a path separator if self._root is '/' or 'C:\'
316 rootsep = self._root
336 rootsep = self._root
317 if not util.endswithsep(rootsep):
337 if not util.endswithsep(rootsep):
318 rootsep += pycompat.ossep
338 rootsep += pycompat.ossep
319 if cwd.startswith(rootsep):
339 if cwd.startswith(rootsep):
320 return cwd[len(rootsep) :]
340 return cwd[len(rootsep) :]
321 else:
341 else:
322 # we're outside the repo. return an absolute path.
342 # we're outside the repo. return an absolute path.
323 return cwd
343 return cwd
324
344
325 def pathto(self, f, cwd=None):
345 def pathto(self, f, cwd=None):
326 if cwd is None:
346 if cwd is None:
327 cwd = self.getcwd()
347 cwd = self.getcwd()
328 path = util.pathto(self._root, cwd, f)
348 path = util.pathto(self._root, cwd, f)
329 if self._slash:
349 if self._slash:
330 return util.pconvert(path)
350 return util.pconvert(path)
331 return path
351 return path
332
352
333 def get_entry(self, path):
353 def get_entry(self, path):
334 """return a DirstateItem for the associated path"""
354 """return a DirstateItem for the associated path"""
335 entry = self._map.get(path)
355 entry = self._map.get(path)
336 if entry is None:
356 if entry is None:
337 return DirstateItem()
357 return DirstateItem()
338 return entry
358 return entry
339
359
    def __contains__(self, key):
        # membership is delegated to the dirstate map
        return key in self._map
342
362
    def __iter__(self):
        # iterate over filenames in sorted order
        return iter(sorted(self._map))
345
365
    def items(self):
        """Iterate over (filename, DirstateItem) pairs."""
        return self._map.items()

    # historical alias kept for compatibility
    iteritems = items
350
370
    def parents(self):
        # validated nodeids of both working directory parents
        return [self._validate(p) for p in self._pl]
353
373
    def p1(self):
        # first working directory parent
        return self._validate(self._pl[0])
356
376
    def p2(self):
        # second working directory parent (nullid outside of a merge)
        return self._validate(self._pl[1])
359
379
    @property
    def in_merge(self):
        """True if a merge is in progress"""
        return self._pl[1] != self._nodeconstants.nullid
364
384
    def branch(self):
        # branch name converted to the local encoding
        return encoding.tolocal(self._branch)
367
387
    def setparents(self, p1, p2=None):
        """Set dirstate parents to p1 and p2.

        When moving from two parents to one, "merged" entries are
        adjusted to normal and previous copy records discarded and
        returned by the call.

        See localrepo.setparents()
        """
        if p2 is None:
            p2 = self._nodeconstants.nullid
        if self._parentwriters == 0:
            raise ValueError(
                b"cannot set dirstate parent outside of "
                b"dirstate.parentchange context manager"
            )

        self._dirty = True
        oldp2 = self._pl[1]
        if self._origpl is None:
            # remember the pre-change parents so a later write can tell
            # whether they actually moved
            self._origpl = self._pl
        nullid = self._nodeconstants.nullid
        # True if we need to fold p2 related state back to a linear case
        fold_p2 = oldp2 != nullid and p2 == nullid
        return self._map.setparents(p1, p2, fold_p2=fold_p2)
393
413
    def setbranch(self, branch):
        """Record `branch` (local encoding) as the current branch name."""
        self.__class__._branch.set(self, encoding.fromlocal(branch))
        f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
        try:
            f.write(self._branch + b'\n')
            f.close()

            # make sure filecache has the correct stat info for _branch after
            # replacing the underlying file
            ce = self._filecache[b'_branch']
            if ce:
                ce.refresh()
        except:  # re-raises
            f.discard()
            raise
409
429
    def invalidate(self):
        """Causes the next access to reread the dirstate.

        This is different from localrepo.invalidatedirstate() because it always
        rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
        check whether the dirstate has changed before rereading it."""

        for a in ("_map", "_branch", "_ignore"):
            if a in self.__dict__:
                delattr(self, a)
        self._dirty = False
        self._dirty_tracked_set = False
        # if we are in the middle of a parent-change operation, the whole
        # operation must be considered invalid until every nested context
        # exits (see `parentchange`)
        self._invalidated_context = self._parentwriters > 0
        self._origpl = None
424
444
425 def copy(self, source, dest):
445 def copy(self, source, dest):
426 """Mark dest as a copy of source. Unmark dest if source is None."""
446 """Mark dest as a copy of source. Unmark dest if source is None."""
427 if source == dest:
447 if source == dest:
428 return
448 return
429 self._dirty = True
449 self._dirty = True
430 if source is not None:
450 if source is not None:
431 self._check_sparse(source)
451 self._check_sparse(source)
432 self._map.copymap[dest] = source
452 self._map.copymap[dest] = source
433 else:
453 else:
434 self._map.copymap.pop(dest, None)
454 self._map.copymap.pop(dest, None)
435
455
    def copied(self, file):
        # source of `file` if it was recorded as copied, else None
        return self._map.copymap.get(file, None)
438
458
    def copies(self):
        # mapping of destination -> source for all recorded copies
        return self._map.copymap
441
461
    @requires_no_parents_change
    def set_tracked(self, filename, reset_copy=False):
        """a "public" method for generic code to mark a file as tracked

        This function is to be called outside of "update/merge" case. For
        example by a command like `hg add X`.

        if reset_copy is set, any existing copy information will be dropped.

        return True the file was previously untracked, False otherwise.
        """
        self._dirty = True
        entry = self._map.get(filename)
        if entry is None or not entry.tracked:
            # refuse names that are invalid or shadow a tracked directory
            self._check_new_tracked_filename(filename)
        pre_tracked = self._map.set_tracked(filename)
        if reset_copy:
            self._map.copymap.pop(filename, None)
        if pre_tracked:
            self._dirty_tracked_set = True
        return pre_tracked
463
483
464 @requires_no_parents_change
484 @requires_no_parents_change
465 def set_untracked(self, filename):
485 def set_untracked(self, filename):
466 """a "public" method for generic code to mark a file as untracked
486 """a "public" method for generic code to mark a file as untracked
467
487
468 This function is to be called outside of "update/merge" case. For
488 This function is to be called outside of "update/merge" case. For
469 example by a command like `hg remove X`.
489 example by a command like `hg remove X`.
470
490
471 return True the file was previously tracked, False otherwise.
491 return True the file was previously tracked, False otherwise.
472 """
492 """
473 ret = self._map.set_untracked(filename)
493 ret = self._map.set_untracked(filename)
474 if ret:
494 if ret:
475 self._dirty = True
495 self._dirty = True
476 self._dirty_tracked_set = True
496 self._dirty_tracked_set = True
477 return ret
497 return ret
478
498
    @requires_no_parents_change
    def set_clean(self, filename, parentfiledata):
        """record that the current state of the file on disk is known to be clean"""
        self._dirty = True
        if not self._map[filename].tracked:
            self._check_new_tracked_filename(filename)
        # cached stat data for the file as found on disk
        (mode, size, mtime) = parentfiledata
        self._map.set_clean(filename, mode, size, mtime)
487
507
    @requires_no_parents_change
    def set_possibly_dirty(self, filename):
        """record that the current state of the file on disk is unknown"""
        self._dirty = True
        self._map.set_possibly_dirty(filename)
493
513
    @requires_parents_change
    def update_file_p1(
        self,
        filename,
        p1_tracked,
    ):
        """Set a file as tracked in the parent (or not)

        This is to be called when adjusting the dirstate to a new parent
        after a history rewriting operation.

        It should not be called during a merge (p2 != nullid) and only within
        a `with dirstate.parentchange():` context.
        """
        if self.in_merge:
            msg = b'update_file_reference should not be called when merging'
            raise error.ProgrammingError(msg)
        entry = self._map.get(filename)
        if entry is None:
            wc_tracked = False
        else:
            wc_tracked = entry.tracked
        if not (p1_tracked or wc_tracked):
            # the file is no longer relevant to anyone
            if self._map.get(filename) is not None:
                self._map.reset_state(filename)
                self._dirty = True
        elif (not p1_tracked) and wc_tracked:
            if entry is not None and entry.added:
                return  # avoid dropping copy information (maybe?)

        self._map.reset_state(
            filename,
            wc_tracked,
            p1_tracked,
            # the underlying reference might have changed, we will have to
            # check it.
            has_meaningful_mtime=False,
        )
533
553
    @requires_parents_change
    def update_file(
        self,
        filename,
        wc_tracked,
        p1_tracked,
        p2_info=False,
        possibly_dirty=False,
        parentfiledata=None,
    ):
        """update the information about a file in the dirstate

        This is to be called when the dirstate's parent changes to keep track
        of what is the file situation in regards to the working copy and its parent.

        This function must be called within a `dirstate.parentchange` context.

        note: the API is at an early stage and we might need to adjust it
        depending of what information ends up being relevant and useful to
        other processing.
        """

        # note: I do not think we need to double check name clash here since we
        # are in a update/merge case that should already have taken care of
        # this. The test agrees

        self._dirty = True
        old_entry = self._map.get(filename)
        if old_entry is None:
            prev_tracked = False
        else:
            prev_tracked = old_entry.tracked
        if prev_tracked != wc_tracked:
            # the tracked-file set changes, so the tracked-hint must be bumped
            self._dirty_tracked_set = True

        self._map.reset_state(
            filename,
            wc_tracked,
            p1_tracked,
            p2_info=p2_info,
            has_meaningful_mtime=not possibly_dirty,
            parentfiledata=parentfiledata,
        )
577
597
def _check_new_tracked_filename(self, filename):
    """Validate that `filename` may start being tracked.

    Aborts when the name itself is invalid, when it collides with a
    tracked directory, or when one of its ancestor paths is occupied by
    a live (non-removed) dirstate file; finally verifies the sparse
    profile allows it.
    """
    scmutil.checkfilename(filename)
    if self._map.hastrackeddir(filename):
        msg = _(b'directory %r already in dirstate')
        msg %= pycompat.bytestr(filename)
        raise error.Abort(msg)
    # is some ancestor path shadowed by an existing dirstate file?
    for parent in pathutil.finddirs(filename):
        if self._map.hastrackeddir(parent):
            break
        parent_entry = self._map.get(parent)
        if parent_entry is None or parent_entry.removed:
            continue
        msg = _(b'file %r in dirstate clashes with %r')
        msg %= (pycompat.bytestr(parent), pycompat.bytestr(filename))
        raise error.Abort(msg)
    self._check_sparse(filename)
594
614
595 def _check_sparse(self, filename):
615 def _check_sparse(self, filename):
596 """Check that a filename is inside the sparse profile"""
616 """Check that a filename is inside the sparse profile"""
597 sparsematch = self._sparsematcher
617 sparsematch = self._sparsematcher
598 if sparsematch is not None and not sparsematch.always():
618 if sparsematch is not None and not sparsematch.always():
599 if not sparsematch(filename):
619 if not sparsematch(filename):
600 msg = _(b"cannot add '%s' - it is outside the sparse checkout")
620 msg = _(b"cannot add '%s' - it is outside the sparse checkout")
601 hint = _(
621 hint = _(
602 b'include file with `hg debugsparse --include <pattern>` or use '
622 b'include file with `hg debugsparse --include <pattern>` or use '
603 b'`hg add -s <file>` to include file directory while adding'
623 b'`hg add -s <file>` to include file directory while adding'
604 )
624 )
605 raise error.Abort(msg % filename, hint=hint)
625 raise error.Abort(msg % filename, hint=hint)
606
626
def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
    """Determine the case-folded form of `path` by probing the filesystem.

    `normed` is the case-normalized form of `path`; `storemap` is the
    fold-map cache (the map's filefoldmap or dirfoldmap) into which the
    discovered spelling is stored when the path exists on disk.
    """
    if exists is None:
        exists = os.path.lexists(os.path.join(self._root, path))
    if not exists:
        # Maybe a path component exists
        if not ignoremissing and b'/' in path:
            d, f = path.rsplit(b'/', 1)
            d = self._normalize(d, False, ignoremissing, None)
            folded = d + b"/" + f
        else:
            # No path components, preserve original case
            folded = path
    else:
        # recursively normalize leading directory components
        # against dirstate
        if b'/' in normed:
            d, f = normed.rsplit(b'/', 1)
            d = self._normalize(d, False, ignoremissing, True)
            r = self._root + b"/" + d
            folded = d + b"/" + util.fspath(f, r)
        else:
            folded = util.fspath(normed, self._root)
        # cache only results confirmed against the filesystem
        storemap[normed] = folded

    return folded
632
652
def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
    """Return the case-normalized spelling of `path`, treated as a file.

    Answers cached in the map's filefoldmap are returned directly;
    otherwise the filesystem is consulted via _discoverpath (unless the
    caller already knows the spelling, `isknown`).
    """
    normed = util.normcase(path)
    cached = self._map.filefoldmap.get(normed, None)
    if cached is not None:
        return cached
    if isknown:
        return path
    return self._discoverpath(
        path, normed, ignoremissing, exists, self._map.filefoldmap
    )
644
664
def _normalize(self, path, isknown, ignoremissing=False, exists=None):
    """Return the case-normalized spelling of `path` (file or directory)."""
    normed = util.normcase(path)
    folded = self._map.filefoldmap.get(normed, None)
    if folded is None:
        folded = self._map.dirfoldmap.get(normed, None)
    if folded is not None:
        return folded
    if isknown:
        return path
    # store discovered result in dirfoldmap so that future
    # normalizefile calls don't start matching directories
    return self._discoverpath(
        path, normed, ignoremissing, exists, self._map.dirfoldmap
    )
660
680
def normalize(self, path, isknown=False, ignoremissing=False):
    """
    normalize the case of a pathname when on a casefolding filesystem

    isknown specifies whether the filename came from walking the
    disk, to avoid extra filesystem access.

    If ignoremissing is True, missing path are returned
    unchanged. Otherwise, we try harder to normalize possibly
    existing path components.

    The normalized case is determined based on the following precedence:

    - version of name already stored in the dirstate
    - version of name stored on disk
    - version provided via command arguments
    """
    # on case-sensitive filesystems the path is already canonical
    if not self._checkcase:
        return path
    return self._normalize(path, isknown, ignoremissing)
682
702
def clear(self):
    """Drop every entry from the dirstate map and mark it dirty."""
    self._map.clear()
    self._dirty = True
686
706
def rebuild(self, parent, allfiles, changedfiles=None):
    """Rebuild dirstate content so that it tracks `allfiles` under `parent`.

    When `changedfiles` is None the whole dirstate is cleared and rebuilt
    from `allfiles`; otherwise only the given files are re-examined
    (looked up again when present in `allfiles`, dropped when not).
    """

    matcher = self._sparsematcher
    if matcher is not None and not matcher.always():
        # should not add non-matching files
        allfiles = [f for f in allfiles if matcher(f)]
        if changedfiles:
            changedfiles = [f for f in changedfiles if matcher(f)]

        if changedfiles is not None:
            # these files will be deleted from the dirstate when they are
            # not found to be in allfiles
            dirstatefilestoremove = {f for f in self if not matcher(f)}
            changedfiles = dirstatefilestoremove.union(changedfiles)

    if changedfiles is None:
        # Rebuild entire dirstate
        to_lookup = allfiles
        to_drop = []
        self.clear()
    elif len(changedfiles) < 10:
        # Avoid turning allfiles into a set, which can be expensive if it's
        # large.
        to_lookup = []
        to_drop = []
        for f in changedfiles:
            if f in allfiles:
                to_lookup.append(f)
            else:
                to_drop.append(f)
    else:
        changedfilesset = set(changedfiles)
        to_lookup = changedfilesset & set(allfiles)
        to_drop = changedfilesset - to_lookup

    # remember the pre-rebuild parents so parent-change callbacks can
    # fire at write time
    if self._origpl is None:
        self._origpl = self._pl
    self._map.setparents(parent, self._nodeconstants.nullid)

    for f in to_lookup:

        if self.in_merge:
            self.set_tracked(f)
        else:
            self._map.reset_state(
                f,
                wc_tracked=True,
                p1_tracked=True,
            )
    for f in to_drop:
        self._map.reset_state(f)

    self._dirty = True
740
760
def identity(self):
    """Return an identity token of the dirstate's backing storage.

    If the identity recorded before a read equals this value, changes
    computed from that read can be written out without losing
    consistency.
    """
    return self._map.identity
748
768
def write(self, tr):
    """Write pending dirstate changes out.

    With a transaction `tr`, the writes are registered as post-finalize
    file generators and happen when the transaction completes; without
    one, the files are written immediately via atomic temp files.
    """
    if not self._dirty:
        return

    # the tracked-hint key file is only rewritten when the tracked set
    # actually changed (and the feature is enabled)
    write_key = self._use_tracked_hint and self._dirty_tracked_set
    if tr:
        # delay writing in-memory changes out
        tr.addfilegenerator(
            b'dirstate-1-main',
            (self._filename,),
            lambda f: self._writedirstate(tr, f),
            location=b'plain',
            post_finalize=True,
        )
        if write_key:
            tr.addfilegenerator(
                b'dirstate-2-key-post',
                (self._filename_th,),
                lambda f: self._write_tracked_hint(tr, f),
                location=b'plain',
                post_finalize=True,
            )
        return

    file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
    with file(self._filename) as f:
        self._writedirstate(tr, f)
    if write_key:
        # we update the key-file after writing to make sure reader have a
        # key that match the newly written content
        with file(self._filename_th) as f:
            self._write_tracked_hint(tr, f)
781
801
def delete_tracked_hint(self):
    """Remove the tracked-hint file and stop maintaining it.

    To be used by format downgrades operation.
    """
    self._opener.unlink(self._filename_th)
    self._use_tracked_hint = False
788
808
def addparentchangecallback(self, category, callback):
    """register `callback` to run when the working dir parents change

    The callback is invoked as:
        callback(dirstate, (oldp1, oldp2), (newp1, newp2))

    Registering another callback under the same `category` replaces
    the previous one.
    """
    self._plchangecallbacks[category] = callback
799
819
800 def _writedirstate(self, tr, st):
820 def _writedirstate(self, tr, st):
801 # notify callbacks about parents change
821 # notify callbacks about parents change
802 if self._origpl is not None and self._origpl != self._pl:
822 if self._origpl is not None and self._origpl != self._pl:
803 for c, callback in sorted(self._plchangecallbacks.items()):
823 for c, callback in sorted(self._plchangecallbacks.items()):
804 callback(self, self._origpl, self._pl)
824 callback(self, self._origpl, self._pl)
805 self._origpl = None
825 self._origpl = None
806 self._map.write(tr, st)
826 self._map.write(tr, st)
807 self._dirty = False
827 self._dirty = False
808 self._dirty_tracked_set = False
828 self._dirty_tracked_set = False
809
829
def _write_tracked_hint(self, tr, f):
    """Write a fresh random key into the tracked-hint file `f`.

    The key lets readers detect that the set of tracked files may have
    changed since they last looked.
    """
    new_key = node.hex(uuid.uuid4().bytes)
    # "1" is the tracked-hint format version
    f.write(b"1\n%s\n" % new_key)
813
833
814 def _dirignore(self, f):
834 def _dirignore(self, f):
815 if self._ignore(f):
835 if self._ignore(f):
816 return True
836 return True
817 for p in pathutil.finddirs(f):
837 for p in pathutil.finddirs(f):
818 if self._ignore(p):
838 if self._ignore(p):
819 return True
839 return True
820 return False
840 return False
821
841
822 def _ignorefiles(self):
842 def _ignorefiles(self):
823 files = []
843 files = []
824 if os.path.exists(self._join(b'.hgignore')):
844 if os.path.exists(self._join(b'.hgignore')):
825 files.append(self._join(b'.hgignore'))
845 files.append(self._join(b'.hgignore'))
826 for name, path in self._ui.configitems(b"ui"):
846 for name, path in self._ui.configitems(b"ui"):
827 if name == b'ignore' or name.startswith(b'ignore.'):
847 if name == b'ignore' or name.startswith(b'ignore.'):
828 # we need to use os.path.join here rather than self._join
848 # we need to use os.path.join here rather than self._join
829 # because path is arbitrary and user-specified
849 # because path is arbitrary and user-specified
830 files.append(os.path.join(self._rootdir, util.expandpath(path)))
850 files.append(os.path.join(self._rootdir, util.expandpath(path)))
831 return files
851 return files
832
852
833 def _ignorefileandline(self, f):
853 def _ignorefileandline(self, f):
834 files = collections.deque(self._ignorefiles())
854 files = collections.deque(self._ignorefiles())
835 visited = set()
855 visited = set()
836 while files:
856 while files:
837 i = files.popleft()
857 i = files.popleft()
838 patterns = matchmod.readpatternfile(
858 patterns = matchmod.readpatternfile(
839 i, self._ui.warn, sourceinfo=True
859 i, self._ui.warn, sourceinfo=True
840 )
860 )
841 for pattern, lineno, line in patterns:
861 for pattern, lineno, line in patterns:
842 kind, p = matchmod._patsplit(pattern, b'glob')
862 kind, p = matchmod._patsplit(pattern, b'glob')
843 if kind == b"subinclude":
863 if kind == b"subinclude":
844 if p not in visited:
864 if p not in visited:
845 files.append(p)
865 files.append(p)
846 continue
866 continue
847 m = matchmod.match(
867 m = matchmod.match(
848 self._root, b'', [], [pattern], warn=self._ui.warn
868 self._root, b'', [], [pattern], warn=self._ui.warn
849 )
869 )
850 if m(f):
870 if m(f):
851 return (i, lineno, line)
871 return (i, lineno, line)
852 visited.add(i)
872 visited.add(i)
853 return (None, -1, b"")
873 return (None, -1, b"")
854
874
def _walkexplicit(self, match, subrepos):
    """Get stat data about the files explicitly specified by match.

    Return a triple (results, dirsfound, dirsnotfound).
    - results is a mapping from filename to stat result. It also contains
      listings mapping subrepos and .hg to None.
    - dirsfound is a list of files found to be directories.
    - dirsnotfound is a list of files that the dirstate thinks are
      directories and that were not found."""

    def badtype(mode):
        # build a human-readable complaint for a non-file, non-symlink entry
        kind = _(b'unknown')
        if stat.S_ISCHR(mode):
            kind = _(b'character device')
        elif stat.S_ISBLK(mode):
            kind = _(b'block device')
        elif stat.S_ISFIFO(mode):
            kind = _(b'fifo')
        elif stat.S_ISSOCK(mode):
            kind = _(b'socket')
        elif stat.S_ISDIR(mode):
            kind = _(b'directory')
        return _(b'unsupported file type (type is %s)') % kind

    # bind frequently used lookups to locals for the loops below
    badfn = match.bad
    dmap = self._map
    lstat = os.lstat
    getkind = stat.S_IFMT
    dirkind = stat.S_IFDIR
    regkind = stat.S_IFREG
    lnkkind = stat.S_IFLNK
    join = self._join
    dirsfound = []
    foundadd = dirsfound.append
    dirsnotfound = []
    notfoundadd = dirsnotfound.append

    if not match.isexact() and self._checkcase:
        normalize = self._normalize
    else:
        normalize = None

    # drop explicit files that live inside a subrepo; the subrepo
    # handles them itself (both lists are sorted for the merge walk)
    files = sorted(match.files())
    subrepos.sort()
    i, j = 0, 0
    while i < len(files) and j < len(subrepos):
        subpath = subrepos[j] + b"/"
        if files[i] < subpath:
            i += 1
            continue
        while i < len(files) and files[i].startswith(subpath):
            del files[i]
        j += 1

    if not files or b'' in files:
        files = [b'']
        # constructing the foldmap is expensive, so don't do it for the
        # common case where files is ['']
        normalize = None
    results = dict.fromkeys(subrepos)
    results[b'.hg'] = None

    for ff in files:
        if normalize:
            nf = normalize(ff, False, True)
        else:
            nf = ff
        if nf in results:
            continue

        try:
            st = lstat(join(nf))
            kind = getkind(st.st_mode)
            if kind == dirkind:
                if nf in dmap:
                    # file replaced by dir on disk but still in dirstate
                    results[nf] = None
                foundadd((nf, ff))
            elif kind == regkind or kind == lnkkind:
                results[nf] = st
            else:
                badfn(ff, badtype(kind))
                if nf in dmap:
                    results[nf] = None
        except OSError as inst:  # nf not found on disk - it is dirstate only
            if nf in dmap:  # does it exactly match a missing file?
                results[nf] = None
            else:  # does it match a missing directory?
                if self._map.hasdir(nf):
                    notfoundadd(nf)
                else:
                    badfn(ff, encoding.strtolocal(inst.strerror))

    # match.files() may contain explicitly-specified paths that shouldn't
    # be taken; drop them from the list of files found. dirsfound/notfound
    # aren't filtered here because they will be tested later.
    if match.anypats():
        for f in list(results):
            if f == b'.hg' or f in subrepos:
                # keep sentinel to disable further out-of-repo walks
                continue
            if not match(f):
                del results[f]

    # Case insensitive filesystems cannot rely on lstat() failing to detect
    # a case-only rename. Prune the stat object for any file that does not
    # match the case in the filesystem, if there are multiple files that
    # normalize to the same path.
    if match.isexact() and self._checkcase:
        normed = {}

        # group the stat'ed results by their case-normalized spelling
        for f, st in results.items():
            if st is None:
                continue

            nc = util.normcase(f)
            paths = normed.get(nc)

            if paths is None:
                paths = set()
                normed[nc] = paths

            paths.add(f)

        for norm, paths in normed.items():
            if len(paths) > 1:
                for path in paths:
                    folded = self._discoverpath(
                        path, norm, True, None, self._map.dirfoldmap
                    )
                    if path != folded:
                        results[path] = None

    return results, dirsfound, dirsnotfound
989
1009
990 def walk(self, match, subrepos, unknown, ignored, full=True):
1010 def walk(self, match, subrepos, unknown, ignored, full=True):
991 """
1011 """
992 Walk recursively through the directory tree, finding all files
1012 Walk recursively through the directory tree, finding all files
993 matched by match.
1013 matched by match.
994
1014
995 If full is False, maybe skip some known-clean files.
1015 If full is False, maybe skip some known-clean files.
996
1016
997 Return a dict mapping filename to stat-like object (either
1017 Return a dict mapping filename to stat-like object (either
998 mercurial.osutil.stat instance or return value of os.stat()).
1018 mercurial.osutil.stat instance or return value of os.stat()).
999
1019
1000 """
1020 """
1001 # full is a flag that extensions that hook into walk can use -- this
1021 # full is a flag that extensions that hook into walk can use -- this
1002 # implementation doesn't use it at all. This satisfies the contract
1022 # implementation doesn't use it at all. This satisfies the contract
1003 # because we only guarantee a "maybe".
1023 # because we only guarantee a "maybe".
1004
1024
1005 if ignored:
1025 if ignored:
1006 ignore = util.never
1026 ignore = util.never
1007 dirignore = util.never
1027 dirignore = util.never
1008 elif unknown:
1028 elif unknown:
1009 ignore = self._ignore
1029 ignore = self._ignore
1010 dirignore = self._dirignore
1030 dirignore = self._dirignore
1011 else:
1031 else:
1012 # if not unknown and not ignored, drop dir recursion and step 2
1032 # if not unknown and not ignored, drop dir recursion and step 2
1013 ignore = util.always
1033 ignore = util.always
1014 dirignore = util.always
1034 dirignore = util.always
1015
1035
1016 if self._sparsematchfn is not None:
1036 if self._sparsematchfn is not None:
1017 em = matchmod.exact(match.files())
1037 em = matchmod.exact(match.files())
1018 sm = matchmod.unionmatcher([self._sparsematcher, em])
1038 sm = matchmod.unionmatcher([self._sparsematcher, em])
1019 match = matchmod.intersectmatchers(match, sm)
1039 match = matchmod.intersectmatchers(match, sm)
1020
1040
1021 matchfn = match.matchfn
1041 matchfn = match.matchfn
1022 matchalways = match.always()
1042 matchalways = match.always()
1023 matchtdir = match.traversedir
1043 matchtdir = match.traversedir
1024 dmap = self._map
1044 dmap = self._map
1025 listdir = util.listdir
1045 listdir = util.listdir
1026 lstat = os.lstat
1046 lstat = os.lstat
1027 dirkind = stat.S_IFDIR
1047 dirkind = stat.S_IFDIR
1028 regkind = stat.S_IFREG
1048 regkind = stat.S_IFREG
1029 lnkkind = stat.S_IFLNK
1049 lnkkind = stat.S_IFLNK
1030 join = self._join
1050 join = self._join
1031
1051
1032 exact = skipstep3 = False
1052 exact = skipstep3 = False
1033 if match.isexact(): # match.exact
1053 if match.isexact(): # match.exact
1034 exact = True
1054 exact = True
1035 dirignore = util.always # skip step 2
1055 dirignore = util.always # skip step 2
1036 elif match.prefix(): # match.match, no patterns
1056 elif match.prefix(): # match.match, no patterns
1037 skipstep3 = True
1057 skipstep3 = True
1038
1058
1039 if not exact and self._checkcase:
1059 if not exact and self._checkcase:
1040 normalize = self._normalize
1060 normalize = self._normalize
1041 normalizefile = self._normalizefile
1061 normalizefile = self._normalizefile
1042 skipstep3 = False
1062 skipstep3 = False
1043 else:
1063 else:
1044 normalize = self._normalize
1064 normalize = self._normalize
1045 normalizefile = None
1065 normalizefile = None
1046
1066
1047 # step 1: find all explicit files
1067 # step 1: find all explicit files
1048 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1068 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1049 if matchtdir:
1069 if matchtdir:
1050 for d in work:
1070 for d in work:
1051 matchtdir(d[0])
1071 matchtdir(d[0])
1052 for d in dirsnotfound:
1072 for d in dirsnotfound:
1053 matchtdir(d)
1073 matchtdir(d)
1054
1074
1055 skipstep3 = skipstep3 and not (work or dirsnotfound)
1075 skipstep3 = skipstep3 and not (work or dirsnotfound)
1056 work = [d for d in work if not dirignore(d[0])]
1076 work = [d for d in work if not dirignore(d[0])]
1057
1077
1058 # step 2: visit subdirectories
1078 # step 2: visit subdirectories
1059 def traverse(work, alreadynormed):
1079 def traverse(work, alreadynormed):
1060 wadd = work.append
1080 wadd = work.append
1061 while work:
1081 while work:
1062 tracing.counter('dirstate.walk work', len(work))
1082 tracing.counter('dirstate.walk work', len(work))
1063 nd = work.pop()
1083 nd = work.pop()
1064 visitentries = match.visitchildrenset(nd)
1084 visitentries = match.visitchildrenset(nd)
1065 if not visitentries:
1085 if not visitentries:
1066 continue
1086 continue
1067 if visitentries == b'this' or visitentries == b'all':
1087 if visitentries == b'this' or visitentries == b'all':
1068 visitentries = None
1088 visitentries = None
1069 skip = None
1089 skip = None
1070 if nd != b'':
1090 if nd != b'':
1071 skip = b'.hg'
1091 skip = b'.hg'
1072 try:
1092 try:
1073 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1093 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1074 entries = listdir(join(nd), stat=True, skip=skip)
1094 entries = listdir(join(nd), stat=True, skip=skip)
1075 except (PermissionError, FileNotFoundError) as inst:
1095 except (PermissionError, FileNotFoundError) as inst:
1076 match.bad(
1096 match.bad(
1077 self.pathto(nd), encoding.strtolocal(inst.strerror)
1097 self.pathto(nd), encoding.strtolocal(inst.strerror)
1078 )
1098 )
1079 continue
1099 continue
1080 for f, kind, st in entries:
1100 for f, kind, st in entries:
1081 # Some matchers may return files in the visitentries set,
1101 # Some matchers may return files in the visitentries set,
1082 # instead of 'this', if the matcher explicitly mentions them
1102 # instead of 'this', if the matcher explicitly mentions them
1083 # and is not an exactmatcher. This is acceptable; we do not
1103 # and is not an exactmatcher. This is acceptable; we do not
1084 # make any hard assumptions about file-or-directory below
1104 # make any hard assumptions about file-or-directory below
1085 # based on the presence of `f` in visitentries. If
1105 # based on the presence of `f` in visitentries. If
1086 # visitchildrenset returned a set, we can always skip the
1106 # visitchildrenset returned a set, we can always skip the
1087 # entries *not* in the set it provided regardless of whether
1107 # entries *not* in the set it provided regardless of whether
1088 # they're actually a file or a directory.
1108 # they're actually a file or a directory.
1089 if visitentries and f not in visitentries:
1109 if visitentries and f not in visitentries:
1090 continue
1110 continue
1091 if normalizefile:
1111 if normalizefile:
1092 # even though f might be a directory, we're only
1112 # even though f might be a directory, we're only
1093 # interested in comparing it to files currently in the
1113 # interested in comparing it to files currently in the
1094 # dmap -- therefore normalizefile is enough
1114 # dmap -- therefore normalizefile is enough
1095 nf = normalizefile(
1115 nf = normalizefile(
1096 nd and (nd + b"/" + f) or f, True, True
1116 nd and (nd + b"/" + f) or f, True, True
1097 )
1117 )
1098 else:
1118 else:
1099 nf = nd and (nd + b"/" + f) or f
1119 nf = nd and (nd + b"/" + f) or f
1100 if nf not in results:
1120 if nf not in results:
1101 if kind == dirkind:
1121 if kind == dirkind:
1102 if not ignore(nf):
1122 if not ignore(nf):
1103 if matchtdir:
1123 if matchtdir:
1104 matchtdir(nf)
1124 matchtdir(nf)
1105 wadd(nf)
1125 wadd(nf)
1106 if nf in dmap and (matchalways or matchfn(nf)):
1126 if nf in dmap and (matchalways or matchfn(nf)):
1107 results[nf] = None
1127 results[nf] = None
1108 elif kind == regkind or kind == lnkkind:
1128 elif kind == regkind or kind == lnkkind:
1109 if nf in dmap:
1129 if nf in dmap:
1110 if matchalways or matchfn(nf):
1130 if matchalways or matchfn(nf):
1111 results[nf] = st
1131 results[nf] = st
1112 elif (matchalways or matchfn(nf)) and not ignore(
1132 elif (matchalways or matchfn(nf)) and not ignore(
1113 nf
1133 nf
1114 ):
1134 ):
1115 # unknown file -- normalize if necessary
1135 # unknown file -- normalize if necessary
1116 if not alreadynormed:
1136 if not alreadynormed:
1117 nf = normalize(nf, False, True)
1137 nf = normalize(nf, False, True)
1118 results[nf] = st
1138 results[nf] = st
1119 elif nf in dmap and (matchalways or matchfn(nf)):
1139 elif nf in dmap and (matchalways or matchfn(nf)):
1120 results[nf] = None
1140 results[nf] = None
1121
1141
1122 for nd, d in work:
1142 for nd, d in work:
1123 # alreadynormed means that processwork doesn't have to do any
1143 # alreadynormed means that processwork doesn't have to do any
1124 # expensive directory normalization
1144 # expensive directory normalization
1125 alreadynormed = not normalize or nd == d
1145 alreadynormed = not normalize or nd == d
1126 traverse([d], alreadynormed)
1146 traverse([d], alreadynormed)
1127
1147
1128 for s in subrepos:
1148 for s in subrepos:
1129 del results[s]
1149 del results[s]
1130 del results[b'.hg']
1150 del results[b'.hg']
1131
1151
1132 # step 3: visit remaining files from dmap
1152 # step 3: visit remaining files from dmap
1133 if not skipstep3 and not exact:
1153 if not skipstep3 and not exact:
1134 # If a dmap file is not in results yet, it was either
1154 # If a dmap file is not in results yet, it was either
1135 # a) not matching matchfn b) ignored, c) missing, or d) under a
1155 # a) not matching matchfn b) ignored, c) missing, or d) under a
1136 # symlink directory.
1156 # symlink directory.
1137 if not results and matchalways:
1157 if not results and matchalways:
1138 visit = [f for f in dmap]
1158 visit = [f for f in dmap]
1139 else:
1159 else:
1140 visit = [f for f in dmap if f not in results and matchfn(f)]
1160 visit = [f for f in dmap if f not in results and matchfn(f)]
1141 visit.sort()
1161 visit.sort()
1142
1162
1143 if unknown:
1163 if unknown:
1144 # unknown == True means we walked all dirs under the roots
1164 # unknown == True means we walked all dirs under the roots
1145 # that wasn't ignored, and everything that matched was stat'ed
1165 # that wasn't ignored, and everything that matched was stat'ed
1146 # and is already in results.
1166 # and is already in results.
1147 # The rest must thus be ignored or under a symlink.
1167 # The rest must thus be ignored or under a symlink.
1148 audit_path = pathutil.pathauditor(self._root, cached=True)
1168 audit_path = pathutil.pathauditor(self._root, cached=True)
1149
1169
1150 for nf in iter(visit):
1170 for nf in iter(visit):
1151 # If a stat for the same file was already added with a
1171 # If a stat for the same file was already added with a
1152 # different case, don't add one for this, since that would
1172 # different case, don't add one for this, since that would
1153 # make it appear as if the file exists under both names
1173 # make it appear as if the file exists under both names
1154 # on disk.
1174 # on disk.
1155 if (
1175 if (
1156 normalizefile
1176 normalizefile
1157 and normalizefile(nf, True, True) in results
1177 and normalizefile(nf, True, True) in results
1158 ):
1178 ):
1159 results[nf] = None
1179 results[nf] = None
1160 # Report ignored items in the dmap as long as they are not
1180 # Report ignored items in the dmap as long as they are not
1161 # under a symlink directory.
1181 # under a symlink directory.
1162 elif audit_path.check(nf):
1182 elif audit_path.check(nf):
1163 try:
1183 try:
1164 results[nf] = lstat(join(nf))
1184 results[nf] = lstat(join(nf))
1165 # file was just ignored, no links, and exists
1185 # file was just ignored, no links, and exists
1166 except OSError:
1186 except OSError:
1167 # file doesn't exist
1187 # file doesn't exist
1168 results[nf] = None
1188 results[nf] = None
1169 else:
1189 else:
1170 # It's either missing or under a symlink directory
1190 # It's either missing or under a symlink directory
1171 # which we in this case report as missing
1191 # which we in this case report as missing
1172 results[nf] = None
1192 results[nf] = None
1173 else:
1193 else:
1174 # We may not have walked the full directory tree above,
1194 # We may not have walked the full directory tree above,
1175 # so stat and check everything we missed.
1195 # so stat and check everything we missed.
1176 iv = iter(visit)
1196 iv = iter(visit)
1177 for st in util.statfiles([join(i) for i in visit]):
1197 for st in util.statfiles([join(i) for i in visit]):
1178 results[next(iv)] = st
1198 results[next(iv)] = st
1179 return results
1199 return results
1180
1200
    def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
        """Run the Rust status implementation and return (lookup, status).

        `lookup` is the list of files whose state could not be decided from
        cached metadata alone ("unsure"); `status` is a scmutil.status.
        May raise rustmod.FallbackError, in which case the caller falls back
        to the pure-Python path.
        """
        # When a sparse profile is active, restrict the matcher to the sparse
        # set, but keep explicitly-named files visible via an exact matcher.
        if self._sparsematchfn is not None:
            em = matchmod.exact(matcher.files())
            sm = matchmod.unionmatcher([self._sparsematcher, em])
            matcher = matchmod.intersectmatchers(matcher, sm)
        # Force Rayon (Rust parallelism library) to respect the number of
        # workers. This is a temporary workaround until Rust code knows
        # how to read the config file.
        numcpus = self._ui.configint(b"worker", b"numcpus")
        if numcpus is not None:
            encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)

        workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
        if not workers_enabled:
            # A single Rayon thread effectively disables parallelism.
            encoding.environ[b"RAYON_NUM_THREADS"] = b"1"

        (
            lookup,
            modified,
            added,
            removed,
            deleted,
            clean,
            ignored,
            unknown,
            warnings,
            bad,
            traversed,
            dirty,
        ) = rustmod.status(
            self._map._map,
            matcher,
            self._rootdir,
            self._ignorefiles(),
            self._checkexec,
            bool(list_clean),
            bool(list_ignored),
            bool(list_unknown),
            bool(matcher.traversedir),
        )

        # The Rust side may have mutated the map (e.g. refreshed cached
        # mtimes); remember that so the dirstate gets written out.
        self._dirty |= dirty

        if matcher.traversedir:
            for dir in traversed:
                matcher.traversedir(dir)

        if self._ui.warn:
            for item in warnings:
                if isinstance(item, tuple):
                    # (file_path, syntax): an invalid pattern inside an
                    # otherwise readable ignore file.
                    file_path, syntax = item
                    msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
                        file_path,
                        syntax,
                    )
                    self._ui.warn(msg)
                else:
                    # bare path: the ignore file itself could not be read.
                    msg = _(b"skipping unreadable pattern file '%s': %s\n")
                    self._ui.warn(
                        msg
                        % (
                            pathutil.canonpath(
                                self._rootdir, self._rootdir, item
                            ),
                            b"No such file or directory",
                        )
                    )

        for (fn, message) in bad:
            matcher.bad(fn, encoding.strtolocal(message))

        status = scmutil.status(
            modified=modified,
            added=added,
            removed=removed,
            deleted=deleted,
            unknown=unknown,
            ignored=ignored,
            clean=clean,
        )
        return (lookup, status)
1262
1282
    def status(self, match, subrepos, ignored, clean, unknown):
        """Determine the status of the working copy relative to the
        dirstate and return a pair of (unsure, status), where status is of type
        scmutil.status and:

          unsure:
            files that might have been modified since the dirstate was
            written, but need to be read to be sure (size is the same
            but mtime differs)
          status.modified:
            files that have definitely been modified since the dirstate
            was written (different size or mode)
          status.clean:
            files that have definitely not been modified since the
            dirstate was written
        """
        # NOTE: the boolean flags are renamed so the same identifiers can be
        # reused below as the result lists.
        listignored, listclean, listunknown = ignored, clean, unknown
        lookup, modified, added, unknown, ignored = [], [], [], [], []
        removed, deleted, clean = [], [], []

        dmap = self._map
        dmap.preload()

        use_rust = True

        allowed_matchers = (
            matchmod.alwaysmatcher,
            matchmod.differencematcher,
            matchmod.exactmatcher,
            matchmod.includematcher,
            matchmod.intersectionmatcher,
            matchmod.nevermatcher,
            matchmod.unionmatcher,
        )

        if rustmod is None:
            use_rust = False
        elif self._checkcase:
            # Case-insensitive filesystems are not handled yet
            use_rust = False
        elif subrepos:
            use_rust = False
        elif not isinstance(match, allowed_matchers):
            # Some matchers have yet to be implemented
            use_rust = False

        # Get the time from the filesystem so we can disambiguate files that
        # appear modified in the present or future.
        try:
            mtime_boundary = timestamp.get_fs_now(self._opener)
        except OSError:
            # In largefiles or readonly context
            mtime_boundary = None

        if use_rust:
            try:
                res = self._rust_status(
                    match, listclean, listignored, listunknown
                )
                return res + (mtime_boundary,)
            except rustmod.FallbackError:
                # The Rust implementation declined; fall through to the
                # pure-Python path below.
                pass

        def noop(f):
            # placeholder "append" used when a category is not requested
            pass

        # Bind hot-loop lookups to locals (LOAD_FAST) for speed.
        dcontains = dmap.__contains__
        dget = dmap.__getitem__
        ladd = lookup.append  # aka "unsure"
        madd = modified.append
        aadd = added.append
        uadd = unknown.append if listunknown else noop
        iadd = ignored.append if listignored else noop
        radd = removed.append
        dadd = deleted.append
        cadd = clean.append if listclean else noop
        mexact = match.exact
        dirignore = self._dirignore
        checkexec = self._checkexec
        checklink = self._checklink
        copymap = self._map.copymap

        # We need to do full walks when either
        # - we're listing all clean files, or
        # - match.traversedir does something, because match.traversedir should
        #   be called for every dir in the working dir
        full = listclean or match.traversedir is not None
        for fn, st in self.walk(
            match, subrepos, listunknown, listignored, full=full
        ).items():
            if not dcontains(fn):
                # file exists on disk but is not tracked: ignored or unknown
                if (listignored or mexact(fn)) and dirignore(fn):
                    if listignored:
                        iadd(fn)
                else:
                    uadd(fn)
                continue

            t = dget(fn)
            mode = t.mode
            size = t.size

            if not st and t.tracked:
                # tracked but gone from disk
                dadd(fn)
            elif t.p2_info:
                # merge-related information forces a "modified" report
                madd(fn)
            elif t.added:
                aadd(fn)
            elif t.removed:
                radd(fn)
            elif t.tracked:
                if not checklink and t.has_fallback_symlink:
                    # If the file system does not support symlink, the mode
                    # might not be correctly stored in the dirstate, so do not
                    # trust it.
                    ladd(fn)
                elif not checkexec and t.has_fallback_exec:
                    # If the file system does not support exec bits, the mode
                    # might not be correctly stored in the dirstate, so do not
                    # trust it.
                    ladd(fn)
                elif (
                    size >= 0
                    and (
                        (size != st.st_size and size != st.st_size & _rangemask)
                        or ((mode ^ st.st_mode) & 0o100 and checkexec)
                    )
                    or fn in copymap
                ):
                    if stat.S_ISLNK(st.st_mode) and size != st.st_size:
                        # issue6456: Size returned may be longer due to
                        # encryption on EXT-4 fscrypt, undecided.
                        ladd(fn)
                    else:
                        madd(fn)
                elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
                    # There might be a change in the future if for example the
                    # internal clock is off, but this is a case where the issues
                    # the user would face would be a lot worse and there is
                    # nothing we can really do.
                    ladd(fn)
                elif listclean:
                    cadd(fn)
        status = scmutil.status(
            modified, added, removed, deleted, unknown, ignored, clean
        )
        return (lookup, status, mtime_boundary)
1410
1430
1411 def matches(self, match):
1431 def matches(self, match):
1412 """
1432 """
1413 return files in the dirstate (in whatever state) filtered by match
1433 return files in the dirstate (in whatever state) filtered by match
1414 """
1434 """
1415 dmap = self._map
1435 dmap = self._map
1416 if rustmod is not None:
1436 if rustmod is not None:
1417 dmap = self._map._map
1437 dmap = self._map._map
1418
1438
1419 if match.always():
1439 if match.always():
1420 return dmap.keys()
1440 return dmap.keys()
1421 files = match.files()
1441 files = match.files()
1422 if match.isexact():
1442 if match.isexact():
1423 # fast path -- filter the other way around, since typically files is
1443 # fast path -- filter the other way around, since typically files is
1424 # much smaller than dmap
1444 # much smaller than dmap
1425 return [f for f in files if f in dmap]
1445 return [f for f in files if f in dmap]
1426 if match.prefix() and all(fn in dmap for fn in files):
1446 if match.prefix() and all(fn in dmap for fn in files):
1427 # fast path -- all the values are known to be files, so just return
1447 # fast path -- all the values are known to be files, so just return
1428 # that
1448 # that
1429 return list(files)
1449 return list(files)
1430 return [f for f in dmap if match(f)]
1450 return [f for f in dmap if match(f)]
1431
1451
1432 def _actualfilename(self, tr):
1452 def _actualfilename(self, tr):
1433 if tr:
1453 if tr:
1434 return self._pendingfilename
1454 return self._pendingfilename
1435 else:
1455 else:
1436 return self._filename
1456 return self._filename
1437
1457
1438 def data_backup_filename(self, backupname):
1458 def data_backup_filename(self, backupname):
1439 if not self._use_dirstate_v2:
1459 if not self._use_dirstate_v2:
1440 return None
1460 return None
1441 return backupname + b'.v2-data'
1461 return backupname + b'.v2-data'
1442
1462
1443 def _new_backup_data_filename(self, backupname):
1463 def _new_backup_data_filename(self, backupname):
1444 """return a filename to backup a data-file or None"""
1464 """return a filename to backup a data-file or None"""
1445 if not self._use_dirstate_v2:
1465 if not self._use_dirstate_v2:
1446 return None
1466 return None
1447 data_filename = self._map.docket.data_filename()
1467 data_filename = self._map.docket.data_filename()
1448 return data_filename, self.data_backup_filename(backupname)
1468 return data_filename, self.data_backup_filename(backupname)
1449
1469
1450 def backup_data_file(self, backupname):
1470 def backup_data_file(self, backupname):
1451 if not self._use_dirstate_v2:
1471 if not self._use_dirstate_v2:
1452 return None
1472 return None
1453 docket = docketmod.DirstateDocket.parse(
1473 docket = docketmod.DirstateDocket.parse(
1454 self._opener.read(backupname),
1474 self._opener.read(backupname),
1455 self._nodeconstants,
1475 self._nodeconstants,
1456 )
1476 )
1457 return self.data_backup_filename(backupname), docket.data_filename()
1477 return self.data_backup_filename(backupname), docket.data_filename()
1458
1478
    def savebackup(self, tr, backupname):
        '''Save current dirstate into backup file'''
        filename = self._actualfilename(tr)
        # Backing a file up onto itself would lose data below.
        assert backupname != filename

        # use '_writedirstate' instead of 'write' to write changes certainly,
        # because the latter omits writing out if transaction is running.
        # output file will be used to create backup of dirstate at this point.
        if self._dirty or not self._opener.exists(filename):
            self._writedirstate(
                tr,
                self._opener(filename, b"w", atomictemp=True, checkambig=True),
            )

        if tr:
            # ensure that subsequent tr.writepending returns True for
            # changes written out above, even if dirstate is never
            # changed after this
            tr.addfilegenerator(
                b'dirstate-1-main',
                (self._filename,),
                lambda f: self._writedirstate(tr, f),
                location=b'plain',
                post_finalize=True,
            )

            # ensure that pending file written above is unlinked at
            # failure, even if tr.writepending isn't invoked until the
            # end of this transaction
            tr.registertmp(filename, location=b'plain')

        self._opener.tryunlink(backupname)
        # hardlink backup is okay because _writedirstate is always called
        # with an "atomictemp=True" file.
        util.copyfile(
            self._opener.join(filename),
            self._opener.join(backupname),
            hardlink=True,
        )
        # dirstate-v2 keeps the bulk of its data in a separate file; back
        # that one up as well when present.
        data_pair = self._new_backup_data_filename(backupname)
        if data_pair is not None:
            data_filename, bck_data_filename = data_pair
            util.copyfile(
                self._opener.join(data_filename),
                self._opener.join(bck_data_filename),
                hardlink=True,
            )
            if tr is not None:
                # ensure that pending file written above is unlinked at
                # failure, even if tr.writepending isn't invoked until the
                # end of this transaction
                tr.registertmp(bck_data_filename, location=b'plain')
1511
1531
    def restorebackup(self, tr, backupname):
        '''Restore dirstate by backup file'''
        # this "invalidate()" prevents "wlock.release()" from writing
        # changes of dirstate out after restoring from backup file
        self.invalidate()
        filename = self._actualfilename(tr)
        o = self._opener
        # Must be resolved before the rename below removes the backup docket.
        data_pair = self.backup_data_file(backupname)
        if util.samefile(o.join(backupname), o.join(filename)):
            # backup is a hardlink to the live file: nothing to restore,
            # just drop the extra link.
            o.unlink(backupname)
        else:
            o.rename(backupname, filename, checkambig=True)

        if data_pair is not None:
            data_backup, target = data_pair
            if o.exists(target) and util.samefile(
                o.join(data_backup), o.join(target)
            ):
                # v2 data backup is a hardlink to the live data file as well.
                o.unlink(data_backup)
            else:
                o.rename(data_backup, target, checkambig=True)
1533
1553
1534 def clearbackup(self, tr, backupname):
1554 def clearbackup(self, tr, backupname):
1535 '''Clear backup file'''
1555 '''Clear backup file'''
1536 o = self._opener
1556 o = self._opener
1537 data_backup = self.backup_data_file(backupname)
1557 data_backup = self.backup_data_file(backupname)
1538 o.unlink(backupname)
1558 o.unlink(backupname)
1539
1559
1540 if data_backup is not None:
1560 if data_backup is not None:
1541 o.unlink(data_backup[0])
1561 o.unlink(data_backup[0])
1542
1562
    def verify(self, m1, m2, p1, narrow_matcher=None):
        """
        check the dirstate contents against the parent manifest and yield errors

        ``m1``/``m2`` are the first/second parent manifests, ``p1`` the first
        parent node (used only in messages).  ``narrow_matcher`` limits which
        manifest files are checked in the reverse direction.  Each detected
        inconsistency is yielded as a formatted bytes message.
        """
        missing_from_p1 = _(
            b"%s marked as tracked in p1 (%s) but not in manifest1\n"
        )
        unexpected_in_p1 = _(b"%s marked as added, but also in manifest1\n")
        missing_from_ps = _(
            b"%s marked as modified, but not in either manifest\n"
        )
        missing_from_ds = _(
            b"%s in manifest1, but not marked as tracked in p1 (%s)\n"
        )
        # Forward pass: every dirstate entry must be consistent with the
        # parent manifests.
        for f, entry in self.items():
            if entry.p1_tracked:
                if entry.modified and f not in m1 and f not in m2:
                    yield missing_from_ps % f
                elif f not in m1:
                    yield missing_from_p1 % (f, node.short(p1))
            if entry.added and f in m1:
                yield unexpected_in_p1 % f
        # Reverse pass: every file of manifest1 must be tracked in p1.
        for f in m1:
            if narrow_matcher is not None and not narrow_matcher(f):
                continue
            entry = self.get_entry(f)
            if not entry.p1_tracked:
                yield missing_from_ds % (f, node.short(p1))
General Comments 0
You need to be logged in to leave comments. Login now