##// END OF EJS Templates
dirstate: stop gathering parentfiledata in update_file_p1...
marmoute -
r49209:6becd577 default
parent child Browse files
Show More
@@ -1,1526 +1,1513 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .pycompat import delattr
17 from .pycompat import delattr
18
18
19 from hgdemandimport import tracing
19 from hgdemandimport import tracing
20
20
21 from . import (
21 from . import (
22 dirstatemap,
22 dirstatemap,
23 encoding,
23 encoding,
24 error,
24 error,
25 match as matchmod,
25 match as matchmod,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 sparse,
30 sparse,
31 util,
31 util,
32 )
32 )
33
33
34 from .dirstateutils import (
34 from .dirstateutils import (
35 timestamp,
35 timestamp,
36 )
36 )
37
37
38 from .interfaces import (
38 from .interfaces import (
39 dirstate as intdirstate,
39 dirstate as intdirstate,
40 util as interfaceutil,
40 util as interfaceutil,
41 )
41 )
42
42
43 parsers = policy.importmod('parsers')
43 parsers = policy.importmod('parsers')
44 rustmod = policy.importrust('dirstate')
44 rustmod = policy.importrust('dirstate')
45
45
46 HAS_FAST_DIRSTATE_V2 = rustmod is not None
46 HAS_FAST_DIRSTATE_V2 = rustmod is not None
47
47
48 propertycache = util.propertycache
48 propertycache = util.propertycache
49 filecache = scmutil.filecache
49 filecache = scmutil.filecache
50 _rangemask = dirstatemap.rangemask
50 _rangemask = dirstatemap.rangemask
51
51
52 DirstateItem = dirstatemap.DirstateItem
52 DirstateItem = dirstatemap.DirstateItem
53
53
54
54
55 class repocache(filecache):
55 class repocache(filecache):
56 """filecache for files in .hg/"""
56 """filecache for files in .hg/"""
57
57
58 def join(self, obj, fname):
58 def join(self, obj, fname):
59 return obj._opener.join(fname)
59 return obj._opener.join(fname)
60
60
61
61
62 class rootcache(filecache):
62 class rootcache(filecache):
63 """filecache for files in the repository root"""
63 """filecache for files in the repository root"""
64
64
65 def join(self, obj, fname):
65 def join(self, obj, fname):
66 return obj._join(fname)
66 return obj._join(fname)
67
67
68
68
69 def requires_parents_change(func):
69 def requires_parents_change(func):
70 def wrap(self, *args, **kwargs):
70 def wrap(self, *args, **kwargs):
71 if not self.pendingparentchange():
71 if not self.pendingparentchange():
72 msg = 'calling `%s` outside of a parentchange context'
72 msg = 'calling `%s` outside of a parentchange context'
73 msg %= func.__name__
73 msg %= func.__name__
74 raise error.ProgrammingError(msg)
74 raise error.ProgrammingError(msg)
75 return func(self, *args, **kwargs)
75 return func(self, *args, **kwargs)
76
76
77 return wrap
77 return wrap
78
78
79
79
80 def requires_no_parents_change(func):
80 def requires_no_parents_change(func):
81 def wrap(self, *args, **kwargs):
81 def wrap(self, *args, **kwargs):
82 if self.pendingparentchange():
82 if self.pendingparentchange():
83 msg = 'calling `%s` inside of a parentchange context'
83 msg = 'calling `%s` inside of a parentchange context'
84 msg %= func.__name__
84 msg %= func.__name__
85 raise error.ProgrammingError(msg)
85 raise error.ProgrammingError(msg)
86 return func(self, *args, **kwargs)
86 return func(self, *args, **kwargs)
87
87
88 return wrap
88 return wrap
89
89
90
90
91 @interfaceutil.implementer(intdirstate.idirstate)
91 @interfaceutil.implementer(intdirstate.idirstate)
92 class dirstate(object):
92 class dirstate(object):
93 def __init__(
93 def __init__(
94 self,
94 self,
95 opener,
95 opener,
96 ui,
96 ui,
97 root,
97 root,
98 validate,
98 validate,
99 sparsematchfn,
99 sparsematchfn,
100 nodeconstants,
100 nodeconstants,
101 use_dirstate_v2,
101 use_dirstate_v2,
102 ):
102 ):
103 """Create a new dirstate object.
103 """Create a new dirstate object.
104
104
105 opener is an open()-like callable that can be used to open the
105 opener is an open()-like callable that can be used to open the
106 dirstate file; root is the root of the directory tracked by
106 dirstate file; root is the root of the directory tracked by
107 the dirstate.
107 the dirstate.
108 """
108 """
109 self._use_dirstate_v2 = use_dirstate_v2
109 self._use_dirstate_v2 = use_dirstate_v2
110 self._nodeconstants = nodeconstants
110 self._nodeconstants = nodeconstants
111 self._opener = opener
111 self._opener = opener
112 self._validate = validate
112 self._validate = validate
113 self._root = root
113 self._root = root
114 self._sparsematchfn = sparsematchfn
114 self._sparsematchfn = sparsematchfn
115 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
115 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
116 # UNC path pointing to root share (issue4557)
116 # UNC path pointing to root share (issue4557)
117 self._rootdir = pathutil.normasprefix(root)
117 self._rootdir = pathutil.normasprefix(root)
118 self._dirty = False
118 self._dirty = False
119 self._lastnormaltime = timestamp.zero()
119 self._lastnormaltime = timestamp.zero()
120 self._ui = ui
120 self._ui = ui
121 self._filecache = {}
121 self._filecache = {}
122 self._parentwriters = 0
122 self._parentwriters = 0
123 self._filename = b'dirstate'
123 self._filename = b'dirstate'
124 self._pendingfilename = b'%s.pending' % self._filename
124 self._pendingfilename = b'%s.pending' % self._filename
125 self._plchangecallbacks = {}
125 self._plchangecallbacks = {}
126 self._origpl = None
126 self._origpl = None
127 self._mapcls = dirstatemap.dirstatemap
127 self._mapcls = dirstatemap.dirstatemap
128 # Access and cache cwd early, so we don't access it for the first time
128 # Access and cache cwd early, so we don't access it for the first time
129 # after a working-copy update caused it to not exist (accessing it then
129 # after a working-copy update caused it to not exist (accessing it then
130 # raises an exception).
130 # raises an exception).
131 self._cwd
131 self._cwd
132
132
133 def prefetch_parents(self):
133 def prefetch_parents(self):
134 """make sure the parents are loaded
134 """make sure the parents are loaded
135
135
136 Used to avoid a race condition.
136 Used to avoid a race condition.
137 """
137 """
138 self._pl
138 self._pl
139
139
140 @contextlib.contextmanager
140 @contextlib.contextmanager
141 def parentchange(self):
141 def parentchange(self):
142 """Context manager for handling dirstate parents.
142 """Context manager for handling dirstate parents.
143
143
144 If an exception occurs in the scope of the context manager,
144 If an exception occurs in the scope of the context manager,
145 the incoherent dirstate won't be written when wlock is
145 the incoherent dirstate won't be written when wlock is
146 released.
146 released.
147 """
147 """
148 self._parentwriters += 1
148 self._parentwriters += 1
149 yield
149 yield
150 # Typically we want the "undo" step of a context manager in a
150 # Typically we want the "undo" step of a context manager in a
151 # finally block so it happens even when an exception
151 # finally block so it happens even when an exception
152 # occurs. In this case, however, we only want to decrement
152 # occurs. In this case, however, we only want to decrement
153 # parentwriters if the code in the with statement exits
153 # parentwriters if the code in the with statement exits
154 # normally, so we don't have a try/finally here on purpose.
154 # normally, so we don't have a try/finally here on purpose.
155 self._parentwriters -= 1
155 self._parentwriters -= 1
156
156
157 def pendingparentchange(self):
157 def pendingparentchange(self):
158 """Returns true if the dirstate is in the middle of a set of changes
158 """Returns true if the dirstate is in the middle of a set of changes
159 that modify the dirstate parent.
159 that modify the dirstate parent.
160 """
160 """
161 return self._parentwriters > 0
161 return self._parentwriters > 0
162
162
163 @propertycache
163 @propertycache
164 def _map(self):
164 def _map(self):
165 """Return the dirstate contents (see documentation for dirstatemap)."""
165 """Return the dirstate contents (see documentation for dirstatemap)."""
166 self._map = self._mapcls(
166 self._map = self._mapcls(
167 self._ui,
167 self._ui,
168 self._opener,
168 self._opener,
169 self._root,
169 self._root,
170 self._nodeconstants,
170 self._nodeconstants,
171 self._use_dirstate_v2,
171 self._use_dirstate_v2,
172 )
172 )
173 return self._map
173 return self._map
174
174
175 @property
175 @property
176 def _sparsematcher(self):
176 def _sparsematcher(self):
177 """The matcher for the sparse checkout.
177 """The matcher for the sparse checkout.
178
178
179 The working directory may not include every file from a manifest. The
179 The working directory may not include every file from a manifest. The
180 matcher obtained by this property will match a path if it is to be
180 matcher obtained by this property will match a path if it is to be
181 included in the working directory.
181 included in the working directory.
182 """
182 """
183 # TODO there is potential to cache this property. For now, the matcher
183 # TODO there is potential to cache this property. For now, the matcher
184 # is resolved on every access. (But the called function does use a
184 # is resolved on every access. (But the called function does use a
185 # cache to keep the lookup fast.)
185 # cache to keep the lookup fast.)
186 return self._sparsematchfn()
186 return self._sparsematchfn()
187
187
188 @repocache(b'branch')
188 @repocache(b'branch')
189 def _branch(self):
189 def _branch(self):
190 try:
190 try:
191 return self._opener.read(b"branch").strip() or b"default"
191 return self._opener.read(b"branch").strip() or b"default"
192 except IOError as inst:
192 except IOError as inst:
193 if inst.errno != errno.ENOENT:
193 if inst.errno != errno.ENOENT:
194 raise
194 raise
195 return b"default"
195 return b"default"
196
196
197 @property
197 @property
198 def _pl(self):
198 def _pl(self):
199 return self._map.parents()
199 return self._map.parents()
200
200
201 def hasdir(self, d):
201 def hasdir(self, d):
202 return self._map.hastrackeddir(d)
202 return self._map.hastrackeddir(d)
203
203
204 @rootcache(b'.hgignore')
204 @rootcache(b'.hgignore')
205 def _ignore(self):
205 def _ignore(self):
206 files = self._ignorefiles()
206 files = self._ignorefiles()
207 if not files:
207 if not files:
208 return matchmod.never()
208 return matchmod.never()
209
209
210 pats = [b'include:%s' % f for f in files]
210 pats = [b'include:%s' % f for f in files]
211 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
211 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
212
212
213 @propertycache
213 @propertycache
214 def _slash(self):
214 def _slash(self):
215 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
215 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
216
216
217 @propertycache
217 @propertycache
218 def _checklink(self):
218 def _checklink(self):
219 return util.checklink(self._root)
219 return util.checklink(self._root)
220
220
221 @propertycache
221 @propertycache
222 def _checkexec(self):
222 def _checkexec(self):
223 return bool(util.checkexec(self._root))
223 return bool(util.checkexec(self._root))
224
224
225 @propertycache
225 @propertycache
226 def _checkcase(self):
226 def _checkcase(self):
227 return not util.fscasesensitive(self._join(b'.hg'))
227 return not util.fscasesensitive(self._join(b'.hg'))
228
228
229 def _join(self, f):
229 def _join(self, f):
230 # much faster than os.path.join()
230 # much faster than os.path.join()
231 # it's safe because f is always a relative path
231 # it's safe because f is always a relative path
232 return self._rootdir + f
232 return self._rootdir + f
233
233
234 def flagfunc(self, buildfallback):
234 def flagfunc(self, buildfallback):
235 """build a callable that returns flags associated with a filename
235 """build a callable that returns flags associated with a filename
236
236
237 The information is extracted from three possible layers:
237 The information is extracted from three possible layers:
238 1. the file system if it supports the information
238 1. the file system if it supports the information
239 2. the "fallback" information stored in the dirstate if any
239 2. the "fallback" information stored in the dirstate if any
240 3. a more expensive mechanism inferring the flags from the parents.
240 3. a more expensive mechanism inferring the flags from the parents.
241 """
241 """
242
242
243 # small hack to cache the result of buildfallback()
243 # small hack to cache the result of buildfallback()
244 fallback_func = []
244 fallback_func = []
245
245
246 def get_flags(x):
246 def get_flags(x):
247 entry = None
247 entry = None
248 fallback_value = None
248 fallback_value = None
249 try:
249 try:
250 st = os.lstat(self._join(x))
250 st = os.lstat(self._join(x))
251 except OSError:
251 except OSError:
252 return b''
252 return b''
253
253
254 if self._checklink:
254 if self._checklink:
255 if util.statislink(st):
255 if util.statislink(st):
256 return b'l'
256 return b'l'
257 else:
257 else:
258 entry = self.get_entry(x)
258 entry = self.get_entry(x)
259 if entry.has_fallback_symlink:
259 if entry.has_fallback_symlink:
260 if entry.fallback_symlink:
260 if entry.fallback_symlink:
261 return b'l'
261 return b'l'
262 else:
262 else:
263 if not fallback_func:
263 if not fallback_func:
264 fallback_func.append(buildfallback())
264 fallback_func.append(buildfallback())
265 fallback_value = fallback_func[0](x)
265 fallback_value = fallback_func[0](x)
266 if b'l' in fallback_value:
266 if b'l' in fallback_value:
267 return b'l'
267 return b'l'
268
268
269 if self._checkexec:
269 if self._checkexec:
270 if util.statisexec(st):
270 if util.statisexec(st):
271 return b'x'
271 return b'x'
272 else:
272 else:
273 if entry is None:
273 if entry is None:
274 entry = self.get_entry(x)
274 entry = self.get_entry(x)
275 if entry.has_fallback_exec:
275 if entry.has_fallback_exec:
276 if entry.fallback_exec:
276 if entry.fallback_exec:
277 return b'x'
277 return b'x'
278 else:
278 else:
279 if fallback_value is None:
279 if fallback_value is None:
280 if not fallback_func:
280 if not fallback_func:
281 fallback_func.append(buildfallback())
281 fallback_func.append(buildfallback())
282 fallback_value = fallback_func[0](x)
282 fallback_value = fallback_func[0](x)
283 if b'x' in fallback_value:
283 if b'x' in fallback_value:
284 return b'x'
284 return b'x'
285 return b''
285 return b''
286
286
287 return get_flags
287 return get_flags
288
288
289 @propertycache
289 @propertycache
290 def _cwd(self):
290 def _cwd(self):
291 # internal config: ui.forcecwd
291 # internal config: ui.forcecwd
292 forcecwd = self._ui.config(b'ui', b'forcecwd')
292 forcecwd = self._ui.config(b'ui', b'forcecwd')
293 if forcecwd:
293 if forcecwd:
294 return forcecwd
294 return forcecwd
295 return encoding.getcwd()
295 return encoding.getcwd()
296
296
297 def getcwd(self):
297 def getcwd(self):
298 """Return the path from which a canonical path is calculated.
298 """Return the path from which a canonical path is calculated.
299
299
300 This path should be used to resolve file patterns or to convert
300 This path should be used to resolve file patterns or to convert
301 canonical paths back to file paths for display. It shouldn't be
301 canonical paths back to file paths for display. It shouldn't be
302 used to get real file paths. Use vfs functions instead.
302 used to get real file paths. Use vfs functions instead.
303 """
303 """
304 cwd = self._cwd
304 cwd = self._cwd
305 if cwd == self._root:
305 if cwd == self._root:
306 return b''
306 return b''
307 # self._root ends with a path separator if self._root is '/' or 'C:\'
307 # self._root ends with a path separator if self._root is '/' or 'C:\'
308 rootsep = self._root
308 rootsep = self._root
309 if not util.endswithsep(rootsep):
309 if not util.endswithsep(rootsep):
310 rootsep += pycompat.ossep
310 rootsep += pycompat.ossep
311 if cwd.startswith(rootsep):
311 if cwd.startswith(rootsep):
312 return cwd[len(rootsep) :]
312 return cwd[len(rootsep) :]
313 else:
313 else:
314 # we're outside the repo. return an absolute path.
314 # we're outside the repo. return an absolute path.
315 return cwd
315 return cwd
316
316
317 def pathto(self, f, cwd=None):
317 def pathto(self, f, cwd=None):
318 if cwd is None:
318 if cwd is None:
319 cwd = self.getcwd()
319 cwd = self.getcwd()
320 path = util.pathto(self._root, cwd, f)
320 path = util.pathto(self._root, cwd, f)
321 if self._slash:
321 if self._slash:
322 return util.pconvert(path)
322 return util.pconvert(path)
323 return path
323 return path
324
324
325 def __getitem__(self, key):
325 def __getitem__(self, key):
326 """Return the current state of key (a filename) in the dirstate.
326 """Return the current state of key (a filename) in the dirstate.
327
327
328 States are:
328 States are:
329 n normal
329 n normal
330 m needs merging
330 m needs merging
331 r marked for removal
331 r marked for removal
332 a marked for addition
332 a marked for addition
333 ? not tracked
333 ? not tracked
334
334
335 XXX The "state" is a bit obscure to be in the "public" API. we should
335 XXX The "state" is a bit obscure to be in the "public" API. we should
336 consider migrating all user of this to going through the dirstate entry
336 consider migrating all user of this to going through the dirstate entry
337 instead.
337 instead.
338 """
338 """
339 msg = b"don't use dirstate[file], use dirstate.get_entry(file)"
339 msg = b"don't use dirstate[file], use dirstate.get_entry(file)"
340 util.nouideprecwarn(msg, b'6.1', stacklevel=2)
340 util.nouideprecwarn(msg, b'6.1', stacklevel=2)
341 entry = self._map.get(key)
341 entry = self._map.get(key)
342 if entry is not None:
342 if entry is not None:
343 return entry.state
343 return entry.state
344 return b'?'
344 return b'?'
345
345
346 def get_entry(self, path):
346 def get_entry(self, path):
347 """return a DirstateItem for the associated path"""
347 """return a DirstateItem for the associated path"""
348 entry = self._map.get(path)
348 entry = self._map.get(path)
349 if entry is None:
349 if entry is None:
350 return DirstateItem()
350 return DirstateItem()
351 return entry
351 return entry
352
352
353 def __contains__(self, key):
353 def __contains__(self, key):
354 return key in self._map
354 return key in self._map
355
355
356 def __iter__(self):
356 def __iter__(self):
357 return iter(sorted(self._map))
357 return iter(sorted(self._map))
358
358
359 def items(self):
359 def items(self):
360 return pycompat.iteritems(self._map)
360 return pycompat.iteritems(self._map)
361
361
362 iteritems = items
362 iteritems = items
363
363
364 def parents(self):
364 def parents(self):
365 return [self._validate(p) for p in self._pl]
365 return [self._validate(p) for p in self._pl]
366
366
367 def p1(self):
367 def p1(self):
368 return self._validate(self._pl[0])
368 return self._validate(self._pl[0])
369
369
370 def p2(self):
370 def p2(self):
371 return self._validate(self._pl[1])
371 return self._validate(self._pl[1])
372
372
373 @property
373 @property
374 def in_merge(self):
374 def in_merge(self):
375 """True if a merge is in progress"""
375 """True if a merge is in progress"""
376 return self._pl[1] != self._nodeconstants.nullid
376 return self._pl[1] != self._nodeconstants.nullid
377
377
378 def branch(self):
378 def branch(self):
379 return encoding.tolocal(self._branch)
379 return encoding.tolocal(self._branch)
380
380
381 def setparents(self, p1, p2=None):
381 def setparents(self, p1, p2=None):
382 """Set dirstate parents to p1 and p2.
382 """Set dirstate parents to p1 and p2.
383
383
384 When moving from two parents to one, "merged" entries a
384 When moving from two parents to one, "merged" entries a
385 adjusted to normal and previous copy records discarded and
385 adjusted to normal and previous copy records discarded and
386 returned by the call.
386 returned by the call.
387
387
388 See localrepo.setparents()
388 See localrepo.setparents()
389 """
389 """
390 if p2 is None:
390 if p2 is None:
391 p2 = self._nodeconstants.nullid
391 p2 = self._nodeconstants.nullid
392 if self._parentwriters == 0:
392 if self._parentwriters == 0:
393 raise ValueError(
393 raise ValueError(
394 b"cannot set dirstate parent outside of "
394 b"cannot set dirstate parent outside of "
395 b"dirstate.parentchange context manager"
395 b"dirstate.parentchange context manager"
396 )
396 )
397
397
398 self._dirty = True
398 self._dirty = True
399 oldp2 = self._pl[1]
399 oldp2 = self._pl[1]
400 if self._origpl is None:
400 if self._origpl is None:
401 self._origpl = self._pl
401 self._origpl = self._pl
402 nullid = self._nodeconstants.nullid
402 nullid = self._nodeconstants.nullid
403 # True if we need to fold p2 related state back to a linear case
403 # True if we need to fold p2 related state back to a linear case
404 fold_p2 = oldp2 != nullid and p2 == nullid
404 fold_p2 = oldp2 != nullid and p2 == nullid
405 return self._map.setparents(p1, p2, fold_p2=fold_p2)
405 return self._map.setparents(p1, p2, fold_p2=fold_p2)
406
406
407 def setbranch(self, branch):
407 def setbranch(self, branch):
408 self.__class__._branch.set(self, encoding.fromlocal(branch))
408 self.__class__._branch.set(self, encoding.fromlocal(branch))
409 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
409 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
410 try:
410 try:
411 f.write(self._branch + b'\n')
411 f.write(self._branch + b'\n')
412 f.close()
412 f.close()
413
413
414 # make sure filecache has the correct stat info for _branch after
414 # make sure filecache has the correct stat info for _branch after
415 # replacing the underlying file
415 # replacing the underlying file
416 ce = self._filecache[b'_branch']
416 ce = self._filecache[b'_branch']
417 if ce:
417 if ce:
418 ce.refresh()
418 ce.refresh()
419 except: # re-raises
419 except: # re-raises
420 f.discard()
420 f.discard()
421 raise
421 raise
422
422
423 def invalidate(self):
423 def invalidate(self):
424 """Causes the next access to reread the dirstate.
424 """Causes the next access to reread the dirstate.
425
425
426 This is different from localrepo.invalidatedirstate() because it always
426 This is different from localrepo.invalidatedirstate() because it always
427 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
427 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
428 check whether the dirstate has changed before rereading it."""
428 check whether the dirstate has changed before rereading it."""
429
429
430 for a in ("_map", "_branch", "_ignore"):
430 for a in ("_map", "_branch", "_ignore"):
431 if a in self.__dict__:
431 if a in self.__dict__:
432 delattr(self, a)
432 delattr(self, a)
433 self._lastnormaltime = timestamp.zero()
433 self._lastnormaltime = timestamp.zero()
434 self._dirty = False
434 self._dirty = False
435 self._parentwriters = 0
435 self._parentwriters = 0
436 self._origpl = None
436 self._origpl = None
437
437
438 def copy(self, source, dest):
438 def copy(self, source, dest):
439 """Mark dest as a copy of source. Unmark dest if source is None."""
439 """Mark dest as a copy of source. Unmark dest if source is None."""
440 if source == dest:
440 if source == dest:
441 return
441 return
442 self._dirty = True
442 self._dirty = True
443 if source is not None:
443 if source is not None:
444 self._map.copymap[dest] = source
444 self._map.copymap[dest] = source
445 else:
445 else:
446 self._map.copymap.pop(dest, None)
446 self._map.copymap.pop(dest, None)
447
447
448 def copied(self, file):
448 def copied(self, file):
449 return self._map.copymap.get(file, None)
449 return self._map.copymap.get(file, None)
450
450
451 def copies(self):
451 def copies(self):
452 return self._map.copymap
452 return self._map.copymap
453
453
454 @requires_no_parents_change
454 @requires_no_parents_change
455 def set_tracked(self, filename, reset_copy=False):
455 def set_tracked(self, filename, reset_copy=False):
456 """a "public" method for generic code to mark a file as tracked
456 """a "public" method for generic code to mark a file as tracked
457
457
458 This function is to be called outside of "update/merge" case. For
458 This function is to be called outside of "update/merge" case. For
459 example by a command like `hg add X`.
459 example by a command like `hg add X`.
460
460
461 if reset_copy is set, any existing copy information will be dropped.
461 if reset_copy is set, any existing copy information will be dropped.
462
462
463 return True the file was previously untracked, False otherwise.
463 return True the file was previously untracked, False otherwise.
464 """
464 """
465 self._dirty = True
465 self._dirty = True
466 entry = self._map.get(filename)
466 entry = self._map.get(filename)
467 if entry is None or not entry.tracked:
467 if entry is None or not entry.tracked:
468 self._check_new_tracked_filename(filename)
468 self._check_new_tracked_filename(filename)
469 pre_tracked = self._map.set_tracked(filename)
469 pre_tracked = self._map.set_tracked(filename)
470 if reset_copy:
470 if reset_copy:
471 self._map.copymap.pop(filename, None)
471 self._map.copymap.pop(filename, None)
472 return pre_tracked
472 return pre_tracked
473
473
474 @requires_no_parents_change
474 @requires_no_parents_change
475 def set_untracked(self, filename):
475 def set_untracked(self, filename):
476 """a "public" method for generic code to mark a file as untracked
476 """a "public" method for generic code to mark a file as untracked
477
477
478 This function is to be called outside of "update/merge" case. For
478 This function is to be called outside of "update/merge" case. For
479 example by a command like `hg remove X`.
479 example by a command like `hg remove X`.
480
480
481 return True the file was previously tracked, False otherwise.
481 return True the file was previously tracked, False otherwise.
482 """
482 """
483 ret = self._map.set_untracked(filename)
483 ret = self._map.set_untracked(filename)
484 if ret:
484 if ret:
485 self._dirty = True
485 self._dirty = True
486 return ret
486 return ret
487
487
488 @requires_no_parents_change
488 @requires_no_parents_change
489 def set_clean(self, filename, parentfiledata):
489 def set_clean(self, filename, parentfiledata):
490 """record that the current state of the file on disk is known to be clean"""
490 """record that the current state of the file on disk is known to be clean"""
491 self._dirty = True
491 self._dirty = True
492 if not self._map[filename].tracked:
492 if not self._map[filename].tracked:
493 self._check_new_tracked_filename(filename)
493 self._check_new_tracked_filename(filename)
494 (mode, size, mtime) = parentfiledata
494 (mode, size, mtime) = parentfiledata
495 self._map.set_clean(filename, mode, size, mtime)
495 self._map.set_clean(filename, mode, size, mtime)
496 if mtime > self._lastnormaltime:
496 if mtime > self._lastnormaltime:
497 # Remember the most recent modification timeslot for status(),
497 # Remember the most recent modification timeslot for status(),
498 # to make sure we won't miss future size-preserving file content
498 # to make sure we won't miss future size-preserving file content
499 # modifications that happen within the same timeslot.
499 # modifications that happen within the same timeslot.
500 self._lastnormaltime = mtime
500 self._lastnormaltime = mtime
501
501
502 @requires_no_parents_change
502 @requires_no_parents_change
503 def set_possibly_dirty(self, filename):
503 def set_possibly_dirty(self, filename):
504 """record that the current state of the file on disk is unknown"""
504 """record that the current state of the file on disk is unknown"""
505 self._dirty = True
505 self._dirty = True
506 self._map.set_possibly_dirty(filename)
506 self._map.set_possibly_dirty(filename)
507
507
508 @requires_parents_change
508 @requires_parents_change
509 def update_file_p1(
509 def update_file_p1(
510 self,
510 self,
511 filename,
511 filename,
512 p1_tracked,
512 p1_tracked,
513 ):
513 ):
514 """Set a file as tracked in the parent (or not)
514 """Set a file as tracked in the parent (or not)
515
515
516 This is to be called when adjust the dirstate to a new parent after an history
516 This is to be called when adjust the dirstate to a new parent after an history
517 rewriting operation.
517 rewriting operation.
518
518
519 It should not be called during a merge (p2 != nullid) and only within
519 It should not be called during a merge (p2 != nullid) and only within
520 a `with dirstate.parentchange():` context.
520 a `with dirstate.parentchange():` context.
521 """
521 """
522 if self.in_merge:
522 if self.in_merge:
523 msg = b'update_file_reference should not be called when merging'
523 msg = b'update_file_reference should not be called when merging'
524 raise error.ProgrammingError(msg)
524 raise error.ProgrammingError(msg)
525 entry = self._map.get(filename)
525 entry = self._map.get(filename)
526 if entry is None:
526 if entry is None:
527 wc_tracked = False
527 wc_tracked = False
528 else:
528 else:
529 wc_tracked = entry.tracked
529 wc_tracked = entry.tracked
530 if not (p1_tracked or wc_tracked):
530 if not (p1_tracked or wc_tracked):
531 # the file is no longer relevant to anyone
531 # the file is no longer relevant to anyone
532 if self._map.get(filename) is not None:
532 if self._map.get(filename) is not None:
533 self._map.reset_state(filename)
533 self._map.reset_state(filename)
534 self._dirty = True
534 self._dirty = True
535 elif (not p1_tracked) and wc_tracked:
535 elif (not p1_tracked) and wc_tracked:
536 if entry is not None and entry.added:
536 if entry is not None and entry.added:
537 return # avoid dropping copy information (maybe?)
537 return # avoid dropping copy information (maybe?)
538
538
539 parentfiledata = None
540 if wc_tracked and p1_tracked:
541 parentfiledata = self._get_filedata(filename)
542
543 self._map.reset_state(
539 self._map.reset_state(
544 filename,
540 filename,
545 wc_tracked,
541 wc_tracked,
546 p1_tracked,
542 p1_tracked,
547 # the underlying reference might have changed, we will have to
543 # the underlying reference might have changed, we will have to
548 # check it.
544 # check it.
549 has_meaningful_mtime=False,
545 has_meaningful_mtime=False,
550 parentfiledata=parentfiledata,
551 )
546 )
552 if (
553 parentfiledata is not None
554 and parentfiledata[2] > self._lastnormaltime
555 ):
556 # Remember the most recent modification timeslot for status(),
557 # to make sure we won't miss future size-preserving file content
558 # modifications that happen within the same timeslot.
559 self._lastnormaltime = parentfiledata[2]
560
547
561 @requires_parents_change
548 @requires_parents_change
562 def update_file(
549 def update_file(
563 self,
550 self,
564 filename,
551 filename,
565 wc_tracked,
552 wc_tracked,
566 p1_tracked,
553 p1_tracked,
567 p2_info=False,
554 p2_info=False,
568 possibly_dirty=False,
555 possibly_dirty=False,
569 parentfiledata=None,
556 parentfiledata=None,
570 ):
557 ):
571 """update the information about a file in the dirstate
558 """update the information about a file in the dirstate
572
559
573 This is to be called when the direstates parent changes to keep track
560 This is to be called when the direstates parent changes to keep track
574 of what is the file situation in regards to the working copy and its parent.
561 of what is the file situation in regards to the working copy and its parent.
575
562
576 This function must be called within a `dirstate.parentchange` context.
563 This function must be called within a `dirstate.parentchange` context.
577
564
578 note: the API is at an early stage and we might need to adjust it
565 note: the API is at an early stage and we might need to adjust it
579 depending of what information ends up being relevant and useful to
566 depending of what information ends up being relevant and useful to
580 other processing.
567 other processing.
581 """
568 """
582
569
583 # note: I do not think we need to double check name clash here since we
570 # note: I do not think we need to double check name clash here since we
584 # are in a update/merge case that should already have taken care of
571 # are in a update/merge case that should already have taken care of
585 # this. The test agrees
572 # this. The test agrees
586
573
587 self._dirty = True
574 self._dirty = True
588
575
589 need_parent_file_data = (
576 need_parent_file_data = (
590 not possibly_dirty and not p2_info and wc_tracked and p1_tracked
577 not possibly_dirty and not p2_info and wc_tracked and p1_tracked
591 )
578 )
592
579
593 if need_parent_file_data and parentfiledata is None:
580 if need_parent_file_data and parentfiledata is None:
594 parentfiledata = self._get_filedata(filename)
581 parentfiledata = self._get_filedata(filename)
595
582
596 self._map.reset_state(
583 self._map.reset_state(
597 filename,
584 filename,
598 wc_tracked,
585 wc_tracked,
599 p1_tracked,
586 p1_tracked,
600 p2_info=p2_info,
587 p2_info=p2_info,
601 has_meaningful_mtime=not possibly_dirty,
588 has_meaningful_mtime=not possibly_dirty,
602 parentfiledata=parentfiledata,
589 parentfiledata=parentfiledata,
603 )
590 )
604 if (
591 if (
605 parentfiledata is not None
592 parentfiledata is not None
606 and parentfiledata[2] is not None
593 and parentfiledata[2] is not None
607 and parentfiledata[2] > self._lastnormaltime
594 and parentfiledata[2] > self._lastnormaltime
608 ):
595 ):
609 # Remember the most recent modification timeslot for status(),
596 # Remember the most recent modification timeslot for status(),
610 # to make sure we won't miss future size-preserving file content
597 # to make sure we won't miss future size-preserving file content
611 # modifications that happen within the same timeslot.
598 # modifications that happen within the same timeslot.
612 self._lastnormaltime = parentfiledata[2]
599 self._lastnormaltime = parentfiledata[2]
613
600
614 def _check_new_tracked_filename(self, filename):
601 def _check_new_tracked_filename(self, filename):
615 scmutil.checkfilename(filename)
602 scmutil.checkfilename(filename)
616 if self._map.hastrackeddir(filename):
603 if self._map.hastrackeddir(filename):
617 msg = _(b'directory %r already in dirstate')
604 msg = _(b'directory %r already in dirstate')
618 msg %= pycompat.bytestr(filename)
605 msg %= pycompat.bytestr(filename)
619 raise error.Abort(msg)
606 raise error.Abort(msg)
620 # shadows
607 # shadows
621 for d in pathutil.finddirs(filename):
608 for d in pathutil.finddirs(filename):
622 if self._map.hastrackeddir(d):
609 if self._map.hastrackeddir(d):
623 break
610 break
624 entry = self._map.get(d)
611 entry = self._map.get(d)
625 if entry is not None and not entry.removed:
612 if entry is not None and not entry.removed:
626 msg = _(b'file %r in dirstate clashes with %r')
613 msg = _(b'file %r in dirstate clashes with %r')
627 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
614 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
628 raise error.Abort(msg)
615 raise error.Abort(msg)
629
616
630 def _get_filedata(self, filename):
617 def _get_filedata(self, filename):
631 """returns"""
618 """returns"""
632 s = os.lstat(self._join(filename))
619 s = os.lstat(self._join(filename))
633 mode = s.st_mode
620 mode = s.st_mode
634 size = s.st_size
621 size = s.st_size
635 mtime = timestamp.mtime_of(s)
622 mtime = timestamp.mtime_of(s)
636 return (mode, size, mtime)
623 return (mode, size, mtime)
637
624
638 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
625 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
639 if exists is None:
626 if exists is None:
640 exists = os.path.lexists(os.path.join(self._root, path))
627 exists = os.path.lexists(os.path.join(self._root, path))
641 if not exists:
628 if not exists:
642 # Maybe a path component exists
629 # Maybe a path component exists
643 if not ignoremissing and b'/' in path:
630 if not ignoremissing and b'/' in path:
644 d, f = path.rsplit(b'/', 1)
631 d, f = path.rsplit(b'/', 1)
645 d = self._normalize(d, False, ignoremissing, None)
632 d = self._normalize(d, False, ignoremissing, None)
646 folded = d + b"/" + f
633 folded = d + b"/" + f
647 else:
634 else:
648 # No path components, preserve original case
635 # No path components, preserve original case
649 folded = path
636 folded = path
650 else:
637 else:
651 # recursively normalize leading directory components
638 # recursively normalize leading directory components
652 # against dirstate
639 # against dirstate
653 if b'/' in normed:
640 if b'/' in normed:
654 d, f = normed.rsplit(b'/', 1)
641 d, f = normed.rsplit(b'/', 1)
655 d = self._normalize(d, False, ignoremissing, True)
642 d = self._normalize(d, False, ignoremissing, True)
656 r = self._root + b"/" + d
643 r = self._root + b"/" + d
657 folded = d + b"/" + util.fspath(f, r)
644 folded = d + b"/" + util.fspath(f, r)
658 else:
645 else:
659 folded = util.fspath(normed, self._root)
646 folded = util.fspath(normed, self._root)
660 storemap[normed] = folded
647 storemap[normed] = folded
661
648
662 return folded
649 return folded
663
650
664 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
651 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
665 normed = util.normcase(path)
652 normed = util.normcase(path)
666 folded = self._map.filefoldmap.get(normed, None)
653 folded = self._map.filefoldmap.get(normed, None)
667 if folded is None:
654 if folded is None:
668 if isknown:
655 if isknown:
669 folded = path
656 folded = path
670 else:
657 else:
671 folded = self._discoverpath(
658 folded = self._discoverpath(
672 path, normed, ignoremissing, exists, self._map.filefoldmap
659 path, normed, ignoremissing, exists, self._map.filefoldmap
673 )
660 )
674 return folded
661 return folded
675
662
676 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
663 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
677 normed = util.normcase(path)
664 normed = util.normcase(path)
678 folded = self._map.filefoldmap.get(normed, None)
665 folded = self._map.filefoldmap.get(normed, None)
679 if folded is None:
666 if folded is None:
680 folded = self._map.dirfoldmap.get(normed, None)
667 folded = self._map.dirfoldmap.get(normed, None)
681 if folded is None:
668 if folded is None:
682 if isknown:
669 if isknown:
683 folded = path
670 folded = path
684 else:
671 else:
685 # store discovered result in dirfoldmap so that future
672 # store discovered result in dirfoldmap so that future
686 # normalizefile calls don't start matching directories
673 # normalizefile calls don't start matching directories
687 folded = self._discoverpath(
674 folded = self._discoverpath(
688 path, normed, ignoremissing, exists, self._map.dirfoldmap
675 path, normed, ignoremissing, exists, self._map.dirfoldmap
689 )
676 )
690 return folded
677 return folded
691
678
692 def normalize(self, path, isknown=False, ignoremissing=False):
679 def normalize(self, path, isknown=False, ignoremissing=False):
693 """
680 """
694 normalize the case of a pathname when on a casefolding filesystem
681 normalize the case of a pathname when on a casefolding filesystem
695
682
696 isknown specifies whether the filename came from walking the
683 isknown specifies whether the filename came from walking the
697 disk, to avoid extra filesystem access.
684 disk, to avoid extra filesystem access.
698
685
699 If ignoremissing is True, missing path are returned
686 If ignoremissing is True, missing path are returned
700 unchanged. Otherwise, we try harder to normalize possibly
687 unchanged. Otherwise, we try harder to normalize possibly
701 existing path components.
688 existing path components.
702
689
703 The normalized case is determined based on the following precedence:
690 The normalized case is determined based on the following precedence:
704
691
705 - version of name already stored in the dirstate
692 - version of name already stored in the dirstate
706 - version of name stored on disk
693 - version of name stored on disk
707 - version provided via command arguments
694 - version provided via command arguments
708 """
695 """
709
696
710 if self._checkcase:
697 if self._checkcase:
711 return self._normalize(path, isknown, ignoremissing)
698 return self._normalize(path, isknown, ignoremissing)
712 return path
699 return path
713
700
714 def clear(self):
701 def clear(self):
715 self._map.clear()
702 self._map.clear()
716 self._lastnormaltime = timestamp.zero()
703 self._lastnormaltime = timestamp.zero()
717 self._dirty = True
704 self._dirty = True
718
705
719 def rebuild(self, parent, allfiles, changedfiles=None):
706 def rebuild(self, parent, allfiles, changedfiles=None):
720 if changedfiles is None:
707 if changedfiles is None:
721 # Rebuild entire dirstate
708 # Rebuild entire dirstate
722 to_lookup = allfiles
709 to_lookup = allfiles
723 to_drop = []
710 to_drop = []
724 lastnormaltime = self._lastnormaltime
711 lastnormaltime = self._lastnormaltime
725 self.clear()
712 self.clear()
726 self._lastnormaltime = lastnormaltime
713 self._lastnormaltime = lastnormaltime
727 elif len(changedfiles) < 10:
714 elif len(changedfiles) < 10:
728 # Avoid turning allfiles into a set, which can be expensive if it's
715 # Avoid turning allfiles into a set, which can be expensive if it's
729 # large.
716 # large.
730 to_lookup = []
717 to_lookup = []
731 to_drop = []
718 to_drop = []
732 for f in changedfiles:
719 for f in changedfiles:
733 if f in allfiles:
720 if f in allfiles:
734 to_lookup.append(f)
721 to_lookup.append(f)
735 else:
722 else:
736 to_drop.append(f)
723 to_drop.append(f)
737 else:
724 else:
738 changedfilesset = set(changedfiles)
725 changedfilesset = set(changedfiles)
739 to_lookup = changedfilesset & set(allfiles)
726 to_lookup = changedfilesset & set(allfiles)
740 to_drop = changedfilesset - to_lookup
727 to_drop = changedfilesset - to_lookup
741
728
742 if self._origpl is None:
729 if self._origpl is None:
743 self._origpl = self._pl
730 self._origpl = self._pl
744 self._map.setparents(parent, self._nodeconstants.nullid)
731 self._map.setparents(parent, self._nodeconstants.nullid)
745
732
746 for f in to_lookup:
733 for f in to_lookup:
747
734
748 if self.in_merge:
735 if self.in_merge:
749 self.set_tracked(f)
736 self.set_tracked(f)
750 else:
737 else:
751 self._map.reset_state(
738 self._map.reset_state(
752 f,
739 f,
753 wc_tracked=True,
740 wc_tracked=True,
754 p1_tracked=True,
741 p1_tracked=True,
755 )
742 )
756 for f in to_drop:
743 for f in to_drop:
757 self._map.reset_state(f)
744 self._map.reset_state(f)
758
745
759 self._dirty = True
746 self._dirty = True
760
747
761 def identity(self):
748 def identity(self):
762 """Return identity of dirstate itself to detect changing in storage
749 """Return identity of dirstate itself to detect changing in storage
763
750
764 If identity of previous dirstate is equal to this, writing
751 If identity of previous dirstate is equal to this, writing
765 changes based on the former dirstate out can keep consistency.
752 changes based on the former dirstate out can keep consistency.
766 """
753 """
767 return self._map.identity
754 return self._map.identity
768
755
769 def write(self, tr):
756 def write(self, tr):
770 if not self._dirty:
757 if not self._dirty:
771 return
758 return
772
759
773 filename = self._filename
760 filename = self._filename
774 if tr:
761 if tr:
775 # 'dirstate.write()' is not only for writing in-memory
762 # 'dirstate.write()' is not only for writing in-memory
776 # changes out, but also for dropping ambiguous timestamp.
763 # changes out, but also for dropping ambiguous timestamp.
777 # delayed writing re-raise "ambiguous timestamp issue".
764 # delayed writing re-raise "ambiguous timestamp issue".
778 # See also the wiki page below for detail:
765 # See also the wiki page below for detail:
779 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
766 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
780
767
781 # record when mtime start to be ambiguous
768 # record when mtime start to be ambiguous
782 now = timestamp.get_fs_now(self._opener)
769 now = timestamp.get_fs_now(self._opener)
783
770
784 # delay writing in-memory changes out
771 # delay writing in-memory changes out
785 tr.addfilegenerator(
772 tr.addfilegenerator(
786 b'dirstate',
773 b'dirstate',
787 (self._filename,),
774 (self._filename,),
788 lambda f: self._writedirstate(tr, f, now=now),
775 lambda f: self._writedirstate(tr, f, now=now),
789 location=b'plain',
776 location=b'plain',
790 )
777 )
791 return
778 return
792
779
793 st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
780 st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
794 self._writedirstate(tr, st)
781 self._writedirstate(tr, st)
795
782
796 def addparentchangecallback(self, category, callback):
783 def addparentchangecallback(self, category, callback):
797 """add a callback to be called when the wd parents are changed
784 """add a callback to be called when the wd parents are changed
798
785
799 Callback will be called with the following arguments:
786 Callback will be called with the following arguments:
800 dirstate, (oldp1, oldp2), (newp1, newp2)
787 dirstate, (oldp1, oldp2), (newp1, newp2)
801
788
802 Category is a unique identifier to allow overwriting an old callback
789 Category is a unique identifier to allow overwriting an old callback
803 with a newer callback.
790 with a newer callback.
804 """
791 """
805 self._plchangecallbacks[category] = callback
792 self._plchangecallbacks[category] = callback
806
793
807 def _writedirstate(self, tr, st, now=None):
794 def _writedirstate(self, tr, st, now=None):
808 # notify callbacks about parents change
795 # notify callbacks about parents change
809 if self._origpl is not None and self._origpl != self._pl:
796 if self._origpl is not None and self._origpl != self._pl:
810 for c, callback in sorted(
797 for c, callback in sorted(
811 pycompat.iteritems(self._plchangecallbacks)
798 pycompat.iteritems(self._plchangecallbacks)
812 ):
799 ):
813 callback(self, self._origpl, self._pl)
800 callback(self, self._origpl, self._pl)
814 self._origpl = None
801 self._origpl = None
815
802
816 if now is None:
803 if now is None:
817 # use the modification time of the newly created temporary file as the
804 # use the modification time of the newly created temporary file as the
818 # filesystem's notion of 'now'
805 # filesystem's notion of 'now'
819 now = timestamp.mtime_of(util.fstat(st))
806 now = timestamp.mtime_of(util.fstat(st))
820
807
821 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
808 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
822 # timestamp of each entries in dirstate, because of 'now > mtime'
809 # timestamp of each entries in dirstate, because of 'now > mtime'
823 delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
810 delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
824 if delaywrite > 0:
811 if delaywrite > 0:
825 # do we have any files to delay for?
812 # do we have any files to delay for?
826 for f, e in pycompat.iteritems(self._map):
813 for f, e in pycompat.iteritems(self._map):
827 if e.need_delay(now):
814 if e.need_delay(now):
828 import time # to avoid useless import
815 import time # to avoid useless import
829
816
830 # rather than sleep n seconds, sleep until the next
817 # rather than sleep n seconds, sleep until the next
831 # multiple of n seconds
818 # multiple of n seconds
832 clock = time.time()
819 clock = time.time()
833 start = int(clock) - (int(clock) % delaywrite)
820 start = int(clock) - (int(clock) % delaywrite)
834 end = start + delaywrite
821 end = start + delaywrite
835 time.sleep(end - clock)
822 time.sleep(end - clock)
836 # trust our estimate that the end is near now
823 # trust our estimate that the end is near now
837 now = timestamp.timestamp((end, 0))
824 now = timestamp.timestamp((end, 0))
838 break
825 break
839
826
840 self._map.write(tr, st, now)
827 self._map.write(tr, st, now)
841 self._lastnormaltime = timestamp.zero()
828 self._lastnormaltime = timestamp.zero()
842 self._dirty = False
829 self._dirty = False
843
830
844 def _dirignore(self, f):
831 def _dirignore(self, f):
845 if self._ignore(f):
832 if self._ignore(f):
846 return True
833 return True
847 for p in pathutil.finddirs(f):
834 for p in pathutil.finddirs(f):
848 if self._ignore(p):
835 if self._ignore(p):
849 return True
836 return True
850 return False
837 return False
851
838
852 def _ignorefiles(self):
839 def _ignorefiles(self):
853 files = []
840 files = []
854 if os.path.exists(self._join(b'.hgignore')):
841 if os.path.exists(self._join(b'.hgignore')):
855 files.append(self._join(b'.hgignore'))
842 files.append(self._join(b'.hgignore'))
856 for name, path in self._ui.configitems(b"ui"):
843 for name, path in self._ui.configitems(b"ui"):
857 if name == b'ignore' or name.startswith(b'ignore.'):
844 if name == b'ignore' or name.startswith(b'ignore.'):
858 # we need to use os.path.join here rather than self._join
845 # we need to use os.path.join here rather than self._join
859 # because path is arbitrary and user-specified
846 # because path is arbitrary and user-specified
860 files.append(os.path.join(self._rootdir, util.expandpath(path)))
847 files.append(os.path.join(self._rootdir, util.expandpath(path)))
861 return files
848 return files
862
849
863 def _ignorefileandline(self, f):
850 def _ignorefileandline(self, f):
864 files = collections.deque(self._ignorefiles())
851 files = collections.deque(self._ignorefiles())
865 visited = set()
852 visited = set()
866 while files:
853 while files:
867 i = files.popleft()
854 i = files.popleft()
868 patterns = matchmod.readpatternfile(
855 patterns = matchmod.readpatternfile(
869 i, self._ui.warn, sourceinfo=True
856 i, self._ui.warn, sourceinfo=True
870 )
857 )
871 for pattern, lineno, line in patterns:
858 for pattern, lineno, line in patterns:
872 kind, p = matchmod._patsplit(pattern, b'glob')
859 kind, p = matchmod._patsplit(pattern, b'glob')
873 if kind == b"subinclude":
860 if kind == b"subinclude":
874 if p not in visited:
861 if p not in visited:
875 files.append(p)
862 files.append(p)
876 continue
863 continue
877 m = matchmod.match(
864 m = matchmod.match(
878 self._root, b'', [], [pattern], warn=self._ui.warn
865 self._root, b'', [], [pattern], warn=self._ui.warn
879 )
866 )
880 if m(f):
867 if m(f):
881 return (i, lineno, line)
868 return (i, lineno, line)
882 visited.add(i)
869 visited.add(i)
883 return (None, -1, b"")
870 return (None, -1, b"")
884
871
885 def _walkexplicit(self, match, subrepos):
872 def _walkexplicit(self, match, subrepos):
886 """Get stat data about the files explicitly specified by match.
873 """Get stat data about the files explicitly specified by match.
887
874
888 Return a triple (results, dirsfound, dirsnotfound).
875 Return a triple (results, dirsfound, dirsnotfound).
889 - results is a mapping from filename to stat result. It also contains
876 - results is a mapping from filename to stat result. It also contains
890 listings mapping subrepos and .hg to None.
877 listings mapping subrepos and .hg to None.
891 - dirsfound is a list of files found to be directories.
878 - dirsfound is a list of files found to be directories.
892 - dirsnotfound is a list of files that the dirstate thinks are
879 - dirsnotfound is a list of files that the dirstate thinks are
893 directories and that were not found."""
880 directories and that were not found."""
894
881
895 def badtype(mode):
882 def badtype(mode):
896 kind = _(b'unknown')
883 kind = _(b'unknown')
897 if stat.S_ISCHR(mode):
884 if stat.S_ISCHR(mode):
898 kind = _(b'character device')
885 kind = _(b'character device')
899 elif stat.S_ISBLK(mode):
886 elif stat.S_ISBLK(mode):
900 kind = _(b'block device')
887 kind = _(b'block device')
901 elif stat.S_ISFIFO(mode):
888 elif stat.S_ISFIFO(mode):
902 kind = _(b'fifo')
889 kind = _(b'fifo')
903 elif stat.S_ISSOCK(mode):
890 elif stat.S_ISSOCK(mode):
904 kind = _(b'socket')
891 kind = _(b'socket')
905 elif stat.S_ISDIR(mode):
892 elif stat.S_ISDIR(mode):
906 kind = _(b'directory')
893 kind = _(b'directory')
907 return _(b'unsupported file type (type is %s)') % kind
894 return _(b'unsupported file type (type is %s)') % kind
908
895
909 badfn = match.bad
896 badfn = match.bad
910 dmap = self._map
897 dmap = self._map
911 lstat = os.lstat
898 lstat = os.lstat
912 getkind = stat.S_IFMT
899 getkind = stat.S_IFMT
913 dirkind = stat.S_IFDIR
900 dirkind = stat.S_IFDIR
914 regkind = stat.S_IFREG
901 regkind = stat.S_IFREG
915 lnkkind = stat.S_IFLNK
902 lnkkind = stat.S_IFLNK
916 join = self._join
903 join = self._join
917 dirsfound = []
904 dirsfound = []
918 foundadd = dirsfound.append
905 foundadd = dirsfound.append
919 dirsnotfound = []
906 dirsnotfound = []
920 notfoundadd = dirsnotfound.append
907 notfoundadd = dirsnotfound.append
921
908
922 if not match.isexact() and self._checkcase:
909 if not match.isexact() and self._checkcase:
923 normalize = self._normalize
910 normalize = self._normalize
924 else:
911 else:
925 normalize = None
912 normalize = None
926
913
927 files = sorted(match.files())
914 files = sorted(match.files())
928 subrepos.sort()
915 subrepos.sort()
929 i, j = 0, 0
916 i, j = 0, 0
930 while i < len(files) and j < len(subrepos):
917 while i < len(files) and j < len(subrepos):
931 subpath = subrepos[j] + b"/"
918 subpath = subrepos[j] + b"/"
932 if files[i] < subpath:
919 if files[i] < subpath:
933 i += 1
920 i += 1
934 continue
921 continue
935 while i < len(files) and files[i].startswith(subpath):
922 while i < len(files) and files[i].startswith(subpath):
936 del files[i]
923 del files[i]
937 j += 1
924 j += 1
938
925
939 if not files or b'' in files:
926 if not files or b'' in files:
940 files = [b'']
927 files = [b'']
941 # constructing the foldmap is expensive, so don't do it for the
928 # constructing the foldmap is expensive, so don't do it for the
942 # common case where files is ['']
929 # common case where files is ['']
943 normalize = None
930 normalize = None
944 results = dict.fromkeys(subrepos)
931 results = dict.fromkeys(subrepos)
945 results[b'.hg'] = None
932 results[b'.hg'] = None
946
933
947 for ff in files:
934 for ff in files:
948 if normalize:
935 if normalize:
949 nf = normalize(ff, False, True)
936 nf = normalize(ff, False, True)
950 else:
937 else:
951 nf = ff
938 nf = ff
952 if nf in results:
939 if nf in results:
953 continue
940 continue
954
941
955 try:
942 try:
956 st = lstat(join(nf))
943 st = lstat(join(nf))
957 kind = getkind(st.st_mode)
944 kind = getkind(st.st_mode)
958 if kind == dirkind:
945 if kind == dirkind:
959 if nf in dmap:
946 if nf in dmap:
960 # file replaced by dir on disk but still in dirstate
947 # file replaced by dir on disk but still in dirstate
961 results[nf] = None
948 results[nf] = None
962 foundadd((nf, ff))
949 foundadd((nf, ff))
963 elif kind == regkind or kind == lnkkind:
950 elif kind == regkind or kind == lnkkind:
964 results[nf] = st
951 results[nf] = st
965 else:
952 else:
966 badfn(ff, badtype(kind))
953 badfn(ff, badtype(kind))
967 if nf in dmap:
954 if nf in dmap:
968 results[nf] = None
955 results[nf] = None
969 except OSError as inst: # nf not found on disk - it is dirstate only
956 except OSError as inst: # nf not found on disk - it is dirstate only
970 if nf in dmap: # does it exactly match a missing file?
957 if nf in dmap: # does it exactly match a missing file?
971 results[nf] = None
958 results[nf] = None
972 else: # does it match a missing directory?
959 else: # does it match a missing directory?
973 if self._map.hasdir(nf):
960 if self._map.hasdir(nf):
974 notfoundadd(nf)
961 notfoundadd(nf)
975 else:
962 else:
976 badfn(ff, encoding.strtolocal(inst.strerror))
963 badfn(ff, encoding.strtolocal(inst.strerror))
977
964
978 # match.files() may contain explicitly-specified paths that shouldn't
965 # match.files() may contain explicitly-specified paths that shouldn't
979 # be taken; drop them from the list of files found. dirsfound/notfound
966 # be taken; drop them from the list of files found. dirsfound/notfound
980 # aren't filtered here because they will be tested later.
967 # aren't filtered here because they will be tested later.
981 if match.anypats():
968 if match.anypats():
982 for f in list(results):
969 for f in list(results):
983 if f == b'.hg' or f in subrepos:
970 if f == b'.hg' or f in subrepos:
984 # keep sentinel to disable further out-of-repo walks
971 # keep sentinel to disable further out-of-repo walks
985 continue
972 continue
986 if not match(f):
973 if not match(f):
987 del results[f]
974 del results[f]
988
975
989 # Case insensitive filesystems cannot rely on lstat() failing to detect
976 # Case insensitive filesystems cannot rely on lstat() failing to detect
990 # a case-only rename. Prune the stat object for any file that does not
977 # a case-only rename. Prune the stat object for any file that does not
991 # match the case in the filesystem, if there are multiple files that
978 # match the case in the filesystem, if there are multiple files that
992 # normalize to the same path.
979 # normalize to the same path.
993 if match.isexact() and self._checkcase:
980 if match.isexact() and self._checkcase:
994 normed = {}
981 normed = {}
995
982
996 for f, st in pycompat.iteritems(results):
983 for f, st in pycompat.iteritems(results):
997 if st is None:
984 if st is None:
998 continue
985 continue
999
986
1000 nc = util.normcase(f)
987 nc = util.normcase(f)
1001 paths = normed.get(nc)
988 paths = normed.get(nc)
1002
989
1003 if paths is None:
990 if paths is None:
1004 paths = set()
991 paths = set()
1005 normed[nc] = paths
992 normed[nc] = paths
1006
993
1007 paths.add(f)
994 paths.add(f)
1008
995
1009 for norm, paths in pycompat.iteritems(normed):
996 for norm, paths in pycompat.iteritems(normed):
1010 if len(paths) > 1:
997 if len(paths) > 1:
1011 for path in paths:
998 for path in paths:
1012 folded = self._discoverpath(
999 folded = self._discoverpath(
1013 path, norm, True, None, self._map.dirfoldmap
1000 path, norm, True, None, self._map.dirfoldmap
1014 )
1001 )
1015 if path != folded:
1002 if path != folded:
1016 results[path] = None
1003 results[path] = None
1017
1004
1018 return results, dirsfound, dirsnotfound
1005 return results, dirsfound, dirsnotfound
1019
1006
1020 def walk(self, match, subrepos, unknown, ignored, full=True):
1007 def walk(self, match, subrepos, unknown, ignored, full=True):
1021 """
1008 """
1022 Walk recursively through the directory tree, finding all files
1009 Walk recursively through the directory tree, finding all files
1023 matched by match.
1010 matched by match.
1024
1011
1025 If full is False, maybe skip some known-clean files.
1012 If full is False, maybe skip some known-clean files.
1026
1013
1027 Return a dict mapping filename to stat-like object (either
1014 Return a dict mapping filename to stat-like object (either
1028 mercurial.osutil.stat instance or return value of os.stat()).
1015 mercurial.osutil.stat instance or return value of os.stat()).
1029
1016
1030 """
1017 """
1031 # full is a flag that extensions that hook into walk can use -- this
1018 # full is a flag that extensions that hook into walk can use -- this
1032 # implementation doesn't use it at all. This satisfies the contract
1019 # implementation doesn't use it at all. This satisfies the contract
1033 # because we only guarantee a "maybe".
1020 # because we only guarantee a "maybe".
1034
1021
1035 if ignored:
1022 if ignored:
1036 ignore = util.never
1023 ignore = util.never
1037 dirignore = util.never
1024 dirignore = util.never
1038 elif unknown:
1025 elif unknown:
1039 ignore = self._ignore
1026 ignore = self._ignore
1040 dirignore = self._dirignore
1027 dirignore = self._dirignore
1041 else:
1028 else:
1042 # if not unknown and not ignored, drop dir recursion and step 2
1029 # if not unknown and not ignored, drop dir recursion and step 2
1043 ignore = util.always
1030 ignore = util.always
1044 dirignore = util.always
1031 dirignore = util.always
1045
1032
1046 matchfn = match.matchfn
1033 matchfn = match.matchfn
1047 matchalways = match.always()
1034 matchalways = match.always()
1048 matchtdir = match.traversedir
1035 matchtdir = match.traversedir
1049 dmap = self._map
1036 dmap = self._map
1050 listdir = util.listdir
1037 listdir = util.listdir
1051 lstat = os.lstat
1038 lstat = os.lstat
1052 dirkind = stat.S_IFDIR
1039 dirkind = stat.S_IFDIR
1053 regkind = stat.S_IFREG
1040 regkind = stat.S_IFREG
1054 lnkkind = stat.S_IFLNK
1041 lnkkind = stat.S_IFLNK
1055 join = self._join
1042 join = self._join
1056
1043
1057 exact = skipstep3 = False
1044 exact = skipstep3 = False
1058 if match.isexact(): # match.exact
1045 if match.isexact(): # match.exact
1059 exact = True
1046 exact = True
1060 dirignore = util.always # skip step 2
1047 dirignore = util.always # skip step 2
1061 elif match.prefix(): # match.match, no patterns
1048 elif match.prefix(): # match.match, no patterns
1062 skipstep3 = True
1049 skipstep3 = True
1063
1050
1064 if not exact and self._checkcase:
1051 if not exact and self._checkcase:
1065 normalize = self._normalize
1052 normalize = self._normalize
1066 normalizefile = self._normalizefile
1053 normalizefile = self._normalizefile
1067 skipstep3 = False
1054 skipstep3 = False
1068 else:
1055 else:
1069 normalize = self._normalize
1056 normalize = self._normalize
1070 normalizefile = None
1057 normalizefile = None
1071
1058
1072 # step 1: find all explicit files
1059 # step 1: find all explicit files
1073 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1060 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1074 if matchtdir:
1061 if matchtdir:
1075 for d in work:
1062 for d in work:
1076 matchtdir(d[0])
1063 matchtdir(d[0])
1077 for d in dirsnotfound:
1064 for d in dirsnotfound:
1078 matchtdir(d)
1065 matchtdir(d)
1079
1066
1080 skipstep3 = skipstep3 and not (work or dirsnotfound)
1067 skipstep3 = skipstep3 and not (work or dirsnotfound)
1081 work = [d for d in work if not dirignore(d[0])]
1068 work = [d for d in work if not dirignore(d[0])]
1082
1069
1083 # step 2: visit subdirectories
1070 # step 2: visit subdirectories
1084 def traverse(work, alreadynormed):
1071 def traverse(work, alreadynormed):
1085 wadd = work.append
1072 wadd = work.append
1086 while work:
1073 while work:
1087 tracing.counter('dirstate.walk work', len(work))
1074 tracing.counter('dirstate.walk work', len(work))
1088 nd = work.pop()
1075 nd = work.pop()
1089 visitentries = match.visitchildrenset(nd)
1076 visitentries = match.visitchildrenset(nd)
1090 if not visitentries:
1077 if not visitentries:
1091 continue
1078 continue
1092 if visitentries == b'this' or visitentries == b'all':
1079 if visitentries == b'this' or visitentries == b'all':
1093 visitentries = None
1080 visitentries = None
1094 skip = None
1081 skip = None
1095 if nd != b'':
1082 if nd != b'':
1096 skip = b'.hg'
1083 skip = b'.hg'
1097 try:
1084 try:
1098 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1085 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1099 entries = listdir(join(nd), stat=True, skip=skip)
1086 entries = listdir(join(nd), stat=True, skip=skip)
1100 except OSError as inst:
1087 except OSError as inst:
1101 if inst.errno in (errno.EACCES, errno.ENOENT):
1088 if inst.errno in (errno.EACCES, errno.ENOENT):
1102 match.bad(
1089 match.bad(
1103 self.pathto(nd), encoding.strtolocal(inst.strerror)
1090 self.pathto(nd), encoding.strtolocal(inst.strerror)
1104 )
1091 )
1105 continue
1092 continue
1106 raise
1093 raise
1107 for f, kind, st in entries:
1094 for f, kind, st in entries:
1108 # Some matchers may return files in the visitentries set,
1095 # Some matchers may return files in the visitentries set,
1109 # instead of 'this', if the matcher explicitly mentions them
1096 # instead of 'this', if the matcher explicitly mentions them
1110 # and is not an exactmatcher. This is acceptable; we do not
1097 # and is not an exactmatcher. This is acceptable; we do not
1111 # make any hard assumptions about file-or-directory below
1098 # make any hard assumptions about file-or-directory below
1112 # based on the presence of `f` in visitentries. If
1099 # based on the presence of `f` in visitentries. If
1113 # visitchildrenset returned a set, we can always skip the
1100 # visitchildrenset returned a set, we can always skip the
1114 # entries *not* in the set it provided regardless of whether
1101 # entries *not* in the set it provided regardless of whether
1115 # they're actually a file or a directory.
1102 # they're actually a file or a directory.
1116 if visitentries and f not in visitentries:
1103 if visitentries and f not in visitentries:
1117 continue
1104 continue
1118 if normalizefile:
1105 if normalizefile:
1119 # even though f might be a directory, we're only
1106 # even though f might be a directory, we're only
1120 # interested in comparing it to files currently in the
1107 # interested in comparing it to files currently in the
1121 # dmap -- therefore normalizefile is enough
1108 # dmap -- therefore normalizefile is enough
1122 nf = normalizefile(
1109 nf = normalizefile(
1123 nd and (nd + b"/" + f) or f, True, True
1110 nd and (nd + b"/" + f) or f, True, True
1124 )
1111 )
1125 else:
1112 else:
1126 nf = nd and (nd + b"/" + f) or f
1113 nf = nd and (nd + b"/" + f) or f
1127 if nf not in results:
1114 if nf not in results:
1128 if kind == dirkind:
1115 if kind == dirkind:
1129 if not ignore(nf):
1116 if not ignore(nf):
1130 if matchtdir:
1117 if matchtdir:
1131 matchtdir(nf)
1118 matchtdir(nf)
1132 wadd(nf)
1119 wadd(nf)
1133 if nf in dmap and (matchalways or matchfn(nf)):
1120 if nf in dmap and (matchalways or matchfn(nf)):
1134 results[nf] = None
1121 results[nf] = None
1135 elif kind == regkind or kind == lnkkind:
1122 elif kind == regkind or kind == lnkkind:
1136 if nf in dmap:
1123 if nf in dmap:
1137 if matchalways or matchfn(nf):
1124 if matchalways or matchfn(nf):
1138 results[nf] = st
1125 results[nf] = st
1139 elif (matchalways or matchfn(nf)) and not ignore(
1126 elif (matchalways or matchfn(nf)) and not ignore(
1140 nf
1127 nf
1141 ):
1128 ):
1142 # unknown file -- normalize if necessary
1129 # unknown file -- normalize if necessary
1143 if not alreadynormed:
1130 if not alreadynormed:
1144 nf = normalize(nf, False, True)
1131 nf = normalize(nf, False, True)
1145 results[nf] = st
1132 results[nf] = st
1146 elif nf in dmap and (matchalways or matchfn(nf)):
1133 elif nf in dmap and (matchalways or matchfn(nf)):
1147 results[nf] = None
1134 results[nf] = None
1148
1135
1149 for nd, d in work:
1136 for nd, d in work:
1150 # alreadynormed means that processwork doesn't have to do any
1137 # alreadynormed means that processwork doesn't have to do any
1151 # expensive directory normalization
1138 # expensive directory normalization
1152 alreadynormed = not normalize or nd == d
1139 alreadynormed = not normalize or nd == d
1153 traverse([d], alreadynormed)
1140 traverse([d], alreadynormed)
1154
1141
1155 for s in subrepos:
1142 for s in subrepos:
1156 del results[s]
1143 del results[s]
1157 del results[b'.hg']
1144 del results[b'.hg']
1158
1145
1159 # step 3: visit remaining files from dmap
1146 # step 3: visit remaining files from dmap
1160 if not skipstep3 and not exact:
1147 if not skipstep3 and not exact:
1161 # If a dmap file is not in results yet, it was either
1148 # If a dmap file is not in results yet, it was either
1162 # a) not matching matchfn b) ignored, c) missing, or d) under a
1149 # a) not matching matchfn b) ignored, c) missing, or d) under a
1163 # symlink directory.
1150 # symlink directory.
1164 if not results and matchalways:
1151 if not results and matchalways:
1165 visit = [f for f in dmap]
1152 visit = [f for f in dmap]
1166 else:
1153 else:
1167 visit = [f for f in dmap if f not in results and matchfn(f)]
1154 visit = [f for f in dmap if f not in results and matchfn(f)]
1168 visit.sort()
1155 visit.sort()
1169
1156
1170 if unknown:
1157 if unknown:
1171 # unknown == True means we walked all dirs under the roots
1158 # unknown == True means we walked all dirs under the roots
1172 # that wasn't ignored, and everything that matched was stat'ed
1159 # that wasn't ignored, and everything that matched was stat'ed
1173 # and is already in results.
1160 # and is already in results.
1174 # The rest must thus be ignored or under a symlink.
1161 # The rest must thus be ignored or under a symlink.
1175 audit_path = pathutil.pathauditor(self._root, cached=True)
1162 audit_path = pathutil.pathauditor(self._root, cached=True)
1176
1163
1177 for nf in iter(visit):
1164 for nf in iter(visit):
1178 # If a stat for the same file was already added with a
1165 # If a stat for the same file was already added with a
1179 # different case, don't add one for this, since that would
1166 # different case, don't add one for this, since that would
1180 # make it appear as if the file exists under both names
1167 # make it appear as if the file exists under both names
1181 # on disk.
1168 # on disk.
1182 if (
1169 if (
1183 normalizefile
1170 normalizefile
1184 and normalizefile(nf, True, True) in results
1171 and normalizefile(nf, True, True) in results
1185 ):
1172 ):
1186 results[nf] = None
1173 results[nf] = None
1187 # Report ignored items in the dmap as long as they are not
1174 # Report ignored items in the dmap as long as they are not
1188 # under a symlink directory.
1175 # under a symlink directory.
1189 elif audit_path.check(nf):
1176 elif audit_path.check(nf):
1190 try:
1177 try:
1191 results[nf] = lstat(join(nf))
1178 results[nf] = lstat(join(nf))
1192 # file was just ignored, no links, and exists
1179 # file was just ignored, no links, and exists
1193 except OSError:
1180 except OSError:
1194 # file doesn't exist
1181 # file doesn't exist
1195 results[nf] = None
1182 results[nf] = None
1196 else:
1183 else:
1197 # It's either missing or under a symlink directory
1184 # It's either missing or under a symlink directory
1198 # which we in this case report as missing
1185 # which we in this case report as missing
1199 results[nf] = None
1186 results[nf] = None
1200 else:
1187 else:
1201 # We may not have walked the full directory tree above,
1188 # We may not have walked the full directory tree above,
1202 # so stat and check everything we missed.
1189 # so stat and check everything we missed.
1203 iv = iter(visit)
1190 iv = iter(visit)
1204 for st in util.statfiles([join(i) for i in visit]):
1191 for st in util.statfiles([join(i) for i in visit]):
1205 results[next(iv)] = st
1192 results[next(iv)] = st
1206 return results
1193 return results
1207
1194
    def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
        """Run the Rust implementation of status and return (lookup, status).

        ``lookup`` lists files whose state cannot be decided without reading
        their content; ``status`` is a ``scmutil.status`` tuple.  May raise
        ``rustmod.FallbackError`` (handled by the caller) when the Rust side
        cannot handle the request.
        """
        # Force Rayon (Rust parallelism library) to respect the number of
        # workers. This is a temporary workaround until Rust code knows
        # how to read the config file.
        numcpus = self._ui.configint(b"worker", b"numcpus")
        if numcpus is not None:
            encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)

        workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
        if not workers_enabled:
            # a single Rayon thread emulates "workers disabled"
            encoding.environ[b"RAYON_NUM_THREADS"] = b"1"

        (
            lookup,
            modified,
            added,
            removed,
            deleted,
            clean,
            ignored,
            unknown,
            warnings,
            bad,
            traversed,
            dirty,
        ) = rustmod.status(
            self._map._map,
            matcher,
            self._rootdir,
            self._ignorefiles(),
            self._checkexec,
            self._lastnormaltime,
            bool(list_clean),
            bool(list_ignored),
            bool(list_unknown),
            bool(matcher.traversedir),
        )

        # NOTE(review): ``dirty`` presumably signals that the Rust call
        # mutated the dirstate map (e.g. cached data) — confirm against the
        # rustmod.status implementation.
        self._dirty |= dirty

        # replay directory traversal callbacks the matcher asked for
        if matcher.traversedir:
            for dir in traversed:
                matcher.traversedir(dir)

        if self._ui.warn:
            for item in warnings:
                if isinstance(item, tuple):
                    # (file, syntax) pair: invalid pattern syntax in an
                    # ignore file
                    file_path, syntax = item
                    msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
                        file_path,
                        syntax,
                    )
                    self._ui.warn(msg)
                else:
                    # bare path: the ignore file itself was unreadable
                    msg = _(b"skipping unreadable pattern file '%s': %s\n")
                    self._ui.warn(
                        msg
                        % (
                            pathutil.canonpath(
                                self._rootdir, self._rootdir, item
                            ),
                            b"No such file or directory",
                        )
                    )

        for (fn, message) in bad:
            matcher.bad(fn, encoding.strtolocal(message))

        status = scmutil.status(
            modified=modified,
            added=added,
            removed=removed,
            deleted=deleted,
            unknown=unknown,
            ignored=ignored,
            clean=clean,
        )
        return (lookup, status)
1286
1273
    def status(self, match, subrepos, ignored, clean, unknown):
        """Determine the status of the working copy relative to the
        dirstate and return a pair of (unsure, status), where status is of type
        scmutil.status and:

          unsure:
            files that might have been modified since the dirstate was
            written, but need to be read to be sure (size is the same
            but mtime differs)
          status.modified:
            files that have definitely been modified since the dirstate
            was written (different size or mode)
          status.clean:
            files that have definitely not been modified since the
            dirstate was written
        """
        # keep the caller's flags under distinct names; the plain names are
        # reused below as result lists
        listignored, listclean, listunknown = ignored, clean, unknown
        lookup, modified, added, unknown, ignored = [], [], [], [], []
        removed, deleted, clean = [], [], []

        dmap = self._map
        dmap.preload()

        # try the Rust fast path unless one of the unsupported
        # configurations below applies
        use_rust = True

        allowed_matchers = (
            matchmod.alwaysmatcher,
            matchmod.exactmatcher,
            matchmod.includematcher,
        )

        if rustmod is None:
            use_rust = False
        elif self._checkcase:
            # Case-insensitive filesystems are not handled yet
            use_rust = False
        elif subrepos:
            use_rust = False
        elif sparse.enabled:
            use_rust = False
        elif not isinstance(match, allowed_matchers):
            # Some matchers have yet to be implemented
            use_rust = False

        if use_rust:
            try:
                return self._rust_status(
                    match, listclean, listignored, listunknown
                )
            except rustmod.FallbackError:
                # fall through to the pure-Python implementation
                pass

        def noop(f):
            pass

        # bind hot attributes and list methods to locals; the walk loop
        # below runs once per file in the working directory
        dcontains = dmap.__contains__
        dget = dmap.__getitem__
        ladd = lookup.append  # aka "unsure"
        madd = modified.append
        aadd = added.append
        uadd = unknown.append if listunknown else noop
        iadd = ignored.append if listignored else noop
        radd = removed.append
        dadd = deleted.append
        cadd = clean.append if listclean else noop
        mexact = match.exact
        dirignore = self._dirignore
        checkexec = self._checkexec
        checklink = self._checklink
        copymap = self._map.copymap
        lastnormaltime = self._lastnormaltime

        # We need to do full walks when either
        # - we're listing all clean files, or
        # - match.traversedir does something, because match.traversedir should
        #   be called for every dir in the working dir
        full = listclean or match.traversedir is not None
        for fn, st in pycompat.iteritems(
            self.walk(match, subrepos, listunknown, listignored, full=full)
        ):
            if not dcontains(fn):
                # file on disk but not in the dirstate: ignored or unknown
                if (listignored or mexact(fn)) and dirignore(fn):
                    if listignored:
                        iadd(fn)
                else:
                    uadd(fn)
                continue

            t = dget(fn)
            mode = t.mode
            size = t.size

            if not st and t.tracked:
                # tracked but gone from disk
                dadd(fn)
            elif t.p2_info:
                madd(fn)
            elif t.added:
                aadd(fn)
            elif t.removed:
                radd(fn)
            elif t.tracked:
                if not checklink and t.has_fallback_symlink:
                    # If the file system does not support symlink, the mode
                    # might not be correctly stored in the dirstate, so do not
                    # trust it.
                    ladd(fn)
                elif not checkexec and t.has_fallback_exec:
                    # If the file system does not support exec bits, the mode
                    # might not be correctly stored in the dirstate, so do not
                    # trust it.
                    ladd(fn)
                elif (
                    size >= 0
                    and (
                        (size != st.st_size and size != st.st_size & _rangemask)
                        or ((mode ^ st.st_mode) & 0o100 and checkexec)
                    )
                    or fn in copymap
                ):
                    if stat.S_ISLNK(st.st_mode) and size != st.st_size:
                        # issue6456: Size returned may be longer due to
                        # encryption on EXT-4 fscrypt, undecided.
                        ladd(fn)
                    else:
                        madd(fn)
                elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
                    # same size/mode but different mtime: must read content
                    ladd(fn)
                elif timestamp.mtime_of(st) == lastnormaltime:
                    # fn may have just been marked as normal and it may have
                    # changed in the same second without changing its size.
                    # This can happen if we quickly do multiple commits.
                    # Force lookup, so we don't miss such a racy file change.
                    ladd(fn)
                elif listclean:
                    cadd(fn)
        status = scmutil.status(
            modified, added, removed, deleted, unknown, ignored, clean
        )
        return (lookup, status)
1426
1413
1427 def matches(self, match):
1414 def matches(self, match):
1428 """
1415 """
1429 return files in the dirstate (in whatever state) filtered by match
1416 return files in the dirstate (in whatever state) filtered by match
1430 """
1417 """
1431 dmap = self._map
1418 dmap = self._map
1432 if rustmod is not None:
1419 if rustmod is not None:
1433 dmap = self._map._map
1420 dmap = self._map._map
1434
1421
1435 if match.always():
1422 if match.always():
1436 return dmap.keys()
1423 return dmap.keys()
1437 files = match.files()
1424 files = match.files()
1438 if match.isexact():
1425 if match.isexact():
1439 # fast path -- filter the other way around, since typically files is
1426 # fast path -- filter the other way around, since typically files is
1440 # much smaller than dmap
1427 # much smaller than dmap
1441 return [f for f in files if f in dmap]
1428 return [f for f in files if f in dmap]
1442 if match.prefix() and all(fn in dmap for fn in files):
1429 if match.prefix() and all(fn in dmap for fn in files):
1443 # fast path -- all the values are known to be files, so just return
1430 # fast path -- all the values are known to be files, so just return
1444 # that
1431 # that
1445 return list(files)
1432 return list(files)
1446 return [f for f in dmap if match(f)]
1433 return [f for f in dmap if match(f)]
1447
1434
1448 def _actualfilename(self, tr):
1435 def _actualfilename(self, tr):
1449 if tr:
1436 if tr:
1450 return self._pendingfilename
1437 return self._pendingfilename
1451 else:
1438 else:
1452 return self._filename
1439 return self._filename
1453
1440
    def savebackup(self, tr, backupname):
        '''Save current dirstate into backup file

        ``tr`` is the active transaction, if any; it selects whether the
        pending or the regular dirstate file is the one being backed up.
        '''
        filename = self._actualfilename(tr)
        assert backupname != filename

        # use '_writedirstate' instead of 'write' to write changes certainly,
        # because the latter omits writing out if transaction is running.
        # output file will be used to create backup of dirstate at this point.
        if self._dirty or not self._opener.exists(filename):
            self._writedirstate(
                tr,
                self._opener(filename, b"w", atomictemp=True, checkambig=True),
            )

        if tr:
            # ensure that subsequent tr.writepending returns True for
            # changes written out above, even if dirstate is never
            # changed after this
            tr.addfilegenerator(
                b'dirstate',
                (self._filename,),
                lambda f: self._writedirstate(tr, f),
                location=b'plain',
            )

            # ensure that pending file written above is unlinked at
            # failure, even if tr.writepending isn't invoked until the
            # end of this transaction
            tr.registertmp(filename, location=b'plain')

        self._opener.tryunlink(backupname)
        # hardlink backup is okay because _writedirstate is always called
        # with an "atomictemp=True" file.
        util.copyfile(
            self._opener.join(filename),
            self._opener.join(backupname),
            hardlink=True,
        )
1492
1479
    def restorebackup(self, tr, backupname):
        '''Restore dirstate by backup file

        Replaces the current dirstate file with ``backupname``; any unsaved
        in-memory changes are discarded first.
        '''
        # this "invalidate()" prevents "wlock.release()" from writing
        # changes of dirstate out after restoring from backup file
        self.invalidate()
        filename = self._actualfilename(tr)
        o = self._opener
        if util.samefile(o.join(backupname), o.join(filename)):
            # backup and target are already the same file (e.g. hardlinked);
            # dropping the backup name is enough
            o.unlink(backupname)
        else:
            o.rename(backupname, filename, checkambig=True)
1504
1491
1505 def clearbackup(self, tr, backupname):
1492 def clearbackup(self, tr, backupname):
1506 '''Clear backup file'''
1493 '''Clear backup file'''
1507 self._opener.unlink(backupname)
1494 self._opener.unlink(backupname)
1508
1495
1509 def verify(self, m1, m2):
1496 def verify(self, m1, m2):
1510 """check the dirstate content again the parent manifest and yield errors"""
1497 """check the dirstate content again the parent manifest and yield errors"""
1511 missing_from_p1 = b"%s in state %s, but not in manifest1\n"
1498 missing_from_p1 = b"%s in state %s, but not in manifest1\n"
1512 unexpected_in_p1 = b"%s in state %s, but also in manifest1\n"
1499 unexpected_in_p1 = b"%s in state %s, but also in manifest1\n"
1513 missing_from_ps = b"%s in state %s, but not in either manifest\n"
1500 missing_from_ps = b"%s in state %s, but not in either manifest\n"
1514 missing_from_ds = b"%s in manifest1, but listed as state %s\n"
1501 missing_from_ds = b"%s in manifest1, but listed as state %s\n"
1515 for f, entry in self.items():
1502 for f, entry in self.items():
1516 state = entry.state
1503 state = entry.state
1517 if state in b"nr" and f not in m1:
1504 if state in b"nr" and f not in m1:
1518 yield (missing_from_p1, f, state)
1505 yield (missing_from_p1, f, state)
1519 if state in b"a" and f in m1:
1506 if state in b"a" and f in m1:
1520 yield (unexpected_in_p1, f, state)
1507 yield (unexpected_in_p1, f, state)
1521 if state in b"m" and f not in m1 and f not in m2:
1508 if state in b"m" and f not in m1 and f not in m2:
1522 yield (missing_from_ps, f, state)
1509 yield (missing_from_ps, f, state)
1523 for f in m1:
1510 for f in m1:
1524 state = self.get_entry(f).state
1511 state = self.get_entry(f).state
1525 if state not in b"nrm":
1512 if state not in b"nrm":
1526 yield (missing_from_ds, f, state)
1513 yield (missing_from_ds, f, state)
General Comments 0
You need to be logged in to leave comments. Login now