##// END OF EJS Templates
dirstate: introduce an internal `_drop` method...
marmoute -
r48391:3d8b639b default
parent child Browse files
Show More
@@ -1,1444 +1,1448 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .pycompat import delattr
17 from .pycompat import delattr
18
18
19 from hgdemandimport import tracing
19 from hgdemandimport import tracing
20
20
21 from . import (
21 from . import (
22 dirstatemap,
22 dirstatemap,
23 encoding,
23 encoding,
24 error,
24 error,
25 match as matchmod,
25 match as matchmod,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 sparse,
30 sparse,
31 util,
31 util,
32 )
32 )
33
33
34 from .interfaces import (
34 from .interfaces import (
35 dirstate as intdirstate,
35 dirstate as intdirstate,
36 util as interfaceutil,
36 util as interfaceutil,
37 )
37 )
38
38
parsers = policy.importmod('parsers')
rustmod = policy.importrust('dirstate')

# dirstate-v2 is only available when the Rust extensions are present
SUPPORTS_DIRSTATE_V2 = rustmod is not None

# local aliases for frequently used helpers
propertycache = util.propertycache
filecache = scmutil.filecache
_rangemask = dirstatemap.rangemask

DirstateItem = parsers.DirstateItem
49
49
50
50
class repocache(filecache):
    """filecache for files in .hg/

    Cache keys are resolved through the owning object's opener, so the
    tracked stat information follows files inside the ``.hg`` directory.
    """

    def join(self, obj, fname):
        # resolve fname relative to obj's .hg/ opener
        return obj._opener.join(fname)
56
56
57
57
class rootcache(filecache):
    """filecache for files in the repository root

    Cache keys are resolved relative to the repository root via the
    owning dirstate's ``_join``.
    """

    def join(self, obj, fname):
        # resolve fname relative to the repository root
        return obj._join(fname)
63
63
64
64
65 def _getfsnow(vfs):
65 def _getfsnow(vfs):
66 '''Get "now" timestamp on filesystem'''
66 '''Get "now" timestamp on filesystem'''
67 tmpfd, tmpname = vfs.mkstemp()
67 tmpfd, tmpname = vfs.mkstemp()
68 try:
68 try:
69 return os.fstat(tmpfd)[stat.ST_MTIME]
69 return os.fstat(tmpfd)[stat.ST_MTIME]
70 finally:
70 finally:
71 os.close(tmpfd)
71 os.close(tmpfd)
72 vfs.unlink(tmpname)
72 vfs.unlink(tmpname)
73
73
74
74
75 @interfaceutil.implementer(intdirstate.idirstate)
75 @interfaceutil.implementer(intdirstate.idirstate)
76 class dirstate(object):
76 class dirstate(object):
77 def __init__(
77 def __init__(
78 self,
78 self,
79 opener,
79 opener,
80 ui,
80 ui,
81 root,
81 root,
82 validate,
82 validate,
83 sparsematchfn,
83 sparsematchfn,
84 nodeconstants,
84 nodeconstants,
85 use_dirstate_v2,
85 use_dirstate_v2,
86 ):
86 ):
87 """Create a new dirstate object.
87 """Create a new dirstate object.
88
88
89 opener is an open()-like callable that can be used to open the
89 opener is an open()-like callable that can be used to open the
90 dirstate file; root is the root of the directory tracked by
90 dirstate file; root is the root of the directory tracked by
91 the dirstate.
91 the dirstate.
92 """
92 """
93 self._use_dirstate_v2 = use_dirstate_v2
93 self._use_dirstate_v2 = use_dirstate_v2
94 self._nodeconstants = nodeconstants
94 self._nodeconstants = nodeconstants
95 self._opener = opener
95 self._opener = opener
96 self._validate = validate
96 self._validate = validate
97 self._root = root
97 self._root = root
98 self._sparsematchfn = sparsematchfn
98 self._sparsematchfn = sparsematchfn
99 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
99 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
100 # UNC path pointing to root share (issue4557)
100 # UNC path pointing to root share (issue4557)
101 self._rootdir = pathutil.normasprefix(root)
101 self._rootdir = pathutil.normasprefix(root)
102 self._dirty = False
102 self._dirty = False
103 self._lastnormaltime = 0
103 self._lastnormaltime = 0
104 self._ui = ui
104 self._ui = ui
105 self._filecache = {}
105 self._filecache = {}
106 self._parentwriters = 0
106 self._parentwriters = 0
107 self._filename = b'dirstate'
107 self._filename = b'dirstate'
108 self._pendingfilename = b'%s.pending' % self._filename
108 self._pendingfilename = b'%s.pending' % self._filename
109 self._plchangecallbacks = {}
109 self._plchangecallbacks = {}
110 self._origpl = None
110 self._origpl = None
111 self._updatedfiles = set()
111 self._updatedfiles = set()
112 self._mapcls = dirstatemap.dirstatemap
112 self._mapcls = dirstatemap.dirstatemap
113 # Access and cache cwd early, so we don't access it for the first time
113 # Access and cache cwd early, so we don't access it for the first time
114 # after a working-copy update caused it to not exist (accessing it then
114 # after a working-copy update caused it to not exist (accessing it then
115 # raises an exception).
115 # raises an exception).
116 self._cwd
116 self._cwd
117
117
118 def prefetch_parents(self):
118 def prefetch_parents(self):
119 """make sure the parents are loaded
119 """make sure the parents are loaded
120
120
121 Used to avoid a race condition.
121 Used to avoid a race condition.
122 """
122 """
123 self._pl
123 self._pl
124
124
125 @contextlib.contextmanager
125 @contextlib.contextmanager
126 def parentchange(self):
126 def parentchange(self):
127 """Context manager for handling dirstate parents.
127 """Context manager for handling dirstate parents.
128
128
129 If an exception occurs in the scope of the context manager,
129 If an exception occurs in the scope of the context manager,
130 the incoherent dirstate won't be written when wlock is
130 the incoherent dirstate won't be written when wlock is
131 released.
131 released.
132 """
132 """
133 self._parentwriters += 1
133 self._parentwriters += 1
134 yield
134 yield
135 # Typically we want the "undo" step of a context manager in a
135 # Typically we want the "undo" step of a context manager in a
136 # finally block so it happens even when an exception
136 # finally block so it happens even when an exception
137 # occurs. In this case, however, we only want to decrement
137 # occurs. In this case, however, we only want to decrement
138 # parentwriters if the code in the with statement exits
138 # parentwriters if the code in the with statement exits
139 # normally, so we don't have a try/finally here on purpose.
139 # normally, so we don't have a try/finally here on purpose.
140 self._parentwriters -= 1
140 self._parentwriters -= 1
141
141
142 def pendingparentchange(self):
142 def pendingparentchange(self):
143 """Returns true if the dirstate is in the middle of a set of changes
143 """Returns true if the dirstate is in the middle of a set of changes
144 that modify the dirstate parent.
144 that modify the dirstate parent.
145 """
145 """
146 return self._parentwriters > 0
146 return self._parentwriters > 0
147
147
148 @propertycache
148 @propertycache
149 def _map(self):
149 def _map(self):
150 """Return the dirstate contents (see documentation for dirstatemap)."""
150 """Return the dirstate contents (see documentation for dirstatemap)."""
151 self._map = self._mapcls(
151 self._map = self._mapcls(
152 self._ui,
152 self._ui,
153 self._opener,
153 self._opener,
154 self._root,
154 self._root,
155 self._nodeconstants,
155 self._nodeconstants,
156 self._use_dirstate_v2,
156 self._use_dirstate_v2,
157 )
157 )
158 return self._map
158 return self._map
159
159
160 @property
160 @property
161 def _sparsematcher(self):
161 def _sparsematcher(self):
162 """The matcher for the sparse checkout.
162 """The matcher for the sparse checkout.
163
163
164 The working directory may not include every file from a manifest. The
164 The working directory may not include every file from a manifest. The
165 matcher obtained by this property will match a path if it is to be
165 matcher obtained by this property will match a path if it is to be
166 included in the working directory.
166 included in the working directory.
167 """
167 """
168 # TODO there is potential to cache this property. For now, the matcher
168 # TODO there is potential to cache this property. For now, the matcher
169 # is resolved on every access. (But the called function does use a
169 # is resolved on every access. (But the called function does use a
170 # cache to keep the lookup fast.)
170 # cache to keep the lookup fast.)
171 return self._sparsematchfn()
171 return self._sparsematchfn()
172
172
173 @repocache(b'branch')
173 @repocache(b'branch')
174 def _branch(self):
174 def _branch(self):
175 try:
175 try:
176 return self._opener.read(b"branch").strip() or b"default"
176 return self._opener.read(b"branch").strip() or b"default"
177 except IOError as inst:
177 except IOError as inst:
178 if inst.errno != errno.ENOENT:
178 if inst.errno != errno.ENOENT:
179 raise
179 raise
180 return b"default"
180 return b"default"
181
181
182 @property
182 @property
183 def _pl(self):
183 def _pl(self):
184 return self._map.parents()
184 return self._map.parents()
185
185
186 def hasdir(self, d):
186 def hasdir(self, d):
187 return self._map.hastrackeddir(d)
187 return self._map.hastrackeddir(d)
188
188
189 @rootcache(b'.hgignore')
189 @rootcache(b'.hgignore')
190 def _ignore(self):
190 def _ignore(self):
191 files = self._ignorefiles()
191 files = self._ignorefiles()
192 if not files:
192 if not files:
193 return matchmod.never()
193 return matchmod.never()
194
194
195 pats = [b'include:%s' % f for f in files]
195 pats = [b'include:%s' % f for f in files]
196 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
196 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
197
197
198 @propertycache
198 @propertycache
199 def _slash(self):
199 def _slash(self):
200 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
200 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
201
201
202 @propertycache
202 @propertycache
203 def _checklink(self):
203 def _checklink(self):
204 return util.checklink(self._root)
204 return util.checklink(self._root)
205
205
206 @propertycache
206 @propertycache
207 def _checkexec(self):
207 def _checkexec(self):
208 return bool(util.checkexec(self._root))
208 return bool(util.checkexec(self._root))
209
209
210 @propertycache
210 @propertycache
211 def _checkcase(self):
211 def _checkcase(self):
212 return not util.fscasesensitive(self._join(b'.hg'))
212 return not util.fscasesensitive(self._join(b'.hg'))
213
213
214 def _join(self, f):
214 def _join(self, f):
215 # much faster than os.path.join()
215 # much faster than os.path.join()
216 # it's safe because f is always a relative path
216 # it's safe because f is always a relative path
217 return self._rootdir + f
217 return self._rootdir + f
218
218
219 def flagfunc(self, buildfallback):
219 def flagfunc(self, buildfallback):
220 if self._checklink and self._checkexec:
220 if self._checklink and self._checkexec:
221
221
222 def f(x):
222 def f(x):
223 try:
223 try:
224 st = os.lstat(self._join(x))
224 st = os.lstat(self._join(x))
225 if util.statislink(st):
225 if util.statislink(st):
226 return b'l'
226 return b'l'
227 if util.statisexec(st):
227 if util.statisexec(st):
228 return b'x'
228 return b'x'
229 except OSError:
229 except OSError:
230 pass
230 pass
231 return b''
231 return b''
232
232
233 return f
233 return f
234
234
235 fallback = buildfallback()
235 fallback = buildfallback()
236 if self._checklink:
236 if self._checklink:
237
237
238 def f(x):
238 def f(x):
239 if os.path.islink(self._join(x)):
239 if os.path.islink(self._join(x)):
240 return b'l'
240 return b'l'
241 if b'x' in fallback(x):
241 if b'x' in fallback(x):
242 return b'x'
242 return b'x'
243 return b''
243 return b''
244
244
245 return f
245 return f
246 if self._checkexec:
246 if self._checkexec:
247
247
248 def f(x):
248 def f(x):
249 if b'l' in fallback(x):
249 if b'l' in fallback(x):
250 return b'l'
250 return b'l'
251 if util.isexec(self._join(x)):
251 if util.isexec(self._join(x)):
252 return b'x'
252 return b'x'
253 return b''
253 return b''
254
254
255 return f
255 return f
256 else:
256 else:
257 return fallback
257 return fallback
258
258
259 @propertycache
259 @propertycache
260 def _cwd(self):
260 def _cwd(self):
261 # internal config: ui.forcecwd
261 # internal config: ui.forcecwd
262 forcecwd = self._ui.config(b'ui', b'forcecwd')
262 forcecwd = self._ui.config(b'ui', b'forcecwd')
263 if forcecwd:
263 if forcecwd:
264 return forcecwd
264 return forcecwd
265 return encoding.getcwd()
265 return encoding.getcwd()
266
266
267 def getcwd(self):
267 def getcwd(self):
268 """Return the path from which a canonical path is calculated.
268 """Return the path from which a canonical path is calculated.
269
269
270 This path should be used to resolve file patterns or to convert
270 This path should be used to resolve file patterns or to convert
271 canonical paths back to file paths for display. It shouldn't be
271 canonical paths back to file paths for display. It shouldn't be
272 used to get real file paths. Use vfs functions instead.
272 used to get real file paths. Use vfs functions instead.
273 """
273 """
274 cwd = self._cwd
274 cwd = self._cwd
275 if cwd == self._root:
275 if cwd == self._root:
276 return b''
276 return b''
277 # self._root ends with a path separator if self._root is '/' or 'C:\'
277 # self._root ends with a path separator if self._root is '/' or 'C:\'
278 rootsep = self._root
278 rootsep = self._root
279 if not util.endswithsep(rootsep):
279 if not util.endswithsep(rootsep):
280 rootsep += pycompat.ossep
280 rootsep += pycompat.ossep
281 if cwd.startswith(rootsep):
281 if cwd.startswith(rootsep):
282 return cwd[len(rootsep) :]
282 return cwd[len(rootsep) :]
283 else:
283 else:
284 # we're outside the repo. return an absolute path.
284 # we're outside the repo. return an absolute path.
285 return cwd
285 return cwd
286
286
287 def pathto(self, f, cwd=None):
287 def pathto(self, f, cwd=None):
288 if cwd is None:
288 if cwd is None:
289 cwd = self.getcwd()
289 cwd = self.getcwd()
290 path = util.pathto(self._root, cwd, f)
290 path = util.pathto(self._root, cwd, f)
291 if self._slash:
291 if self._slash:
292 return util.pconvert(path)
292 return util.pconvert(path)
293 return path
293 return path
294
294
295 def __getitem__(self, key):
295 def __getitem__(self, key):
296 """Return the current state of key (a filename) in the dirstate.
296 """Return the current state of key (a filename) in the dirstate.
297
297
298 States are:
298 States are:
299 n normal
299 n normal
300 m needs merging
300 m needs merging
301 r marked for removal
301 r marked for removal
302 a marked for addition
302 a marked for addition
303 ? not tracked
303 ? not tracked
304
304
305 XXX The "state" is a bit obscure to be in the "public" API. we should
305 XXX The "state" is a bit obscure to be in the "public" API. we should
306 consider migrating all user of this to going through the dirstate entry
306 consider migrating all user of this to going through the dirstate entry
307 instead.
307 instead.
308 """
308 """
309 entry = self._map.get(key)
309 entry = self._map.get(key)
310 if entry is not None:
310 if entry is not None:
311 return entry.state
311 return entry.state
312 return b'?'
312 return b'?'
313
313
314 def __contains__(self, key):
314 def __contains__(self, key):
315 return key in self._map
315 return key in self._map
316
316
317 def __iter__(self):
317 def __iter__(self):
318 return iter(sorted(self._map))
318 return iter(sorted(self._map))
319
319
320 def items(self):
320 def items(self):
321 return pycompat.iteritems(self._map)
321 return pycompat.iteritems(self._map)
322
322
323 iteritems = items
323 iteritems = items
324
324
325 def directories(self):
325 def directories(self):
326 return self._map.directories()
326 return self._map.directories()
327
327
328 def parents(self):
328 def parents(self):
329 return [self._validate(p) for p in self._pl]
329 return [self._validate(p) for p in self._pl]
330
330
331 def p1(self):
331 def p1(self):
332 return self._validate(self._pl[0])
332 return self._validate(self._pl[0])
333
333
334 def p2(self):
334 def p2(self):
335 return self._validate(self._pl[1])
335 return self._validate(self._pl[1])
336
336
337 @property
337 @property
338 def in_merge(self):
338 def in_merge(self):
339 """True if a merge is in progress"""
339 """True if a merge is in progress"""
340 return self._pl[1] != self._nodeconstants.nullid
340 return self._pl[1] != self._nodeconstants.nullid
341
341
342 def branch(self):
342 def branch(self):
343 return encoding.tolocal(self._branch)
343 return encoding.tolocal(self._branch)
344
344
345 def setparents(self, p1, p2=None):
345 def setparents(self, p1, p2=None):
346 """Set dirstate parents to p1 and p2.
346 """Set dirstate parents to p1 and p2.
347
347
348 When moving from two parents to one, "merged" entries a
348 When moving from two parents to one, "merged" entries a
349 adjusted to normal and previous copy records discarded and
349 adjusted to normal and previous copy records discarded and
350 returned by the call.
350 returned by the call.
351
351
352 See localrepo.setparents()
352 See localrepo.setparents()
353 """
353 """
354 if p2 is None:
354 if p2 is None:
355 p2 = self._nodeconstants.nullid
355 p2 = self._nodeconstants.nullid
356 if self._parentwriters == 0:
356 if self._parentwriters == 0:
357 raise ValueError(
357 raise ValueError(
358 b"cannot set dirstate parent outside of "
358 b"cannot set dirstate parent outside of "
359 b"dirstate.parentchange context manager"
359 b"dirstate.parentchange context manager"
360 )
360 )
361
361
362 self._dirty = True
362 self._dirty = True
363 oldp2 = self._pl[1]
363 oldp2 = self._pl[1]
364 if self._origpl is None:
364 if self._origpl is None:
365 self._origpl = self._pl
365 self._origpl = self._pl
366 self._map.setparents(p1, p2)
366 self._map.setparents(p1, p2)
367 copies = {}
367 copies = {}
368 if (
368 if (
369 oldp2 != self._nodeconstants.nullid
369 oldp2 != self._nodeconstants.nullid
370 and p2 == self._nodeconstants.nullid
370 and p2 == self._nodeconstants.nullid
371 ):
371 ):
372 candidatefiles = self._map.non_normal_or_other_parent_paths()
372 candidatefiles = self._map.non_normal_or_other_parent_paths()
373
373
374 for f in candidatefiles:
374 for f in candidatefiles:
375 s = self._map.get(f)
375 s = self._map.get(f)
376 if s is None:
376 if s is None:
377 continue
377 continue
378
378
379 # Discard "merged" markers when moving away from a merge state
379 # Discard "merged" markers when moving away from a merge state
380 if s.merged:
380 if s.merged:
381 source = self._map.copymap.get(f)
381 source = self._map.copymap.get(f)
382 if source:
382 if source:
383 copies[f] = source
383 copies[f] = source
384 self.normallookup(f)
384 self.normallookup(f)
385 # Also fix up otherparent markers
385 # Also fix up otherparent markers
386 elif s.from_p2:
386 elif s.from_p2:
387 source = self._map.copymap.get(f)
387 source = self._map.copymap.get(f)
388 if source:
388 if source:
389 copies[f] = source
389 copies[f] = source
390 self._add(f)
390 self._add(f)
391 return copies
391 return copies
392
392
393 def setbranch(self, branch):
393 def setbranch(self, branch):
394 self.__class__._branch.set(self, encoding.fromlocal(branch))
394 self.__class__._branch.set(self, encoding.fromlocal(branch))
395 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
395 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
396 try:
396 try:
397 f.write(self._branch + b'\n')
397 f.write(self._branch + b'\n')
398 f.close()
398 f.close()
399
399
400 # make sure filecache has the correct stat info for _branch after
400 # make sure filecache has the correct stat info for _branch after
401 # replacing the underlying file
401 # replacing the underlying file
402 ce = self._filecache[b'_branch']
402 ce = self._filecache[b'_branch']
403 if ce:
403 if ce:
404 ce.refresh()
404 ce.refresh()
405 except: # re-raises
405 except: # re-raises
406 f.discard()
406 f.discard()
407 raise
407 raise
408
408
409 def invalidate(self):
409 def invalidate(self):
410 """Causes the next access to reread the dirstate.
410 """Causes the next access to reread the dirstate.
411
411
412 This is different from localrepo.invalidatedirstate() because it always
412 This is different from localrepo.invalidatedirstate() because it always
413 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
413 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
414 check whether the dirstate has changed before rereading it."""
414 check whether the dirstate has changed before rereading it."""
415
415
416 for a in ("_map", "_branch", "_ignore"):
416 for a in ("_map", "_branch", "_ignore"):
417 if a in self.__dict__:
417 if a in self.__dict__:
418 delattr(self, a)
418 delattr(self, a)
419 self._lastnormaltime = 0
419 self._lastnormaltime = 0
420 self._dirty = False
420 self._dirty = False
421 self._updatedfiles.clear()
421 self._updatedfiles.clear()
422 self._parentwriters = 0
422 self._parentwriters = 0
423 self._origpl = None
423 self._origpl = None
424
424
425 def copy(self, source, dest):
425 def copy(self, source, dest):
426 """Mark dest as a copy of source. Unmark dest if source is None."""
426 """Mark dest as a copy of source. Unmark dest if source is None."""
427 if source == dest:
427 if source == dest:
428 return
428 return
429 self._dirty = True
429 self._dirty = True
430 if source is not None:
430 if source is not None:
431 self._map.copymap[dest] = source
431 self._map.copymap[dest] = source
432 self._updatedfiles.add(source)
432 self._updatedfiles.add(source)
433 self._updatedfiles.add(dest)
433 self._updatedfiles.add(dest)
434 elif self._map.copymap.pop(dest, None):
434 elif self._map.copymap.pop(dest, None):
435 self._updatedfiles.add(dest)
435 self._updatedfiles.add(dest)
436
436
437 def copied(self, file):
437 def copied(self, file):
438 return self._map.copymap.get(file, None)
438 return self._map.copymap.get(file, None)
439
439
440 def copies(self):
440 def copies(self):
441 return self._map.copymap
441 return self._map.copymap
442
442
443 def _addpath(
443 def _addpath(
444 self,
444 self,
445 f,
445 f,
446 mode=0,
446 mode=0,
447 size=None,
447 size=None,
448 mtime=None,
448 mtime=None,
449 added=False,
449 added=False,
450 merged=False,
450 merged=False,
451 from_p2=False,
451 from_p2=False,
452 possibly_dirty=False,
452 possibly_dirty=False,
453 ):
453 ):
454 entry = self._map.get(f)
454 entry = self._map.get(f)
455 if added or entry is not None and entry.removed:
455 if added or entry is not None and entry.removed:
456 scmutil.checkfilename(f)
456 scmutil.checkfilename(f)
457 if self._map.hastrackeddir(f):
457 if self._map.hastrackeddir(f):
458 msg = _(b'directory %r already in dirstate')
458 msg = _(b'directory %r already in dirstate')
459 msg %= pycompat.bytestr(f)
459 msg %= pycompat.bytestr(f)
460 raise error.Abort(msg)
460 raise error.Abort(msg)
461 # shadows
461 # shadows
462 for d in pathutil.finddirs(f):
462 for d in pathutil.finddirs(f):
463 if self._map.hastrackeddir(d):
463 if self._map.hastrackeddir(d):
464 break
464 break
465 entry = self._map.get(d)
465 entry = self._map.get(d)
466 if entry is not None and not entry.removed:
466 if entry is not None and not entry.removed:
467 msg = _(b'file %r in dirstate clashes with %r')
467 msg = _(b'file %r in dirstate clashes with %r')
468 msg %= (pycompat.bytestr(d), pycompat.bytestr(f))
468 msg %= (pycompat.bytestr(d), pycompat.bytestr(f))
469 raise error.Abort(msg)
469 raise error.Abort(msg)
470 self._dirty = True
470 self._dirty = True
471 self._updatedfiles.add(f)
471 self._updatedfiles.add(f)
472 self._map.addfile(
472 self._map.addfile(
473 f,
473 f,
474 mode=mode,
474 mode=mode,
475 size=size,
475 size=size,
476 mtime=mtime,
476 mtime=mtime,
477 added=added,
477 added=added,
478 merged=merged,
478 merged=merged,
479 from_p2=from_p2,
479 from_p2=from_p2,
480 possibly_dirty=possibly_dirty,
480 possibly_dirty=possibly_dirty,
481 )
481 )
482
482
483 def normal(self, f, parentfiledata=None):
483 def normal(self, f, parentfiledata=None):
484 """Mark a file normal and clean.
484 """Mark a file normal and clean.
485
485
486 parentfiledata: (mode, size, mtime) of the clean file
486 parentfiledata: (mode, size, mtime) of the clean file
487
487
488 parentfiledata should be computed from memory (for mode,
488 parentfiledata should be computed from memory (for mode,
489 size), as or close as possible from the point where we
489 size), as or close as possible from the point where we
490 determined the file was clean, to limit the risk of the
490 determined the file was clean, to limit the risk of the
491 file having been changed by an external process between the
491 file having been changed by an external process between the
492 moment where the file was determined to be clean and now."""
492 moment where the file was determined to be clean and now."""
493 if parentfiledata:
493 if parentfiledata:
494 (mode, size, mtime) = parentfiledata
494 (mode, size, mtime) = parentfiledata
495 else:
495 else:
496 s = os.lstat(self._join(f))
496 s = os.lstat(self._join(f))
497 mode = s.st_mode
497 mode = s.st_mode
498 size = s.st_size
498 size = s.st_size
499 mtime = s[stat.ST_MTIME]
499 mtime = s[stat.ST_MTIME]
500 self._addpath(f, mode=mode, size=size, mtime=mtime)
500 self._addpath(f, mode=mode, size=size, mtime=mtime)
501 self._map.copymap.pop(f, None)
501 self._map.copymap.pop(f, None)
502 if f in self._map.nonnormalset:
502 if f in self._map.nonnormalset:
503 self._map.nonnormalset.remove(f)
503 self._map.nonnormalset.remove(f)
504 if mtime > self._lastnormaltime:
504 if mtime > self._lastnormaltime:
505 # Remember the most recent modification timeslot for status(),
505 # Remember the most recent modification timeslot for status(),
506 # to make sure we won't miss future size-preserving file content
506 # to make sure we won't miss future size-preserving file content
507 # modifications that happen within the same timeslot.
507 # modifications that happen within the same timeslot.
508 self._lastnormaltime = mtime
508 self._lastnormaltime = mtime
509
509
510 def normallookup(self, f):
510 def normallookup(self, f):
511 '''Mark a file normal, but possibly dirty.'''
511 '''Mark a file normal, but possibly dirty.'''
512 if self.in_merge:
512 if self.in_merge:
513 # if there is a merge going on and the file was either
513 # if there is a merge going on and the file was either
514 # "merged" or coming from other parent (-2) before
514 # "merged" or coming from other parent (-2) before
515 # being removed, restore that state.
515 # being removed, restore that state.
516 entry = self._map.get(f)
516 entry = self._map.get(f)
517 if entry is not None:
517 if entry is not None:
518 # XXX this should probably be dealt with a a lower level
518 # XXX this should probably be dealt with a a lower level
519 # (see `merged_removed` and `from_p2_removed`)
519 # (see `merged_removed` and `from_p2_removed`)
520 if entry.merged_removed or entry.from_p2_removed:
520 if entry.merged_removed or entry.from_p2_removed:
521 source = self._map.copymap.get(f)
521 source = self._map.copymap.get(f)
522 if entry.merged_removed:
522 if entry.merged_removed:
523 self.merge(f)
523 self.merge(f)
524 elif entry.from_p2_removed:
524 elif entry.from_p2_removed:
525 self.otherparent(f)
525 self.otherparent(f)
526 if source is not None:
526 if source is not None:
527 self.copy(source, f)
527 self.copy(source, f)
528 return
528 return
529 elif entry.merged or entry.from_p2:
529 elif entry.merged or entry.from_p2:
530 return
530 return
531 self._addpath(f, possibly_dirty=True)
531 self._addpath(f, possibly_dirty=True)
532 self._map.copymap.pop(f, None)
532 self._map.copymap.pop(f, None)
533
533
534 def otherparent(self, f):
534 def otherparent(self, f):
535 '''Mark as coming from the other parent, always dirty.'''
535 '''Mark as coming from the other parent, always dirty.'''
536 if not self.in_merge:
536 if not self.in_merge:
537 msg = _(b"setting %r to other parent only allowed in merges") % f
537 msg = _(b"setting %r to other parent only allowed in merges") % f
538 raise error.Abort(msg)
538 raise error.Abort(msg)
539 entry = self._map.get(f)
539 entry = self._map.get(f)
540 if entry is not None and entry.tracked:
540 if entry is not None and entry.tracked:
541 # merge-like
541 # merge-like
542 self._addpath(f, merged=True)
542 self._addpath(f, merged=True)
543 else:
543 else:
544 # add-like
544 # add-like
545 self._addpath(f, from_p2=True)
545 self._addpath(f, from_p2=True)
546 self._map.copymap.pop(f, None)
546 self._map.copymap.pop(f, None)
547
547
548 def add(self, f):
548 def add(self, f):
549 '''Mark a file added.'''
549 '''Mark a file added.'''
550 self._add(f)
550 self._add(f)
551
551
552 def _add(self, filename):
552 def _add(self, filename):
553 """internal function to mark a file as added"""
553 """internal function to mark a file as added"""
554 self._addpath(filename, added=True)
554 self._addpath(filename, added=True)
555 self._map.copymap.pop(filename, None)
555 self._map.copymap.pop(filename, None)
556
556
557 def remove(self, f):
557 def remove(self, f):
558 '''Mark a file removed'''
558 '''Mark a file removed'''
559 self._remove(f)
559 self._remove(f)
560
560
561 def _remove(self, filename):
561 def _remove(self, filename):
562 """internal function to mark a file removed"""
562 """internal function to mark a file removed"""
563 self._dirty = True
563 self._dirty = True
564 self._updatedfiles.add(filename)
564 self._updatedfiles.add(filename)
565 self._map.removefile(filename, in_merge=self.in_merge)
565 self._map.removefile(filename, in_merge=self.in_merge)
566
566
567 def merge(self, f):
567 def merge(self, f):
568 '''Mark a file merged.'''
568 '''Mark a file merged.'''
569 if not self.in_merge:
569 if not self.in_merge:
570 return self.normallookup(f)
570 return self.normallookup(f)
571 return self.otherparent(f)
571 return self.otherparent(f)
572
572
573 def drop(self, f):
573 def drop(self, f):
574 '''Drop a file from the dirstate'''
574 '''Drop a file from the dirstate'''
575 if self._map.dropfile(f):
575 self._drop(f)
576
577 def _drop(self, filename):
578 """internal function to drop a file from the dirstate"""
579 if self._map.dropfile(filename):
576 self._dirty = True
580 self._dirty = True
577 self._updatedfiles.add(f)
581 self._updatedfiles.add(filename)
578 self._map.copymap.pop(f, None)
582 self._map.copymap.pop(filename, None)
579
583
def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
    """Discover the on-disk case-folded form of *path*.

    *normed* is the case-normalized form of *path*.  *exists* may be a
    pre-computed os.path.lexists() result; None means "check here".
    Results for existing paths are cached in *storemap* (a fold map)
    keyed by *normed*; non-existing paths are not cached.  Returns the
    folded path.
    """
    if exists is None:
        exists = os.path.lexists(os.path.join(self._root, path))
    if not exists:
        # Maybe a path component exists
        if not ignoremissing and b'/' in path:
            # fold the directory part recursively and keep the final
            # component's case as given
            d, f = path.rsplit(b'/', 1)
            d = self._normalize(d, False, ignoremissing, None)
            folded = d + b"/" + f
        else:
            # No path components, preserve original case
            folded = path
    else:
        # recursively normalize leading directory components
        # against dirstate
        if b'/' in normed:
            d, f = normed.rsplit(b'/', 1)
            d = self._normalize(d, False, ignoremissing, True)
            r = self._root + b"/" + d
            # util.fspath resolves the final component's case against
            # the filesystem under directory r
            folded = d + b"/" + util.fspath(f, r)
        else:
            folded = util.fspath(normed, self._root)
        # cache only confirmed-existing results
        storemap[normed] = folded

    return folded
605
609
def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
    """Return the dirstate's case-folded form of the file *path*.

    Consults only the file fold map (not directories).  When *isknown*
    is true the path came from a disk walk, so its case is trusted and
    no filesystem probing is done.
    """
    normed = util.normcase(path)
    cached = self._map.filefoldmap.get(normed, None)
    if cached is not None:
        return cached
    if isknown:
        return path
    return self._discoverpath(
        path, normed, ignoremissing, exists, self._map.filefoldmap
    )
617
621
def _normalize(self, path, isknown, ignoremissing=False, exists=None):
    """Return the dirstate's case-folded form of *path*.

    File entries take precedence over directory entries; unknown paths
    are probed on disk unless *isknown* is true.
    """
    normed = util.normcase(path)
    cached = self._map.filefoldmap.get(normed, None)
    if cached is None:
        cached = self._map.dirfoldmap.get(normed, None)
    if cached is not None:
        return cached
    if isknown:
        return path
    # store discovered result in dirfoldmap so that future
    # normalizefile calls don't start matching directories
    return self._discoverpath(
        path, normed, ignoremissing, exists, self._map.dirfoldmap
    )
633
637
def normalize(self, path, isknown=False, ignoremissing=False):
    """
    normalize the case of a pathname when on a casefolding filesystem

    isknown specifies whether the filename came from walking the
    disk, to avoid extra filesystem access.

    If ignoremissing is True, missing paths are returned
    unchanged. Otherwise, we try harder to normalize possibly
    existing path components.

    The normalized case is determined based on the following precedence:

    - version of name already stored in the dirstate
    - version of name stored on disk
    - version provided via command arguments
    """
    if not self._checkcase:
        # case-sensitive filesystem: nothing to fold
        return path
    return self._normalize(path, isknown, ignoremissing)
655
659
def clear(self):
    """Reset the dirstate to an empty (but dirty) state."""
    self._dirty = True
    self._lastnormaltime = 0
    self._map.clear()
    self._updatedfiles.clear()
661
665
def rebuild(self, parent, allfiles, changedfiles=None):
    """Reset the dirstate to track *allfiles* under single parent
    *parent*.

    When *changedfiles* is given, only those entries are refreshed
    (looked up if still in *allfiles*, dropped otherwise); with None
    the whole dirstate is rebuilt from scratch.
    """
    if changedfiles is None:
        # Rebuild entire dirstate
        to_lookup = allfiles
        to_drop = []
        saved_lastnormaltime = self._lastnormaltime
        self.clear()
        self._lastnormaltime = saved_lastnormaltime
    elif len(changedfiles) < 10:
        # Avoid turning allfiles into a set, which can be expensive if it's
        # large.
        to_lookup = [f for f in changedfiles if f in allfiles]
        to_drop = [f for f in changedfiles if f not in allfiles]
    else:
        changed = set(changedfiles)
        to_lookup = changed & set(allfiles)
        to_drop = changed - to_lookup

    # remember the pre-rebuild parents once, for the parent-change
    # callbacks fired at write time
    if self._origpl is None:
        self._origpl = self._pl
    self._map.setparents(parent, self._nodeconstants.nullid)

    for f in to_lookup:
        self.normallookup(f)
    for f in to_drop:
        self._drop(f)

    self._dirty = True
695
699
def identity(self):
    """Return the identity of the dirstate storage itself.

    If the identity recorded from a previously-read dirstate equals
    this one, writing changes derived from that read keeps the
    on-disk file consistent (no concurrent rewrite happened).
    """
    return self._map.identity
703
707
def write(self, tr):
    """Write the dirstate out to disk if it is dirty.

    Inside a transaction *tr* the write is delayed via a file
    generator (so timestamp disambiguation happens at transaction
    close); without a transaction the dirstate is written immediately
    through an atomictemp file.
    """
    if not self._dirty:
        return

    filename = self._filename
    if tr:
        # 'dirstate.write()' is not only for writing in-memory
        # changes out, but also for dropping ambiguous timestamp.
        # delayed writing re-raise "ambiguous timestamp issue".
        # See also the wiki page below for detail:
        # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan

        # emulate dropping timestamp in 'parsers.pack_dirstate'
        now = _getfsnow(self._opener)
        self._map.clearambiguoustimes(self._updatedfiles, now)

        # emulate that all 'dirstate.normal' results are written out
        self._lastnormaltime = 0
        self._updatedfiles.clear()

        # delay writing in-memory changes out
        tr.addfilegenerator(
            b'dirstate',
            (self._filename,),
            self._writedirstate,
            location=b'plain',
        )
        return

    # no transaction: write synchronously, with checkambig to guard
    # against timestamp races on the dirstate file itself
    st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
    self._writedirstate(st)
735
739
def addparentchangecallback(self, category, callback):
    """Register a callback to run when the working-dir parents change.

    The callback is invoked as: callback(dirstate, (oldp1, oldp2),
    (newp1, newp2)).  *category* is a unique identifier; registering
    again under the same category replaces the previous callback.
    """
    self._plchangecallbacks[category] = callback
746
750
def _writedirstate(self, st):
    """Serialize the dirstate map into the open file object *st*.

    Fires registered parent-change callbacks first (when the parents
    actually changed), then writes the map using the new file's mtime
    as the filesystem's notion of 'now'.
    """
    # notify callbacks about parents change
    if self._origpl is not None and self._origpl != self._pl:
        for c, callback in sorted(
            pycompat.iteritems(self._plchangecallbacks)
        ):
            callback(self, self._origpl, self._pl)
        self._origpl = None
    # use the modification time of the newly created temporary file as the
    # filesystem's notion of 'now'
    now = util.fstat(st)[stat.ST_MTIME] & _rangemask

    # enough 'delaywrite' prevents 'pack_dirstate' from dropping
    # timestamp of each entries in dirstate, because of 'now > mtime'
    delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
    if delaywrite > 0:
        # do we have any files to delay for?
        for f, e in pycompat.iteritems(self._map):
            if e.need_delay(now):
                import time  # to avoid useless import

                # rather than sleep n seconds, sleep until the next
                # multiple of n seconds
                clock = time.time()
                start = int(clock) - (int(clock) % delaywrite)
                end = start + delaywrite
                time.sleep(end - clock)
                now = end  # trust our estimate that the end is near now
                # one delayed entry is enough: 'now' has moved past
                # every mtime recorded so far
                break

    self._map.write(st, now)
    self._lastnormaltime = 0
    self._dirty = False
780
784
def _dirignore(self, f):
    """Return True if *f* or any of its parent directories is ignored."""
    if self._ignore(f):
        return True
    return any(self._ignore(p) for p in pathutil.finddirs(f))
788
792
def _ignorefiles(self):
    """Return the list of files that contain ignore patterns.

    This is the repository's .hgignore (when present) plus every
    ui.ignore / ui.ignore.* configured path.
    """
    files = []
    repo_hgignore = self._join(b'.hgignore')
    if os.path.exists(repo_hgignore):
        files.append(repo_hgignore)
    for name, path in self._ui.configitems(b"ui"):
        if name == b'ignore' or name.startswith(b'ignore.'):
            # we need to use os.path.join here rather than self._join
            # because path is arbitrary and user-specified
            files.append(os.path.join(self._rootdir, util.expandpath(path)))
    return files
799
803
def _ignorefileandline(self, f):
    """Return (file, lineno, line) for the ignore pattern matching *f*.

    Walks all ignore files breadth-first, following "subinclude"
    patterns into further files (each queued at most once via
    *visited*), and returns the origin of the first pattern that
    matches *f*; returns (None, -1, b"") if nothing matches.
    """
    files = collections.deque(self._ignorefiles())
    visited = set()
    while files:
        i = files.popleft()
        patterns = matchmod.readpatternfile(
            i, self._ui.warn, sourceinfo=True
        )
        for pattern, lineno, line in patterns:
            kind, p = matchmod._patsplit(pattern, b'glob')
            if kind == b"subinclude":
                # queue the referenced pattern file instead of
                # matching against it directly
                if p not in visited:
                    files.append(p)
                continue
            m = matchmod.match(
                self._root, b'', [], [pattern], warn=self._ui.warn
            )
            if m(f):
                return (i, lineno, line)
        visited.add(i)
    return (None, -1, b"")
821
825
def _walkexplicit(self, match, subrepos):
    """Get stat data about the files explicitly specified by match.

    Return a triple (results, dirsfound, dirsnotfound).
    - results is a mapping from filename to stat result. It also contains
      listings mapping subrepos and .hg to None.
    - dirsfound is a list of files found to be directories.
    - dirsnotfound is a list of files that the dirstate thinks are
      directories and that were not found."""

    def badtype(mode):
        # human-readable description for unsupported file kinds
        kind = _(b'unknown')
        if stat.S_ISCHR(mode):
            kind = _(b'character device')
        elif stat.S_ISBLK(mode):
            kind = _(b'block device')
        elif stat.S_ISFIFO(mode):
            kind = _(b'fifo')
        elif stat.S_ISSOCK(mode):
            kind = _(b'socket')
        elif stat.S_ISDIR(mode):
            kind = _(b'directory')
        return _(b'unsupported file type (type is %s)') % kind

    # hoist frequently-used attributes into locals for the loop below
    badfn = match.bad
    dmap = self._map
    lstat = os.lstat
    getkind = stat.S_IFMT
    dirkind = stat.S_IFDIR
    regkind = stat.S_IFREG
    lnkkind = stat.S_IFLNK
    join = self._join
    dirsfound = []
    foundadd = dirsfound.append
    dirsnotfound = []
    notfoundadd = dirsnotfound.append

    if not match.isexact() and self._checkcase:
        normalize = self._normalize
    else:
        normalize = None

    # drop explicit files that live inside a subrepo; the subrepo
    # itself handles them (both lists are sorted for the merge walk)
    files = sorted(match.files())
    subrepos.sort()
    i, j = 0, 0
    while i < len(files) and j < len(subrepos):
        subpath = subrepos[j] + b"/"
        if files[i] < subpath:
            i += 1
            continue
        while i < len(files) and files[i].startswith(subpath):
            del files[i]
        j += 1

    if not files or b'' in files:
        files = [b'']
        # constructing the foldmap is expensive, so don't do it for the
        # common case where files is ['']
        normalize = None
    results = dict.fromkeys(subrepos)
    results[b'.hg'] = None

    for ff in files:
        if normalize:
            nf = normalize(ff, False, True)
        else:
            nf = ff
        if nf in results:
            continue

        try:
            st = lstat(join(nf))
            kind = getkind(st.st_mode)
            if kind == dirkind:
                if nf in dmap:
                    # file replaced by dir on disk but still in dirstate
                    results[nf] = None
                foundadd((nf, ff))
            elif kind == regkind or kind == lnkkind:
                results[nf] = st
            else:
                badfn(ff, badtype(kind))
                if nf in dmap:
                    results[nf] = None
        except OSError as inst:  # nf not found on disk - it is dirstate only
            if nf in dmap:  # does it exactly match a missing file?
                results[nf] = None
            else:  # does it match a missing directory?
                if self._map.hasdir(nf):
                    notfoundadd(nf)
                else:
                    badfn(ff, encoding.strtolocal(inst.strerror))

    # match.files() may contain explicitly-specified paths that shouldn't
    # be taken; drop them from the list of files found. dirsfound/notfound
    # aren't filtered here because they will be tested later.
    if match.anypats():
        for f in list(results):
            if f == b'.hg' or f in subrepos:
                # keep sentinel to disable further out-of-repo walks
                continue
            if not match(f):
                del results[f]

    # Case insensitive filesystems cannot rely on lstat() failing to detect
    # a case-only rename. Prune the stat object for any file that does not
    # match the case in the filesystem, if there are multiple files that
    # normalize to the same path.
    if match.isexact() and self._checkcase:
        normed = {}

        # group stat'ed results by their case-normalized form
        for f, st in pycompat.iteritems(results):
            if st is None:
                continue

            nc = util.normcase(f)
            paths = normed.get(nc)

            if paths is None:
                paths = set()
                normed[nc] = paths

            paths.add(f)

        # any group with more than one spelling keeps a stat only for
        # the spelling that matches the filesystem
        for norm, paths in pycompat.iteritems(normed):
            if len(paths) > 1:
                for path in paths:
                    folded = self._discoverpath(
                        path, norm, True, None, self._map.dirfoldmap
                    )
                    if path != folded:
                        results[path] = None

    return results, dirsfound, dirsnotfound
956
960
957 def walk(self, match, subrepos, unknown, ignored, full=True):
961 def walk(self, match, subrepos, unknown, ignored, full=True):
958 """
962 """
959 Walk recursively through the directory tree, finding all files
963 Walk recursively through the directory tree, finding all files
960 matched by match.
964 matched by match.
961
965
962 If full is False, maybe skip some known-clean files.
966 If full is False, maybe skip some known-clean files.
963
967
964 Return a dict mapping filename to stat-like object (either
968 Return a dict mapping filename to stat-like object (either
965 mercurial.osutil.stat instance or return value of os.stat()).
969 mercurial.osutil.stat instance or return value of os.stat()).
966
970
967 """
971 """
968 # full is a flag that extensions that hook into walk can use -- this
972 # full is a flag that extensions that hook into walk can use -- this
969 # implementation doesn't use it at all. This satisfies the contract
973 # implementation doesn't use it at all. This satisfies the contract
970 # because we only guarantee a "maybe".
974 # because we only guarantee a "maybe".
971
975
972 if ignored:
976 if ignored:
973 ignore = util.never
977 ignore = util.never
974 dirignore = util.never
978 dirignore = util.never
975 elif unknown:
979 elif unknown:
976 ignore = self._ignore
980 ignore = self._ignore
977 dirignore = self._dirignore
981 dirignore = self._dirignore
978 else:
982 else:
979 # if not unknown and not ignored, drop dir recursion and step 2
983 # if not unknown and not ignored, drop dir recursion and step 2
980 ignore = util.always
984 ignore = util.always
981 dirignore = util.always
985 dirignore = util.always
982
986
983 matchfn = match.matchfn
987 matchfn = match.matchfn
984 matchalways = match.always()
988 matchalways = match.always()
985 matchtdir = match.traversedir
989 matchtdir = match.traversedir
986 dmap = self._map
990 dmap = self._map
987 listdir = util.listdir
991 listdir = util.listdir
988 lstat = os.lstat
992 lstat = os.lstat
989 dirkind = stat.S_IFDIR
993 dirkind = stat.S_IFDIR
990 regkind = stat.S_IFREG
994 regkind = stat.S_IFREG
991 lnkkind = stat.S_IFLNK
995 lnkkind = stat.S_IFLNK
992 join = self._join
996 join = self._join
993
997
994 exact = skipstep3 = False
998 exact = skipstep3 = False
995 if match.isexact(): # match.exact
999 if match.isexact(): # match.exact
996 exact = True
1000 exact = True
997 dirignore = util.always # skip step 2
1001 dirignore = util.always # skip step 2
998 elif match.prefix(): # match.match, no patterns
1002 elif match.prefix(): # match.match, no patterns
999 skipstep3 = True
1003 skipstep3 = True
1000
1004
1001 if not exact and self._checkcase:
1005 if not exact and self._checkcase:
1002 normalize = self._normalize
1006 normalize = self._normalize
1003 normalizefile = self._normalizefile
1007 normalizefile = self._normalizefile
1004 skipstep3 = False
1008 skipstep3 = False
1005 else:
1009 else:
1006 normalize = self._normalize
1010 normalize = self._normalize
1007 normalizefile = None
1011 normalizefile = None
1008
1012
1009 # step 1: find all explicit files
1013 # step 1: find all explicit files
1010 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1014 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1011 if matchtdir:
1015 if matchtdir:
1012 for d in work:
1016 for d in work:
1013 matchtdir(d[0])
1017 matchtdir(d[0])
1014 for d in dirsnotfound:
1018 for d in dirsnotfound:
1015 matchtdir(d)
1019 matchtdir(d)
1016
1020
1017 skipstep3 = skipstep3 and not (work or dirsnotfound)
1021 skipstep3 = skipstep3 and not (work or dirsnotfound)
1018 work = [d for d in work if not dirignore(d[0])]
1022 work = [d for d in work if not dirignore(d[0])]
1019
1023
1020 # step 2: visit subdirectories
1024 # step 2: visit subdirectories
1021 def traverse(work, alreadynormed):
1025 def traverse(work, alreadynormed):
1022 wadd = work.append
1026 wadd = work.append
1023 while work:
1027 while work:
1024 tracing.counter('dirstate.walk work', len(work))
1028 tracing.counter('dirstate.walk work', len(work))
1025 nd = work.pop()
1029 nd = work.pop()
1026 visitentries = match.visitchildrenset(nd)
1030 visitentries = match.visitchildrenset(nd)
1027 if not visitentries:
1031 if not visitentries:
1028 continue
1032 continue
1029 if visitentries == b'this' or visitentries == b'all':
1033 if visitentries == b'this' or visitentries == b'all':
1030 visitentries = None
1034 visitentries = None
1031 skip = None
1035 skip = None
1032 if nd != b'':
1036 if nd != b'':
1033 skip = b'.hg'
1037 skip = b'.hg'
1034 try:
1038 try:
1035 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1039 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1036 entries = listdir(join(nd), stat=True, skip=skip)
1040 entries = listdir(join(nd), stat=True, skip=skip)
1037 except OSError as inst:
1041 except OSError as inst:
1038 if inst.errno in (errno.EACCES, errno.ENOENT):
1042 if inst.errno in (errno.EACCES, errno.ENOENT):
1039 match.bad(
1043 match.bad(
1040 self.pathto(nd), encoding.strtolocal(inst.strerror)
1044 self.pathto(nd), encoding.strtolocal(inst.strerror)
1041 )
1045 )
1042 continue
1046 continue
1043 raise
1047 raise
1044 for f, kind, st in entries:
1048 for f, kind, st in entries:
1045 # Some matchers may return files in the visitentries set,
1049 # Some matchers may return files in the visitentries set,
1046 # instead of 'this', if the matcher explicitly mentions them
1050 # instead of 'this', if the matcher explicitly mentions them
1047 # and is not an exactmatcher. This is acceptable; we do not
1051 # and is not an exactmatcher. This is acceptable; we do not
1048 # make any hard assumptions about file-or-directory below
1052 # make any hard assumptions about file-or-directory below
1049 # based on the presence of `f` in visitentries. If
1053 # based on the presence of `f` in visitentries. If
1050 # visitchildrenset returned a set, we can always skip the
1054 # visitchildrenset returned a set, we can always skip the
1051 # entries *not* in the set it provided regardless of whether
1055 # entries *not* in the set it provided regardless of whether
1052 # they're actually a file or a directory.
1056 # they're actually a file or a directory.
1053 if visitentries and f not in visitentries:
1057 if visitentries and f not in visitentries:
1054 continue
1058 continue
1055 if normalizefile:
1059 if normalizefile:
1056 # even though f might be a directory, we're only
1060 # even though f might be a directory, we're only
1057 # interested in comparing it to files currently in the
1061 # interested in comparing it to files currently in the
1058 # dmap -- therefore normalizefile is enough
1062 # dmap -- therefore normalizefile is enough
1059 nf = normalizefile(
1063 nf = normalizefile(
1060 nd and (nd + b"/" + f) or f, True, True
1064 nd and (nd + b"/" + f) or f, True, True
1061 )
1065 )
1062 else:
1066 else:
1063 nf = nd and (nd + b"/" + f) or f
1067 nf = nd and (nd + b"/" + f) or f
1064 if nf not in results:
1068 if nf not in results:
1065 if kind == dirkind:
1069 if kind == dirkind:
1066 if not ignore(nf):
1070 if not ignore(nf):
1067 if matchtdir:
1071 if matchtdir:
1068 matchtdir(nf)
1072 matchtdir(nf)
1069 wadd(nf)
1073 wadd(nf)
1070 if nf in dmap and (matchalways or matchfn(nf)):
1074 if nf in dmap and (matchalways or matchfn(nf)):
1071 results[nf] = None
1075 results[nf] = None
1072 elif kind == regkind or kind == lnkkind:
1076 elif kind == regkind or kind == lnkkind:
1073 if nf in dmap:
1077 if nf in dmap:
1074 if matchalways or matchfn(nf):
1078 if matchalways or matchfn(nf):
1075 results[nf] = st
1079 results[nf] = st
1076 elif (matchalways or matchfn(nf)) and not ignore(
1080 elif (matchalways or matchfn(nf)) and not ignore(
1077 nf
1081 nf
1078 ):
1082 ):
1079 # unknown file -- normalize if necessary
1083 # unknown file -- normalize if necessary
1080 if not alreadynormed:
1084 if not alreadynormed:
1081 nf = normalize(nf, False, True)
1085 nf = normalize(nf, False, True)
1082 results[nf] = st
1086 results[nf] = st
1083 elif nf in dmap and (matchalways or matchfn(nf)):
1087 elif nf in dmap and (matchalways or matchfn(nf)):
1084 results[nf] = None
1088 results[nf] = None
1085
1089
1086 for nd, d in work:
1090 for nd, d in work:
1087 # alreadynormed means that processwork doesn't have to do any
1091 # alreadynormed means that processwork doesn't have to do any
1088 # expensive directory normalization
1092 # expensive directory normalization
1089 alreadynormed = not normalize or nd == d
1093 alreadynormed = not normalize or nd == d
1090 traverse([d], alreadynormed)
1094 traverse([d], alreadynormed)
1091
1095
1092 for s in subrepos:
1096 for s in subrepos:
1093 del results[s]
1097 del results[s]
1094 del results[b'.hg']
1098 del results[b'.hg']
1095
1099
1096 # step 3: visit remaining files from dmap
1100 # step 3: visit remaining files from dmap
1097 if not skipstep3 and not exact:
1101 if not skipstep3 and not exact:
1098 # If a dmap file is not in results yet, it was either
1102 # If a dmap file is not in results yet, it was either
1099 # a) not matching matchfn b) ignored, c) missing, or d) under a
1103 # a) not matching matchfn b) ignored, c) missing, or d) under a
1100 # symlink directory.
1104 # symlink directory.
1101 if not results and matchalways:
1105 if not results and matchalways:
1102 visit = [f for f in dmap]
1106 visit = [f for f in dmap]
1103 else:
1107 else:
1104 visit = [f for f in dmap if f not in results and matchfn(f)]
1108 visit = [f for f in dmap if f not in results and matchfn(f)]
1105 visit.sort()
1109 visit.sort()
1106
1110
1107 if unknown:
1111 if unknown:
1108 # unknown == True means we walked all dirs under the roots
1112 # unknown == True means we walked all dirs under the roots
1109 # that wasn't ignored, and everything that matched was stat'ed
1113 # that wasn't ignored, and everything that matched was stat'ed
1110 # and is already in results.
1114 # and is already in results.
1111 # The rest must thus be ignored or under a symlink.
1115 # The rest must thus be ignored or under a symlink.
1112 audit_path = pathutil.pathauditor(self._root, cached=True)
1116 audit_path = pathutil.pathauditor(self._root, cached=True)
1113
1117
1114 for nf in iter(visit):
1118 for nf in iter(visit):
1115 # If a stat for the same file was already added with a
1119 # If a stat for the same file was already added with a
1116 # different case, don't add one for this, since that would
1120 # different case, don't add one for this, since that would
1117 # make it appear as if the file exists under both names
1121 # make it appear as if the file exists under both names
1118 # on disk.
1122 # on disk.
1119 if (
1123 if (
1120 normalizefile
1124 normalizefile
1121 and normalizefile(nf, True, True) in results
1125 and normalizefile(nf, True, True) in results
1122 ):
1126 ):
1123 results[nf] = None
1127 results[nf] = None
1124 # Report ignored items in the dmap as long as they are not
1128 # Report ignored items in the dmap as long as they are not
1125 # under a symlink directory.
1129 # under a symlink directory.
1126 elif audit_path.check(nf):
1130 elif audit_path.check(nf):
1127 try:
1131 try:
1128 results[nf] = lstat(join(nf))
1132 results[nf] = lstat(join(nf))
1129 # file was just ignored, no links, and exists
1133 # file was just ignored, no links, and exists
1130 except OSError:
1134 except OSError:
1131 # file doesn't exist
1135 # file doesn't exist
1132 results[nf] = None
1136 results[nf] = None
1133 else:
1137 else:
1134 # It's either missing or under a symlink directory
1138 # It's either missing or under a symlink directory
1135 # which we in this case report as missing
1139 # which we in this case report as missing
1136 results[nf] = None
1140 results[nf] = None
1137 else:
1141 else:
1138 # We may not have walked the full directory tree above,
1142 # We may not have walked the full directory tree above,
1139 # so stat and check everything we missed.
1143 # so stat and check everything we missed.
1140 iv = iter(visit)
1144 iv = iter(visit)
1141 for st in util.statfiles([join(i) for i in visit]):
1145 for st in util.statfiles([join(i) for i in visit]):
1142 results[next(iv)] = st
1146 results[next(iv)] = st
1143 return results
1147 return results
1144
1148
1145 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1149 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1146 # Force Rayon (Rust parallelism library) to respect the number of
1150 # Force Rayon (Rust parallelism library) to respect the number of
1147 # workers. This is a temporary workaround until Rust code knows
1151 # workers. This is a temporary workaround until Rust code knows
1148 # how to read the config file.
1152 # how to read the config file.
1149 numcpus = self._ui.configint(b"worker", b"numcpus")
1153 numcpus = self._ui.configint(b"worker", b"numcpus")
1150 if numcpus is not None:
1154 if numcpus is not None:
1151 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1155 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1152
1156
1153 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1157 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1154 if not workers_enabled:
1158 if not workers_enabled:
1155 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1159 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1156
1160
1157 (
1161 (
1158 lookup,
1162 lookup,
1159 modified,
1163 modified,
1160 added,
1164 added,
1161 removed,
1165 removed,
1162 deleted,
1166 deleted,
1163 clean,
1167 clean,
1164 ignored,
1168 ignored,
1165 unknown,
1169 unknown,
1166 warnings,
1170 warnings,
1167 bad,
1171 bad,
1168 traversed,
1172 traversed,
1169 dirty,
1173 dirty,
1170 ) = rustmod.status(
1174 ) = rustmod.status(
1171 self._map._rustmap,
1175 self._map._rustmap,
1172 matcher,
1176 matcher,
1173 self._rootdir,
1177 self._rootdir,
1174 self._ignorefiles(),
1178 self._ignorefiles(),
1175 self._checkexec,
1179 self._checkexec,
1176 self._lastnormaltime,
1180 self._lastnormaltime,
1177 bool(list_clean),
1181 bool(list_clean),
1178 bool(list_ignored),
1182 bool(list_ignored),
1179 bool(list_unknown),
1183 bool(list_unknown),
1180 bool(matcher.traversedir),
1184 bool(matcher.traversedir),
1181 )
1185 )
1182
1186
1183 self._dirty |= dirty
1187 self._dirty |= dirty
1184
1188
1185 if matcher.traversedir:
1189 if matcher.traversedir:
1186 for dir in traversed:
1190 for dir in traversed:
1187 matcher.traversedir(dir)
1191 matcher.traversedir(dir)
1188
1192
1189 if self._ui.warn:
1193 if self._ui.warn:
1190 for item in warnings:
1194 for item in warnings:
1191 if isinstance(item, tuple):
1195 if isinstance(item, tuple):
1192 file_path, syntax = item
1196 file_path, syntax = item
1193 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1197 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1194 file_path,
1198 file_path,
1195 syntax,
1199 syntax,
1196 )
1200 )
1197 self._ui.warn(msg)
1201 self._ui.warn(msg)
1198 else:
1202 else:
1199 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1203 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1200 self._ui.warn(
1204 self._ui.warn(
1201 msg
1205 msg
1202 % (
1206 % (
1203 pathutil.canonpath(
1207 pathutil.canonpath(
1204 self._rootdir, self._rootdir, item
1208 self._rootdir, self._rootdir, item
1205 ),
1209 ),
1206 b"No such file or directory",
1210 b"No such file or directory",
1207 )
1211 )
1208 )
1212 )
1209
1213
1210 for (fn, message) in bad:
1214 for (fn, message) in bad:
1211 matcher.bad(fn, encoding.strtolocal(message))
1215 matcher.bad(fn, encoding.strtolocal(message))
1212
1216
1213 status = scmutil.status(
1217 status = scmutil.status(
1214 modified=modified,
1218 modified=modified,
1215 added=added,
1219 added=added,
1216 removed=removed,
1220 removed=removed,
1217 deleted=deleted,
1221 deleted=deleted,
1218 unknown=unknown,
1222 unknown=unknown,
1219 ignored=ignored,
1223 ignored=ignored,
1220 clean=clean,
1224 clean=clean,
1221 )
1225 )
1222 return (lookup, status)
1226 return (lookup, status)
1223
1227
1224 def status(self, match, subrepos, ignored, clean, unknown):
1228 def status(self, match, subrepos, ignored, clean, unknown):
1225 """Determine the status of the working copy relative to the
1229 """Determine the status of the working copy relative to the
1226 dirstate and return a pair of (unsure, status), where status is of type
1230 dirstate and return a pair of (unsure, status), where status is of type
1227 scmutil.status and:
1231 scmutil.status and:
1228
1232
1229 unsure:
1233 unsure:
1230 files that might have been modified since the dirstate was
1234 files that might have been modified since the dirstate was
1231 written, but need to be read to be sure (size is the same
1235 written, but need to be read to be sure (size is the same
1232 but mtime differs)
1236 but mtime differs)
1233 status.modified:
1237 status.modified:
1234 files that have definitely been modified since the dirstate
1238 files that have definitely been modified since the dirstate
1235 was written (different size or mode)
1239 was written (different size or mode)
1236 status.clean:
1240 status.clean:
1237 files that have definitely not been modified since the
1241 files that have definitely not been modified since the
1238 dirstate was written
1242 dirstate was written
1239 """
1243 """
1240 listignored, listclean, listunknown = ignored, clean, unknown
1244 listignored, listclean, listunknown = ignored, clean, unknown
1241 lookup, modified, added, unknown, ignored = [], [], [], [], []
1245 lookup, modified, added, unknown, ignored = [], [], [], [], []
1242 removed, deleted, clean = [], [], []
1246 removed, deleted, clean = [], [], []
1243
1247
1244 dmap = self._map
1248 dmap = self._map
1245 dmap.preload()
1249 dmap.preload()
1246
1250
1247 use_rust = True
1251 use_rust = True
1248
1252
1249 allowed_matchers = (
1253 allowed_matchers = (
1250 matchmod.alwaysmatcher,
1254 matchmod.alwaysmatcher,
1251 matchmod.exactmatcher,
1255 matchmod.exactmatcher,
1252 matchmod.includematcher,
1256 matchmod.includematcher,
1253 )
1257 )
1254
1258
1255 if rustmod is None:
1259 if rustmod is None:
1256 use_rust = False
1260 use_rust = False
1257 elif self._checkcase:
1261 elif self._checkcase:
1258 # Case-insensitive filesystems are not handled yet
1262 # Case-insensitive filesystems are not handled yet
1259 use_rust = False
1263 use_rust = False
1260 elif subrepos:
1264 elif subrepos:
1261 use_rust = False
1265 use_rust = False
1262 elif sparse.enabled:
1266 elif sparse.enabled:
1263 use_rust = False
1267 use_rust = False
1264 elif not isinstance(match, allowed_matchers):
1268 elif not isinstance(match, allowed_matchers):
1265 # Some matchers have yet to be implemented
1269 # Some matchers have yet to be implemented
1266 use_rust = False
1270 use_rust = False
1267
1271
1268 if use_rust:
1272 if use_rust:
1269 try:
1273 try:
1270 return self._rust_status(
1274 return self._rust_status(
1271 match, listclean, listignored, listunknown
1275 match, listclean, listignored, listunknown
1272 )
1276 )
1273 except rustmod.FallbackError:
1277 except rustmod.FallbackError:
1274 pass
1278 pass
1275
1279
1276 def noop(f):
1280 def noop(f):
1277 pass
1281 pass
1278
1282
1279 dcontains = dmap.__contains__
1283 dcontains = dmap.__contains__
1280 dget = dmap.__getitem__
1284 dget = dmap.__getitem__
1281 ladd = lookup.append # aka "unsure"
1285 ladd = lookup.append # aka "unsure"
1282 madd = modified.append
1286 madd = modified.append
1283 aadd = added.append
1287 aadd = added.append
1284 uadd = unknown.append if listunknown else noop
1288 uadd = unknown.append if listunknown else noop
1285 iadd = ignored.append if listignored else noop
1289 iadd = ignored.append if listignored else noop
1286 radd = removed.append
1290 radd = removed.append
1287 dadd = deleted.append
1291 dadd = deleted.append
1288 cadd = clean.append if listclean else noop
1292 cadd = clean.append if listclean else noop
1289 mexact = match.exact
1293 mexact = match.exact
1290 dirignore = self._dirignore
1294 dirignore = self._dirignore
1291 checkexec = self._checkexec
1295 checkexec = self._checkexec
1292 copymap = self._map.copymap
1296 copymap = self._map.copymap
1293 lastnormaltime = self._lastnormaltime
1297 lastnormaltime = self._lastnormaltime
1294
1298
1295 # We need to do full walks when either
1299 # We need to do full walks when either
1296 # - we're listing all clean files, or
1300 # - we're listing all clean files, or
1297 # - match.traversedir does something, because match.traversedir should
1301 # - match.traversedir does something, because match.traversedir should
1298 # be called for every dir in the working dir
1302 # be called for every dir in the working dir
1299 full = listclean or match.traversedir is not None
1303 full = listclean or match.traversedir is not None
1300 for fn, st in pycompat.iteritems(
1304 for fn, st in pycompat.iteritems(
1301 self.walk(match, subrepos, listunknown, listignored, full=full)
1305 self.walk(match, subrepos, listunknown, listignored, full=full)
1302 ):
1306 ):
1303 if not dcontains(fn):
1307 if not dcontains(fn):
1304 if (listignored or mexact(fn)) and dirignore(fn):
1308 if (listignored or mexact(fn)) and dirignore(fn):
1305 if listignored:
1309 if listignored:
1306 iadd(fn)
1310 iadd(fn)
1307 else:
1311 else:
1308 uadd(fn)
1312 uadd(fn)
1309 continue
1313 continue
1310
1314
1311 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1315 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1312 # written like that for performance reasons. dmap[fn] is not a
1316 # written like that for performance reasons. dmap[fn] is not a
1313 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1317 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1314 # opcode has fast paths when the value to be unpacked is a tuple or
1318 # opcode has fast paths when the value to be unpacked is a tuple or
1315 # a list, but falls back to creating a full-fledged iterator in
1319 # a list, but falls back to creating a full-fledged iterator in
1316 # general. That is much slower than simply accessing and storing the
1320 # general. That is much slower than simply accessing and storing the
1317 # tuple members one by one.
1321 # tuple members one by one.
1318 t = dget(fn)
1322 t = dget(fn)
1319 mode = t.mode
1323 mode = t.mode
1320 size = t.size
1324 size = t.size
1321 time = t.mtime
1325 time = t.mtime
1322
1326
1323 if not st and t.tracked:
1327 if not st and t.tracked:
1324 dadd(fn)
1328 dadd(fn)
1325 elif t.merged:
1329 elif t.merged:
1326 madd(fn)
1330 madd(fn)
1327 elif t.added:
1331 elif t.added:
1328 aadd(fn)
1332 aadd(fn)
1329 elif t.removed:
1333 elif t.removed:
1330 radd(fn)
1334 radd(fn)
1331 elif t.tracked:
1335 elif t.tracked:
1332 if (
1336 if (
1333 size >= 0
1337 size >= 0
1334 and (
1338 and (
1335 (size != st.st_size and size != st.st_size & _rangemask)
1339 (size != st.st_size and size != st.st_size & _rangemask)
1336 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1340 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1337 )
1341 )
1338 or t.from_p2
1342 or t.from_p2
1339 or fn in copymap
1343 or fn in copymap
1340 ):
1344 ):
1341 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1345 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1342 # issue6456: Size returned may be longer due to
1346 # issue6456: Size returned may be longer due to
1343 # encryption on EXT-4 fscrypt, undecided.
1347 # encryption on EXT-4 fscrypt, undecided.
1344 ladd(fn)
1348 ladd(fn)
1345 else:
1349 else:
1346 madd(fn)
1350 madd(fn)
1347 elif (
1351 elif (
1348 time != st[stat.ST_MTIME]
1352 time != st[stat.ST_MTIME]
1349 and time != st[stat.ST_MTIME] & _rangemask
1353 and time != st[stat.ST_MTIME] & _rangemask
1350 ):
1354 ):
1351 ladd(fn)
1355 ladd(fn)
1352 elif st[stat.ST_MTIME] == lastnormaltime:
1356 elif st[stat.ST_MTIME] == lastnormaltime:
1353 # fn may have just been marked as normal and it may have
1357 # fn may have just been marked as normal and it may have
1354 # changed in the same second without changing its size.
1358 # changed in the same second without changing its size.
1355 # This can happen if we quickly do multiple commits.
1359 # This can happen if we quickly do multiple commits.
1356 # Force lookup, so we don't miss such a racy file change.
1360 # Force lookup, so we don't miss such a racy file change.
1357 ladd(fn)
1361 ladd(fn)
1358 elif listclean:
1362 elif listclean:
1359 cadd(fn)
1363 cadd(fn)
1360 status = scmutil.status(
1364 status = scmutil.status(
1361 modified, added, removed, deleted, unknown, ignored, clean
1365 modified, added, removed, deleted, unknown, ignored, clean
1362 )
1366 )
1363 return (lookup, status)
1367 return (lookup, status)
1364
1368
1365 def matches(self, match):
1369 def matches(self, match):
1366 """
1370 """
1367 return files in the dirstate (in whatever state) filtered by match
1371 return files in the dirstate (in whatever state) filtered by match
1368 """
1372 """
1369 dmap = self._map
1373 dmap = self._map
1370 if rustmod is not None:
1374 if rustmod is not None:
1371 dmap = self._map._rustmap
1375 dmap = self._map._rustmap
1372
1376
1373 if match.always():
1377 if match.always():
1374 return dmap.keys()
1378 return dmap.keys()
1375 files = match.files()
1379 files = match.files()
1376 if match.isexact():
1380 if match.isexact():
1377 # fast path -- filter the other way around, since typically files is
1381 # fast path -- filter the other way around, since typically files is
1378 # much smaller than dmap
1382 # much smaller than dmap
1379 return [f for f in files if f in dmap]
1383 return [f for f in files if f in dmap]
1380 if match.prefix() and all(fn in dmap for fn in files):
1384 if match.prefix() and all(fn in dmap for fn in files):
1381 # fast path -- all the values are known to be files, so just return
1385 # fast path -- all the values are known to be files, so just return
1382 # that
1386 # that
1383 return list(files)
1387 return list(files)
1384 return [f for f in dmap if match(f)]
1388 return [f for f in dmap if match(f)]
1385
1389
1386 def _actualfilename(self, tr):
1390 def _actualfilename(self, tr):
1387 if tr:
1391 if tr:
1388 return self._pendingfilename
1392 return self._pendingfilename
1389 else:
1393 else:
1390 return self._filename
1394 return self._filename
1391
1395
1392 def savebackup(self, tr, backupname):
1396 def savebackup(self, tr, backupname):
1393 '''Save current dirstate into backup file'''
1397 '''Save current dirstate into backup file'''
1394 filename = self._actualfilename(tr)
1398 filename = self._actualfilename(tr)
1395 assert backupname != filename
1399 assert backupname != filename
1396
1400
1397 # use '_writedirstate' instead of 'write' to write changes certainly,
1401 # use '_writedirstate' instead of 'write' to write changes certainly,
1398 # because the latter omits writing out if transaction is running.
1402 # because the latter omits writing out if transaction is running.
1399 # output file will be used to create backup of dirstate at this point.
1403 # output file will be used to create backup of dirstate at this point.
1400 if self._dirty or not self._opener.exists(filename):
1404 if self._dirty or not self._opener.exists(filename):
1401 self._writedirstate(
1405 self._writedirstate(
1402 self._opener(filename, b"w", atomictemp=True, checkambig=True)
1406 self._opener(filename, b"w", atomictemp=True, checkambig=True)
1403 )
1407 )
1404
1408
1405 if tr:
1409 if tr:
1406 # ensure that subsequent tr.writepending returns True for
1410 # ensure that subsequent tr.writepending returns True for
1407 # changes written out above, even if dirstate is never
1411 # changes written out above, even if dirstate is never
1408 # changed after this
1412 # changed after this
1409 tr.addfilegenerator(
1413 tr.addfilegenerator(
1410 b'dirstate',
1414 b'dirstate',
1411 (self._filename,),
1415 (self._filename,),
1412 self._writedirstate,
1416 self._writedirstate,
1413 location=b'plain',
1417 location=b'plain',
1414 )
1418 )
1415
1419
1416 # ensure that pending file written above is unlinked at
1420 # ensure that pending file written above is unlinked at
1417 # failure, even if tr.writepending isn't invoked until the
1421 # failure, even if tr.writepending isn't invoked until the
1418 # end of this transaction
1422 # end of this transaction
1419 tr.registertmp(filename, location=b'plain')
1423 tr.registertmp(filename, location=b'plain')
1420
1424
1421 self._opener.tryunlink(backupname)
1425 self._opener.tryunlink(backupname)
1422 # hardlink backup is okay because _writedirstate is always called
1426 # hardlink backup is okay because _writedirstate is always called
1423 # with an "atomictemp=True" file.
1427 # with an "atomictemp=True" file.
1424 util.copyfile(
1428 util.copyfile(
1425 self._opener.join(filename),
1429 self._opener.join(filename),
1426 self._opener.join(backupname),
1430 self._opener.join(backupname),
1427 hardlink=True,
1431 hardlink=True,
1428 )
1432 )
1429
1433
1430 def restorebackup(self, tr, backupname):
1434 def restorebackup(self, tr, backupname):
1431 '''Restore dirstate by backup file'''
1435 '''Restore dirstate by backup file'''
1432 # this "invalidate()" prevents "wlock.release()" from writing
1436 # this "invalidate()" prevents "wlock.release()" from writing
1433 # changes of dirstate out after restoring from backup file
1437 # changes of dirstate out after restoring from backup file
1434 self.invalidate()
1438 self.invalidate()
1435 filename = self._actualfilename(tr)
1439 filename = self._actualfilename(tr)
1436 o = self._opener
1440 o = self._opener
1437 if util.samefile(o.join(backupname), o.join(filename)):
1441 if util.samefile(o.join(backupname), o.join(filename)):
1438 o.unlink(backupname)
1442 o.unlink(backupname)
1439 else:
1443 else:
1440 o.rename(backupname, filename, checkambig=True)
1444 o.rename(backupname, filename, checkambig=True)
1441
1445
1442 def clearbackup(self, tr, backupname):
1446 def clearbackup(self, tr, backupname):
1443 '''Clear backup file'''
1447 '''Clear backup file'''
1444 self._opener.unlink(backupname)
1448 self._opener.unlink(backupname)
General Comments 0
You need to be logged in to leave comments. Login now