##// END OF EJS Templates
dirstate: introduce an internal `_add` method...
marmoute -
r48389:f5c24c12 default
parent child Browse files
Show More
@@ -1,1436 +1,1440 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .pycompat import delattr
17 from .pycompat import delattr
18
18
19 from hgdemandimport import tracing
19 from hgdemandimport import tracing
20
20
21 from . import (
21 from . import (
22 dirstatemap,
22 dirstatemap,
23 encoding,
23 encoding,
24 error,
24 error,
25 match as matchmod,
25 match as matchmod,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 sparse,
30 sparse,
31 util,
31 util,
32 )
32 )
33
33
34 from .interfaces import (
34 from .interfaces import (
35 dirstate as intdirstate,
35 dirstate as intdirstate,
36 util as interfaceutil,
36 util as interfaceutil,
37 )
37 )
38
38
39 parsers = policy.importmod('parsers')
39 parsers = policy.importmod('parsers')
40 rustmod = policy.importrust('dirstate')
40 rustmod = policy.importrust('dirstate')
41
41
42 SUPPORTS_DIRSTATE_V2 = rustmod is not None
42 SUPPORTS_DIRSTATE_V2 = rustmod is not None
43
43
44 propertycache = util.propertycache
44 propertycache = util.propertycache
45 filecache = scmutil.filecache
45 filecache = scmutil.filecache
46 _rangemask = dirstatemap.rangemask
46 _rangemask = dirstatemap.rangemask
47
47
48 DirstateItem = parsers.DirstateItem
48 DirstateItem = parsers.DirstateItem
49
49
50
50
class repocache(filecache):
    """filecache variant for files that live under ``.hg/``."""

    def join(self, obj, fname):
        # Resolve the cached file's path through the dirstate's opener,
        # i.e. relative to the repository's .hg/ directory.
        return obj._opener.join(fname)
57
57
class rootcache(filecache):
    """filecache variant for files that live in the repository root."""

    def join(self, obj, fname):
        # Resolve the cached file's path relative to the working directory
        # root via the dirstate's fast join helper.
        return obj._join(fname)
64
64
65 def _getfsnow(vfs):
65 def _getfsnow(vfs):
66 '''Get "now" timestamp on filesystem'''
66 '''Get "now" timestamp on filesystem'''
67 tmpfd, tmpname = vfs.mkstemp()
67 tmpfd, tmpname = vfs.mkstemp()
68 try:
68 try:
69 return os.fstat(tmpfd)[stat.ST_MTIME]
69 return os.fstat(tmpfd)[stat.ST_MTIME]
70 finally:
70 finally:
71 os.close(tmpfd)
71 os.close(tmpfd)
72 vfs.unlink(tmpname)
72 vfs.unlink(tmpname)
73
73
74
74
75 @interfaceutil.implementer(intdirstate.idirstate)
75 @interfaceutil.implementer(intdirstate.idirstate)
76 class dirstate(object):
76 class dirstate(object):
def __init__(
    self,
    opener,
    ui,
    root,
    validate,
    sparsematchfn,
    nodeconstants,
    use_dirstate_v2,
):
    """Create a new dirstate object.

    opener is an open()-like callable that can be used to open the
    dirstate file; root is the root of the directory tracked by
    the dirstate.
    """
    self._use_dirstate_v2 = use_dirstate_v2
    self._nodeconstants = nodeconstants
    self._opener = opener
    self._validate = validate
    self._root = root
    self._sparsematchfn = sparsematchfn
    # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
    # UNC path pointing to root share (issue4557)
    self._rootdir = pathutil.normasprefix(root)
    self._dirty = False
    self._lastnormaltime = 0
    self._ui = ui
    self._filecache = {}
    self._parentwriters = 0
    self._filename = b'dirstate'
    self._pendingfilename = b'%s.pending' % self._filename
    self._plchangecallbacks = {}
    self._origpl = None
    self._updatedfiles = set()
    self._mapcls = dirstatemap.dirstatemap
    # Access and cache cwd early, so we don't access it for the first time
    # after a working-copy update caused it to not exist (accessing it then
    # raises an exception).
    self._cwd
117
117
def prefetch_parents(self):
    """Make sure the parents are loaded.

    Used to avoid a race condition.
    """
    # Touching the property is enough to force the dirstate map (and its
    # parent records) to be read from disk.
    self._pl
124
124
@contextlib.contextmanager
def parentchange(self):
    """Context manager for handling dirstate parents.

    If an exception occurs in the scope of the context manager,
    the incoherent dirstate won't be written when wlock is
    released.
    """
    self._parentwriters += 1
    yield
    # Typically we want the "undo" step of a context manager in a
    # finally block so it happens even when an exception
    # occurs. In this case, however, we only want to decrement
    # parentwriters if the code in the with statement exits
    # normally, so we don't have a try/finally here on purpose.
    self._parentwriters -= 1
141
141
def pendingparentchange(self):
    """Returns true if the dirstate is in the middle of a set of changes
    that modify the dirstate parent.
    """
    # Non-zero while inside one or more parentchange() contexts.
    return self._parentwriters > 0
147
147
@propertycache
def _map(self):
    """Return the dirstate contents (see documentation for dirstatemap)."""
    # The assignment replaces the propertycache descriptor on the
    # instance, so subsequent accesses are plain attribute lookups.
    self._map = self._mapcls(
        self._ui,
        self._opener,
        self._root,
        self._nodeconstants,
        self._use_dirstate_v2,
    )
    return self._map
159
159
@property
def _sparsematcher(self):
    """The matcher for the sparse checkout.

    The working directory may not include every file from a manifest. The
    matcher obtained by this property will match a path if it is to be
    included in the working directory.
    """
    # TODO there is potential to cache this property. For now, the matcher
    # is resolved on every access. (But the called function does use a
    # cache to keep the lookup fast.)
    return self._sparsematchfn()
172
172
@repocache(b'branch')
def _branch(self):
    """Current branch name (bytes), read from ``.hg/branch``.

    A missing or empty branch file means the default branch.
    """
    try:
        return self._opener.read(b"branch").strip() or b"default"
    except IOError as inst:
        # Only a missing file is acceptable; propagate anything else.
        if inst.errno != errno.ENOENT:
            raise
        return b"default"
181
181
@property
def _pl(self):
    # Raw (unvalidated) parent nodes as stored in the dirstate map.
    return self._map.parents()
185
185
def hasdir(self, d):
    """Return True if ``d`` is a directory containing tracked files."""
    return self._map.hastrackeddir(d)
188
188
@rootcache(b'.hgignore')
def _ignore(self):
    """Matcher built from all configured ignore files.

    Returns a never-matching matcher when no ignore file exists.
    """
    files = self._ignorefiles()
    if not files:
        return matchmod.never()

    pats = [b'include:%s' % f for f in files]
    return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
197
197
@propertycache
def _slash(self):
    # True when paths should be displayed with '/' even though the
    # platform separator differs (ui.slash configuration).
    return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
201
201
@propertycache
def _checklink(self):
    # Whether the filesystem at the repo root supports symlinks.
    return util.checklink(self._root)
205
205
@propertycache
def _checkexec(self):
    # Whether the filesystem at the repo root preserves the exec bit.
    return bool(util.checkexec(self._root))
209
209
@propertycache
def _checkcase(self):
    # True on case-insensitive filesystems (probed via the .hg directory).
    return not util.fscasesensitive(self._join(b'.hg'))
213
213
def _join(self, f):
    # much faster than os.path.join()
    # it's safe because f is always a relative path
    return self._rootdir + f
218
218
def flagfunc(self, buildfallback):
    """Return a function mapping a path to its flags (b'l', b'x' or b'').

    When the filesystem supports both symlinks and the exec bit the
    flags are read straight from lstat(); otherwise ``buildfallback``
    supplies a matcher used for whichever capability is missing.
    """
    if self._checklink and self._checkexec:
        # Fast path: the filesystem can answer both questions itself.
        def f(x):
            try:
                st = os.lstat(self._join(x))
                if util.statislink(st):
                    return b'l'
                if util.statisexec(st):
                    return b'x'
            except OSError:
                pass
            return b''

        return f

    fallback = buildfallback()
    if self._checklink:
        # Symlinks are real; exec bit comes from the fallback.
        def f(x):
            if os.path.islink(self._join(x)):
                return b'l'
            if b'x' in fallback(x):
                return b'x'
            return b''

        return f
    if self._checkexec:
        # Exec bit is real; symlinks come from the fallback.
        def f(x):
            if b'l' in fallback(x):
                return b'l'
            if util.isexec(self._join(x)):
                return b'x'
            return b''

        return f
    else:
        # Neither capability is available: rely entirely on the fallback.
        return fallback
258
258
@propertycache
def _cwd(self):
    # internal config: ui.forcecwd
    forcecwd = self._ui.config(b'ui', b'forcecwd')
    if forcecwd:
        return forcecwd
    return encoding.getcwd()
266
266
def getcwd(self):
    """Return the path from which a canonical path is calculated.

    This path should be used to resolve file patterns or to convert
    canonical paths back to file paths for display. It shouldn't be
    used to get real file paths. Use vfs functions instead.
    """
    cwd = self._cwd
    if cwd == self._root:
        return b''
    # self._root ends with a path separator if self._root is '/' or 'C:\'
    rootsep = self._root
    if not util.endswithsep(rootsep):
        rootsep += pycompat.ossep
    if cwd.startswith(rootsep):
        # Inside the repo: return the path relative to the root.
        return cwd[len(rootsep) :]
    else:
        # we're outside the repo. return an absolute path.
        return cwd
286
286
def pathto(self, f, cwd=None):
    """Return ``f`` expressed relative to ``cwd`` (default: getcwd()).

    Honors the ui.slash setting by converting separators when requested.
    """
    if cwd is None:
        cwd = self.getcwd()
    path = util.pathto(self._root, cwd, f)
    if self._slash:
        return util.pconvert(path)
    return path
294
294
def __getitem__(self, key):
    """Return the current state of key (a filename) in the dirstate.

    States are:
      n  normal
      m  needs merging
      r  marked for removal
      a  marked for addition
      ?  not tracked

    XXX The "state" is a bit obscure to be in the "public" API. we should
    consider migrating all user of this to going through the dirstate entry
    instead.
    """
    entry = self._map.get(key)
    # Untracked files have no entry and report b'?'.
    return entry.state if entry is not None else b'?'
313
313
def __contains__(self, key):
    """Return True when ``key`` has an entry in the dirstate map."""
    return key in self._map
316
316
def __iter__(self):
    # Iterate tracked filenames in sorted (deterministic) order.
    return iter(sorted(self._map))
319
319
def items(self):
    """Iterate over (filename, entry) pairs of the dirstate map."""
    return pycompat.iteritems(self._map)

# Python 2 spelling kept as an alias for compatibility.
iteritems = items
324
324
def directories(self):
    """Expose the directory entries recorded in the dirstate map."""
    return self._map.directories()
327
327
def parents(self):
    """Return both parent nodes, run through the validate callback."""
    return [self._validate(p) for p in self._pl]
330
330
def p1(self):
    """Return the validated first parent node."""
    return self._validate(self._pl[0])
333
333
def p2(self):
    """Return the validated second parent node."""
    return self._validate(self._pl[1])
336
336
@property
def in_merge(self):
    """True if a merge is in progress"""
    # A merge is in progress exactly when the second parent is set.
    return self._pl[1] != self._nodeconstants.nullid
341
341
def branch(self):
    """Return the current branch name in the local encoding."""
    return encoding.tolocal(self._branch)
344
344
def setparents(self, p1, p2=None):
    """Set dirstate parents to p1 and p2.

    When moving from two parents to one, "merged" entries are
    adjusted to normal and previous copy records discarded and
    returned by the call.

    See localrepo.setparents()
    """
    if p2 is None:
        p2 = self._nodeconstants.nullid
    if self._parentwriters == 0:
        raise ValueError(
            b"cannot set dirstate parent outside of "
            b"dirstate.parentchange context manager"
        )

    self._dirty = True
    oldp2 = self._pl[1]
    if self._origpl is None:
        self._origpl = self._pl
    self._map.setparents(p1, p2)
    copies = {}
    leaving_merge = (
        oldp2 != self._nodeconstants.nullid
        and p2 == self._nodeconstants.nullid
    )
    if leaving_merge:
        candidatefiles = self._map.non_normal_or_other_parent_paths()

        for f in candidatefiles:
            s = self._map.get(f)
            if s is None:
                continue

            # Discard "merged" markers when moving away from a merge state
            if s.merged:
                source = self._map.copymap.get(f)
                if source:
                    copies[f] = source
                self.normallookup(f)
            # Also fix up otherparent markers
            elif s.from_p2:
                source = self._map.copymap.get(f)
                if source:
                    copies[f] = source
                self._add(f)
    return copies
392
392
def setbranch(self, branch):
    """Persist ``branch`` (local encoding) to ``.hg/branch``."""
    self.__class__._branch.set(self, encoding.fromlocal(branch))
    f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
    try:
        f.write(self._branch + b'\n')
        f.close()

        # make sure filecache has the correct stat info for _branch after
        # replacing the underlying file
        ce = self._filecache[b'_branch']
        if ce:
            ce.refresh()
    except:  # re-raises
        f.discard()
        raise
408
408
def invalidate(self):
    """Causes the next access to reread the dirstate.

    This is different from localrepo.invalidatedirstate() because it always
    rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
    check whether the dirstate has changed before rereading it."""

    # Drop the cached properties so they are recomputed on next access.
    for a in ("_map", "_branch", "_ignore"):
        if a in self.__dict__:
            delattr(self, a)
    self._lastnormaltime = 0
    self._dirty = False
    self._updatedfiles.clear()
    self._parentwriters = 0
    self._origpl = None
424
424
def copy(self, source, dest):
    """Mark dest as a copy of source. Unmark dest if source is None."""
    if source == dest:
        # Self-copies are meaningless; ignore them.
        return
    self._dirty = True
    if source is not None:
        self._map.copymap[dest] = source
        self._updatedfiles.add(source)
        self._updatedfiles.add(dest)
    elif self._map.copymap.pop(dest, None):
        # An existing copy record was removed.
        self._updatedfiles.add(dest)
436
436
def copied(self, file):
    """Return the copy source of ``file``, or None if not a copy."""
    return self._map.copymap.get(file, None)
439
439
def copies(self):
    """Return the mapping of copy destinations to their sources."""
    return self._map.copymap
442
442
def _addpath(
    self,
    f,
    mode=0,
    size=None,
    mtime=None,
    added=False,
    merged=False,
    from_p2=False,
    possibly_dirty=False,
):
    """Record ``f`` in the dirstate map with the given state flags.

    When the file is newly added (or resurrected from a removed entry),
    validate the filename and reject paths that collide with tracked
    directories or with files shadowing one of ``f``'s parent
    directories.
    """
    entry = self._map.get(f)
    if added or entry is not None and entry.removed:
        scmutil.checkfilename(f)
        if self._map.hastrackeddir(f):
            msg = _(b'directory %r already in dirstate')
            msg %= pycompat.bytestr(f)
            raise error.Abort(msg)
        # shadows
        for d in pathutil.finddirs(f):
            if self._map.hastrackeddir(d):
                break
            entry = self._map.get(d)
            if entry is not None and not entry.removed:
                msg = _(b'file %r in dirstate clashes with %r')
                msg %= (pycompat.bytestr(d), pycompat.bytestr(f))
                raise error.Abort(msg)
    self._dirty = True
    self._updatedfiles.add(f)
    self._map.addfile(
        f,
        mode=mode,
        size=size,
        mtime=mtime,
        added=added,
        merged=merged,
        from_p2=from_p2,
        possibly_dirty=possibly_dirty,
    )
482
482
def normal(self, f, parentfiledata=None):
    """Mark a file normal and clean.

    parentfiledata: (mode, size, mtime) of the clean file

    parentfiledata should be computed from memory (for mode,
    size), as or close as possible from the point where we
    determined the file was clean, to limit the risk of the
    file having been changed by an external process between the
    moment where the file was determined to be clean and now."""
    if parentfiledata:
        (mode, size, mtime) = parentfiledata
    else:
        # Fall back to a fresh lstat of the file on disk.
        s = os.lstat(self._join(f))
        mode = s.st_mode
        size = s.st_size
        mtime = s[stat.ST_MTIME]
    self._addpath(f, mode=mode, size=size, mtime=mtime)
    self._map.copymap.pop(f, None)
    if f in self._map.nonnormalset:
        self._map.nonnormalset.remove(f)
    if mtime > self._lastnormaltime:
        # Remember the most recent modification timeslot for status(),
        # to make sure we won't miss future size-preserving file content
        # modifications that happen within the same timeslot.
        self._lastnormaltime = mtime
509
509
def normallookup(self, f):
    '''Mark a file normal, but possibly dirty.'''
    if self.in_merge:
        # if there is a merge going on and the file was either
        # "merged" or coming from other parent (-2) before
        # being removed, restore that state.
        entry = self._map.get(f)
        if entry is not None:
            # XXX this should probably be dealt with a a lower level
            # (see `merged_removed` and `from_p2_removed`)
            if entry.merged_removed or entry.from_p2_removed:
                source = self._map.copymap.get(f)
                if entry.merged_removed:
                    self.merge(f)
                elif entry.from_p2_removed:
                    self.otherparent(f)
                if source is not None:
                    self.copy(source, f)
                return
            elif entry.merged or entry.from_p2:
                return
    # Default case: record the file as tracked but of unknown cleanliness.
    self._addpath(f, possibly_dirty=True)
    self._map.copymap.pop(f, None)
533
533
def otherparent(self, f):
    '''Mark as coming from the other parent, always dirty.'''
    if not self.in_merge:
        msg = _(b"setting %r to other parent only allowed in merges") % f
        raise error.Abort(msg)
    entry = self._map.get(f)
    if entry is not None and entry.tracked:
        # merge-like
        self._addpath(f, merged=True)
    else:
        # add-like
        self._addpath(f, from_p2=True)
    self._map.copymap.pop(f, None)
547
547
def add(self, f):
    '''Mark a file added.'''
    # Public entry point; the actual work lives in the internal helper.
    self._add(f)

def _add(self, filename):
    """internal function to mark a file as added"""
    self._addpath(filename, added=True)
    # A freshly added file carries no copy information.
    self._map.copymap.pop(filename, None)
552
556
def remove(self, f):
    '''Mark a file removed.'''
    self._dirty = True
    self._updatedfiles.add(f)
    # The map needs to know whether a merge is in progress to pick the
    # right removal flavor.
    self._map.removefile(f, in_merge=self.in_merge)
558
562
def merge(self, f):
    '''Mark a file merged.'''
    # Outside of a merge this degrades to a plain normallookup.
    if not self.in_merge:
        return self.normallookup(f)
    return self.otherparent(f)
564
568
def drop(self, f):
    '''Drop a file from the dirstate'''
    if self._map.dropfile(f):
        # Only dirty the dirstate when something was actually removed.
        self._dirty = True
        self._updatedfiles.add(f)
        self._map.copymap.pop(f, None)
571
575
def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
    """Resolve the on-disk case folding of ``path`` and cache it.

    ``normed`` is the case-normalized form of ``path``; the resolved
    folding is stored in ``storemap`` keyed by ``normed``.
    """
    if exists is None:
        exists = os.path.lexists(os.path.join(self._root, path))
    if not exists:
        # Maybe a path component exists
        if not ignoremissing and b'/' in path:
            d, f = path.rsplit(b'/', 1)
            d = self._normalize(d, False, ignoremissing, None)
            folded = d + b"/" + f
        else:
            # No path components, preserve original case
            folded = path
    else:
        # recursively normalize leading directory components
        # against dirstate
        if b'/' in normed:
            d, f = normed.rsplit(b'/', 1)
            d = self._normalize(d, False, ignoremissing, True)
            r = self._root + b"/" + d
            folded = d + b"/" + util.fspath(f, r)
        else:
            folded = util.fspath(normed, self._root)
        storemap[normed] = folded

    return folded
597
601
def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
    """Return the dirstate's preferred case for file *path*.

    Cached answers come from the file fold map; unknown paths are either
    trusted as-is (*isknown*) or resolved against the filesystem.
    """
    normed = util.normcase(path)
    cached = self._map.filefoldmap.get(normed, None)
    if cached is not None:
        return cached
    if isknown:
        # The caller obtained this name by walking the disk, so its case
        # is already authoritative.
        return path
    return self._discoverpath(
        path, normed, ignoremissing, exists, self._map.filefoldmap
    )
def _normalize(self, path, isknown, ignoremissing=False, exists=None):
    """Return the dirstate's preferred case for *path* (file or directory)."""
    normed = util.normcase(path)
    folded = self._map.filefoldmap.get(normed, None)
    if folded is None:
        folded = self._map.dirfoldmap.get(normed, None)
    if folded is not None:
        return folded
    if isknown:
        return path
    # store discovered result in dirfoldmap so that future
    # normalizefile calls don't start matching directories
    return self._discoverpath(
        path, normed, ignoremissing, exists, self._map.dirfoldmap
    )
def normalize(self, path, isknown=False, ignoremissing=False):
    """Normalize the case of *path* on a casefolding filesystem.

    *isknown* tells us the filename came from walking the disk, which lets
    us avoid extra filesystem access.  With *ignoremissing*, missing paths
    are returned unchanged instead of being normalized component by
    component.

    Precedence for the normalized case:

    - version of the name already stored in the dirstate
    - version of the name stored on disk
    - version provided via command arguments
    """
    if not self._checkcase:
        # Case-sensitive filesystem: nothing to fold.
        return path
    return self._normalize(path, isknown, ignoremissing)
def clear(self):
    """Reset the dirstate to an empty state and mark it dirty."""
    self._updatedfiles.clear()
    self._map.clear()
    self._lastnormaltime = 0
    # The now-empty state still has to be written out eventually.
    self._dirty = True
def rebuild(self, parent, allfiles, changedfiles=None):
    """Reset the dirstate to describe revision *parent*.

    With *changedfiles*, only those entries are refreshed: looked up when
    present in *allfiles*, dropped otherwise.  Without it, the whole
    dirstate is rebuilt from *allfiles*.
    """
    if changedfiles is None:
        # Rebuild the entire dirstate.  clear() resets _lastnormaltime;
        # restore it afterwards since we are not touching the filesystem.
        to_lookup = allfiles
        to_drop = []
        saved_lastnormaltime = self._lastnormaltime
        self.clear()
        self._lastnormaltime = saved_lastnormaltime
    elif len(changedfiles) >= 10:
        changed = set(changedfiles)
        to_lookup = changed & set(allfiles)
        to_drop = changed - to_lookup
    else:
        # Few changed files: avoid turning allfiles into a set, which can
        # be expensive when it is large.
        to_lookup = [f for f in changedfiles if f in allfiles]
        to_drop = [f for f in changedfiles if f not in allfiles]

    if self._origpl is None:
        self._origpl = self._pl
    self._map.setparents(parent, self._nodeconstants.nullid)

    for f in to_lookup:
        self.normallookup(f)
    for f in to_drop:
        self.drop(f)

    self._dirty = True
def identity(self):
    """Return a token identifying the storage backing this dirstate.

    Comparing this token with one captured earlier tells callers whether
    the underlying storage changed, i.e. whether writing changes based on
    the earlier state would still be consistent.
    """
    ident = self._map.identity
    return ident
def write(self, tr):
    """Persist pending dirstate changes.

    With a transaction *tr*, the actual write is delayed and registered as
    a transaction file generator; otherwise the dirstate file is written
    out immediately.  A clean dirstate is a no-op.
    """
    if not self._dirty:
        return

    if not tr:
        # No transaction: write synchronously through the opener.
        st = self._opener(
            self._filename, b"w", atomictemp=True, checkambig=True
        )
        self._writedirstate(st)
        return

    # 'dirstate.write()' is not only for writing in-memory
    # changes out, but also for dropping ambiguous timestamp.
    # delayed writing re-raise "ambiguous timestamp issue".
    # See also the wiki page below for detail:
    # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan

    # emulate dropping timestamp in 'parsers.pack_dirstate'
    now = _getfsnow(self._opener)
    self._map.clearambiguoustimes(self._updatedfiles, now)

    # emulate that all 'dirstate.normal' results are written out
    self._lastnormaltime = 0
    self._updatedfiles.clear()

    # delay writing in-memory changes out
    tr.addfilegenerator(
        b'dirstate',
        (self._filename,),
        self._writedirstate,
        location=b'plain',
    )
def addparentchangecallback(self, category, callback):
    """Register *callback* to run when the working dir parents change.

    The callback is invoked as ``callback(dirstate, (oldp1, oldp2),
    (newp1, newp2))``.  *category* is a unique key: registering a second
    callback under the same category replaces the first one.
    """
    self._plchangecallbacks[category] = callback
def _writedirstate(self, st):
    """Serialize the dirstate map into the open file object *st*.

    Fires registered parent-change callbacks when the working directory
    parents moved since load, optionally sleeps so that file mtimes become
    unambiguous (debug.dirstate.delaywrite), then writes the map and marks
    the in-memory state clean.
    """
    # notify callbacks about parents change
    if self._origpl is not None and self._origpl != self._pl:
        # sorted() gives callbacks a deterministic invocation order by
        # category name.
        for c, callback in sorted(
            pycompat.iteritems(self._plchangecallbacks)
        ):
            callback(self, self._origpl, self._pl)
        self._origpl = None
    # use the modification time of the newly created temporary file as the
    # filesystem's notion of 'now'
    now = util.fstat(st)[stat.ST_MTIME] & _rangemask
    # enough 'delaywrite' prevents 'pack_dirstate' from dropping
    # timestamp of each entries in dirstate, because of 'now > mtime'
    delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
    if delaywrite > 0:
        # do we have any files to delay for?
        for f, e in pycompat.iteritems(self._map):
            if e.need_delay(now):
                import time  # to avoid useless import

                # rather than sleep n seconds, sleep until the next
                # multiple of n seconds
                clock = time.time()
                start = int(clock) - (int(clock) % delaywrite)
                end = start + delaywrite
                time.sleep(end - clock)
                now = end  # trust our estimate that the end is near now
                # one ambiguous entry is enough; the sleep covers them all
                break

    self._map.write(st, now)
    self._lastnormaltime = 0
    self._dirty = False
773 def _dirignore(self, f):
777 def _dirignore(self, f):
774 if self._ignore(f):
778 if self._ignore(f):
775 return True
779 return True
776 for p in pathutil.finddirs(f):
780 for p in pathutil.finddirs(f):
777 if self._ignore(p):
781 if self._ignore(p):
778 return True
782 return True
779 return False
783 return False
780
784
781 def _ignorefiles(self):
785 def _ignorefiles(self):
782 files = []
786 files = []
783 if os.path.exists(self._join(b'.hgignore')):
787 if os.path.exists(self._join(b'.hgignore')):
784 files.append(self._join(b'.hgignore'))
788 files.append(self._join(b'.hgignore'))
785 for name, path in self._ui.configitems(b"ui"):
789 for name, path in self._ui.configitems(b"ui"):
786 if name == b'ignore' or name.startswith(b'ignore.'):
790 if name == b'ignore' or name.startswith(b'ignore.'):
787 # we need to use os.path.join here rather than self._join
791 # we need to use os.path.join here rather than self._join
788 # because path is arbitrary and user-specified
792 # because path is arbitrary and user-specified
789 files.append(os.path.join(self._rootdir, util.expandpath(path)))
793 files.append(os.path.join(self._rootdir, util.expandpath(path)))
790 return files
794 return files
791
795
792 def _ignorefileandline(self, f):
796 def _ignorefileandline(self, f):
793 files = collections.deque(self._ignorefiles())
797 files = collections.deque(self._ignorefiles())
794 visited = set()
798 visited = set()
795 while files:
799 while files:
796 i = files.popleft()
800 i = files.popleft()
797 patterns = matchmod.readpatternfile(
801 patterns = matchmod.readpatternfile(
798 i, self._ui.warn, sourceinfo=True
802 i, self._ui.warn, sourceinfo=True
799 )
803 )
800 for pattern, lineno, line in patterns:
804 for pattern, lineno, line in patterns:
801 kind, p = matchmod._patsplit(pattern, b'glob')
805 kind, p = matchmod._patsplit(pattern, b'glob')
802 if kind == b"subinclude":
806 if kind == b"subinclude":
803 if p not in visited:
807 if p not in visited:
804 files.append(p)
808 files.append(p)
805 continue
809 continue
806 m = matchmod.match(
810 m = matchmod.match(
807 self._root, b'', [], [pattern], warn=self._ui.warn
811 self._root, b'', [], [pattern], warn=self._ui.warn
808 )
812 )
809 if m(f):
813 if m(f):
810 return (i, lineno, line)
814 return (i, lineno, line)
811 visited.add(i)
815 visited.add(i)
812 return (None, -1, b"")
816 return (None, -1, b"")
813
817
def _walkexplicit(self, match, subrepos):
    """Get stat data about the files explicitly specified by match.

    Return a triple (results, dirsfound, dirsnotfound).
    - results is a mapping from filename to stat result. It also contains
      listings mapping subrepos and .hg to None.
    - dirsfound is a list of files found to be directories.
    - dirsnotfound is a list of files that the dirstate thinks are
      directories and that were not found."""

    def badtype(mode):
        # Translate a non-file, non-symlink st_mode into a human-readable
        # error message for match.bad().
        kind = _(b'unknown')
        if stat.S_ISCHR(mode):
            kind = _(b'character device')
        elif stat.S_ISBLK(mode):
            kind = _(b'block device')
        elif stat.S_ISFIFO(mode):
            kind = _(b'fifo')
        elif stat.S_ISSOCK(mode):
            kind = _(b'socket')
        elif stat.S_ISDIR(mode):
            kind = _(b'directory')
        return _(b'unsupported file type (type is %s)') % kind

    # Bind frequently used attributes and module functions to locals for
    # the per-file loop below.
    badfn = match.bad
    dmap = self._map
    lstat = os.lstat
    getkind = stat.S_IFMT
    dirkind = stat.S_IFDIR
    regkind = stat.S_IFREG
    lnkkind = stat.S_IFLNK
    join = self._join
    dirsfound = []
    foundadd = dirsfound.append
    dirsnotfound = []
    notfoundadd = dirsnotfound.append

    if not match.isexact() and self._checkcase:
        normalize = self._normalize
    else:
        normalize = None

    # Discard explicit files that live inside a listed subrepo; both lists
    # are sorted so a single merge-style pass suffices.
    files = sorted(match.files())
    subrepos.sort()
    i, j = 0, 0
    while i < len(files) and j < len(subrepos):
        subpath = subrepos[j] + b"/"
        if files[i] < subpath:
            i += 1
            continue
        while i < len(files) and files[i].startswith(subpath):
            del files[i]
        j += 1

    if not files or b'' in files:
        files = [b'']
        # constructing the foldmap is expensive, so don't do it for the
        # common case where files is ['']
        normalize = None
    # Seed results with None sentinels for subrepos and .hg so the loop
    # below skips them.
    results = dict.fromkeys(subrepos)
    results[b'.hg'] = None

    for ff in files:
        if normalize:
            nf = normalize(ff, False, True)
        else:
            nf = ff
        if nf in results:
            continue

        try:
            st = lstat(join(nf))
            kind = getkind(st.st_mode)
            if kind == dirkind:
                if nf in dmap:
                    # file replaced by dir on disk but still in dirstate
                    results[nf] = None
                foundadd((nf, ff))
            elif kind == regkind or kind == lnkkind:
                results[nf] = st
            else:
                badfn(ff, badtype(kind))
                if nf in dmap:
                    results[nf] = None
        except OSError as inst:  # nf not found on disk - it is dirstate only
            if nf in dmap:  # does it exactly match a missing file?
                results[nf] = None
            else:  # does it match a missing directory?
                if self._map.hasdir(nf):
                    notfoundadd(nf)
                else:
                    badfn(ff, encoding.strtolocal(inst.strerror))

    # match.files() may contain explicitly-specified paths that shouldn't
    # be taken; drop them from the list of files found. dirsfound/notfound
    # aren't filtered here because they will be tested later.
    if match.anypats():
        for f in list(results):
            if f == b'.hg' or f in subrepos:
                # keep sentinel to disable further out-of-repo walks
                continue
            if not match(f):
                del results[f]

    # Case insensitive filesystems cannot rely on lstat() failing to detect
    # a case-only rename. Prune the stat object for any file that does not
    # match the case in the filesystem, if there are multiple files that
    # normalize to the same path.
    if match.isexact() and self._checkcase:
        normed = {}

        # Group stat'ed results by their case-normalized name.
        for f, st in pycompat.iteritems(results):
            if st is None:
                continue

            nc = util.normcase(f)
            paths = normed.get(nc)

            if paths is None:
                paths = set()
                normed[nc] = paths

            paths.add(f)

        # Any group with more than one spelling keeps only the spelling
        # that matches the filesystem; the others are downgraded to None.
        for norm, paths in pycompat.iteritems(normed):
            if len(paths) > 1:
                for path in paths:
                    folded = self._discoverpath(
                        path, norm, True, None, self._map.dirfoldmap
                    )
                    if path != folded:
                        results[path] = None

    return results, dirsfound, dirsnotfound
949 def walk(self, match, subrepos, unknown, ignored, full=True):
953 def walk(self, match, subrepos, unknown, ignored, full=True):
950 """
954 """
951 Walk recursively through the directory tree, finding all files
955 Walk recursively through the directory tree, finding all files
952 matched by match.
956 matched by match.
953
957
954 If full is False, maybe skip some known-clean files.
958 If full is False, maybe skip some known-clean files.
955
959
956 Return a dict mapping filename to stat-like object (either
960 Return a dict mapping filename to stat-like object (either
957 mercurial.osutil.stat instance or return value of os.stat()).
961 mercurial.osutil.stat instance or return value of os.stat()).
958
962
959 """
963 """
960 # full is a flag that extensions that hook into walk can use -- this
964 # full is a flag that extensions that hook into walk can use -- this
961 # implementation doesn't use it at all. This satisfies the contract
965 # implementation doesn't use it at all. This satisfies the contract
962 # because we only guarantee a "maybe".
966 # because we only guarantee a "maybe".
963
967
964 if ignored:
968 if ignored:
965 ignore = util.never
969 ignore = util.never
966 dirignore = util.never
970 dirignore = util.never
967 elif unknown:
971 elif unknown:
968 ignore = self._ignore
972 ignore = self._ignore
969 dirignore = self._dirignore
973 dirignore = self._dirignore
970 else:
974 else:
971 # if not unknown and not ignored, drop dir recursion and step 2
975 # if not unknown and not ignored, drop dir recursion and step 2
972 ignore = util.always
976 ignore = util.always
973 dirignore = util.always
977 dirignore = util.always
974
978
975 matchfn = match.matchfn
979 matchfn = match.matchfn
976 matchalways = match.always()
980 matchalways = match.always()
977 matchtdir = match.traversedir
981 matchtdir = match.traversedir
978 dmap = self._map
982 dmap = self._map
979 listdir = util.listdir
983 listdir = util.listdir
980 lstat = os.lstat
984 lstat = os.lstat
981 dirkind = stat.S_IFDIR
985 dirkind = stat.S_IFDIR
982 regkind = stat.S_IFREG
986 regkind = stat.S_IFREG
983 lnkkind = stat.S_IFLNK
987 lnkkind = stat.S_IFLNK
984 join = self._join
988 join = self._join
985
989
986 exact = skipstep3 = False
990 exact = skipstep3 = False
987 if match.isexact(): # match.exact
991 if match.isexact(): # match.exact
988 exact = True
992 exact = True
989 dirignore = util.always # skip step 2
993 dirignore = util.always # skip step 2
990 elif match.prefix(): # match.match, no patterns
994 elif match.prefix(): # match.match, no patterns
991 skipstep3 = True
995 skipstep3 = True
992
996
993 if not exact and self._checkcase:
997 if not exact and self._checkcase:
994 normalize = self._normalize
998 normalize = self._normalize
995 normalizefile = self._normalizefile
999 normalizefile = self._normalizefile
996 skipstep3 = False
1000 skipstep3 = False
997 else:
1001 else:
998 normalize = self._normalize
1002 normalize = self._normalize
999 normalizefile = None
1003 normalizefile = None
1000
1004
1001 # step 1: find all explicit files
1005 # step 1: find all explicit files
1002 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1006 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1003 if matchtdir:
1007 if matchtdir:
1004 for d in work:
1008 for d in work:
1005 matchtdir(d[0])
1009 matchtdir(d[0])
1006 for d in dirsnotfound:
1010 for d in dirsnotfound:
1007 matchtdir(d)
1011 matchtdir(d)
1008
1012
1009 skipstep3 = skipstep3 and not (work or dirsnotfound)
1013 skipstep3 = skipstep3 and not (work or dirsnotfound)
1010 work = [d for d in work if not dirignore(d[0])]
1014 work = [d for d in work if not dirignore(d[0])]
1011
1015
1012 # step 2: visit subdirectories
1016 # step 2: visit subdirectories
1013 def traverse(work, alreadynormed):
1017 def traverse(work, alreadynormed):
1014 wadd = work.append
1018 wadd = work.append
1015 while work:
1019 while work:
1016 tracing.counter('dirstate.walk work', len(work))
1020 tracing.counter('dirstate.walk work', len(work))
1017 nd = work.pop()
1021 nd = work.pop()
1018 visitentries = match.visitchildrenset(nd)
1022 visitentries = match.visitchildrenset(nd)
1019 if not visitentries:
1023 if not visitentries:
1020 continue
1024 continue
1021 if visitentries == b'this' or visitentries == b'all':
1025 if visitentries == b'this' or visitentries == b'all':
1022 visitentries = None
1026 visitentries = None
1023 skip = None
1027 skip = None
1024 if nd != b'':
1028 if nd != b'':
1025 skip = b'.hg'
1029 skip = b'.hg'
1026 try:
1030 try:
1027 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1031 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1028 entries = listdir(join(nd), stat=True, skip=skip)
1032 entries = listdir(join(nd), stat=True, skip=skip)
1029 except OSError as inst:
1033 except OSError as inst:
1030 if inst.errno in (errno.EACCES, errno.ENOENT):
1034 if inst.errno in (errno.EACCES, errno.ENOENT):
1031 match.bad(
1035 match.bad(
1032 self.pathto(nd), encoding.strtolocal(inst.strerror)
1036 self.pathto(nd), encoding.strtolocal(inst.strerror)
1033 )
1037 )
1034 continue
1038 continue
1035 raise
1039 raise
1036 for f, kind, st in entries:
1040 for f, kind, st in entries:
1037 # Some matchers may return files in the visitentries set,
1041 # Some matchers may return files in the visitentries set,
1038 # instead of 'this', if the matcher explicitly mentions them
1042 # instead of 'this', if the matcher explicitly mentions them
1039 # and is not an exactmatcher. This is acceptable; we do not
1043 # and is not an exactmatcher. This is acceptable; we do not
1040 # make any hard assumptions about file-or-directory below
1044 # make any hard assumptions about file-or-directory below
1041 # based on the presence of `f` in visitentries. If
1045 # based on the presence of `f` in visitentries. If
1042 # visitchildrenset returned a set, we can always skip the
1046 # visitchildrenset returned a set, we can always skip the
1043 # entries *not* in the set it provided regardless of whether
1047 # entries *not* in the set it provided regardless of whether
1044 # they're actually a file or a directory.
1048 # they're actually a file or a directory.
1045 if visitentries and f not in visitentries:
1049 if visitentries and f not in visitentries:
1046 continue
1050 continue
1047 if normalizefile:
1051 if normalizefile:
1048 # even though f might be a directory, we're only
1052 # even though f might be a directory, we're only
1049 # interested in comparing it to files currently in the
1053 # interested in comparing it to files currently in the
1050 # dmap -- therefore normalizefile is enough
1054 # dmap -- therefore normalizefile is enough
1051 nf = normalizefile(
1055 nf = normalizefile(
1052 nd and (nd + b"/" + f) or f, True, True
1056 nd and (nd + b"/" + f) or f, True, True
1053 )
1057 )
1054 else:
1058 else:
1055 nf = nd and (nd + b"/" + f) or f
1059 nf = nd and (nd + b"/" + f) or f
1056 if nf not in results:
1060 if nf not in results:
1057 if kind == dirkind:
1061 if kind == dirkind:
1058 if not ignore(nf):
1062 if not ignore(nf):
1059 if matchtdir:
1063 if matchtdir:
1060 matchtdir(nf)
1064 matchtdir(nf)
1061 wadd(nf)
1065 wadd(nf)
1062 if nf in dmap and (matchalways or matchfn(nf)):
1066 if nf in dmap and (matchalways or matchfn(nf)):
1063 results[nf] = None
1067 results[nf] = None
1064 elif kind == regkind or kind == lnkkind:
1068 elif kind == regkind or kind == lnkkind:
1065 if nf in dmap:
1069 if nf in dmap:
1066 if matchalways or matchfn(nf):
1070 if matchalways or matchfn(nf):
1067 results[nf] = st
1071 results[nf] = st
1068 elif (matchalways or matchfn(nf)) and not ignore(
1072 elif (matchalways or matchfn(nf)) and not ignore(
1069 nf
1073 nf
1070 ):
1074 ):
1071 # unknown file -- normalize if necessary
1075 # unknown file -- normalize if necessary
1072 if not alreadynormed:
1076 if not alreadynormed:
1073 nf = normalize(nf, False, True)
1077 nf = normalize(nf, False, True)
1074 results[nf] = st
1078 results[nf] = st
1075 elif nf in dmap and (matchalways or matchfn(nf)):
1079 elif nf in dmap and (matchalways or matchfn(nf)):
1076 results[nf] = None
1080 results[nf] = None
1077
1081
1078 for nd, d in work:
1082 for nd, d in work:
1079 # alreadynormed means that processwork doesn't have to do any
1083 # alreadynormed means that processwork doesn't have to do any
1080 # expensive directory normalization
1084 # expensive directory normalization
1081 alreadynormed = not normalize or nd == d
1085 alreadynormed = not normalize or nd == d
1082 traverse([d], alreadynormed)
1086 traverse([d], alreadynormed)
1083
1087
1084 for s in subrepos:
1088 for s in subrepos:
1085 del results[s]
1089 del results[s]
1086 del results[b'.hg']
1090 del results[b'.hg']
1087
1091
1088 # step 3: visit remaining files from dmap
1092 # step 3: visit remaining files from dmap
1089 if not skipstep3 and not exact:
1093 if not skipstep3 and not exact:
1090 # If a dmap file is not in results yet, it was either
1094 # If a dmap file is not in results yet, it was either
1091 # a) not matching matchfn b) ignored, c) missing, or d) under a
1095 # a) not matching matchfn b) ignored, c) missing, or d) under a
1092 # symlink directory.
1096 # symlink directory.
1093 if not results and matchalways:
1097 if not results and matchalways:
1094 visit = [f for f in dmap]
1098 visit = [f for f in dmap]
1095 else:
1099 else:
1096 visit = [f for f in dmap if f not in results and matchfn(f)]
1100 visit = [f for f in dmap if f not in results and matchfn(f)]
1097 visit.sort()
1101 visit.sort()
1098
1102
1099 if unknown:
1103 if unknown:
1100 # unknown == True means we walked all dirs under the roots
1104 # unknown == True means we walked all dirs under the roots
1101 # that wasn't ignored, and everything that matched was stat'ed
1105 # that wasn't ignored, and everything that matched was stat'ed
1102 # and is already in results.
1106 # and is already in results.
1103 # The rest must thus be ignored or under a symlink.
1107 # The rest must thus be ignored or under a symlink.
1104 audit_path = pathutil.pathauditor(self._root, cached=True)
1108 audit_path = pathutil.pathauditor(self._root, cached=True)
1105
1109
1106 for nf in iter(visit):
1110 for nf in iter(visit):
1107 # If a stat for the same file was already added with a
1111 # If a stat for the same file was already added with a
1108 # different case, don't add one for this, since that would
1112 # different case, don't add one for this, since that would
1109 # make it appear as if the file exists under both names
1113 # make it appear as if the file exists under both names
1110 # on disk.
1114 # on disk.
1111 if (
1115 if (
1112 normalizefile
1116 normalizefile
1113 and normalizefile(nf, True, True) in results
1117 and normalizefile(nf, True, True) in results
1114 ):
1118 ):
1115 results[nf] = None
1119 results[nf] = None
1116 # Report ignored items in the dmap as long as they are not
1120 # Report ignored items in the dmap as long as they are not
1117 # under a symlink directory.
1121 # under a symlink directory.
1118 elif audit_path.check(nf):
1122 elif audit_path.check(nf):
1119 try:
1123 try:
1120 results[nf] = lstat(join(nf))
1124 results[nf] = lstat(join(nf))
1121 # file was just ignored, no links, and exists
1125 # file was just ignored, no links, and exists
1122 except OSError:
1126 except OSError:
1123 # file doesn't exist
1127 # file doesn't exist
1124 results[nf] = None
1128 results[nf] = None
1125 else:
1129 else:
1126 # It's either missing or under a symlink directory
1130 # It's either missing or under a symlink directory
1127 # which we in this case report as missing
1131 # which we in this case report as missing
1128 results[nf] = None
1132 results[nf] = None
1129 else:
1133 else:
1130 # We may not have walked the full directory tree above,
1134 # We may not have walked the full directory tree above,
1131 # so stat and check everything we missed.
1135 # so stat and check everything we missed.
1132 iv = iter(visit)
1136 iv = iter(visit)
1133 for st in util.statfiles([join(i) for i in visit]):
1137 for st in util.statfiles([join(i) for i in visit]):
1134 results[next(iv)] = st
1138 results[next(iv)] = st
1135 return results
1139 return results
1136
1140
def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
    """Run the Rust status fast path and return ``(lookup, status)``.

    ``lookup`` is the list of files whose state could not be decided
    without reading content; ``status`` is a ``scmutil.status`` object.
    May raise ``rustmod.FallbackError`` (caught by the caller) when the
    Rust side cannot handle the request.
    """
    # Force Rayon (Rust parallelism library) to respect the number of
    # workers. This is a temporary workaround until Rust code knows
    # how to read the config file.
    numcpus = self._ui.configint(b"worker", b"numcpus")
    if numcpus is not None:
        encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)

    workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
    if not workers_enabled:
        # Pin Rayon to a single thread when workers are disabled.
        encoding.environ[b"RAYON_NUM_THREADS"] = b"1"

    # The Rust call returns all status buckets plus bookkeeping data in
    # one flat tuple; keep the unpack order in sync with rustmod.status.
    (
        lookup,
        modified,
        added,
        removed,
        deleted,
        clean,
        ignored,
        unknown,
        warnings,
        bad,
        traversed,
        dirty,
    ) = rustmod.status(
        self._map._rustmap,
        matcher,
        self._rootdir,
        self._ignorefiles(),
        self._checkexec,
        self._lastnormaltime,
        bool(list_clean),
        bool(list_ignored),
        bool(list_unknown),
        bool(matcher.traversedir),
    )

    # The Rust side may have updated dirstate entries; remember that the
    # in-memory map needs to be written out.
    self._dirty |= dirty

    if matcher.traversedir:
        for dir in traversed:
            matcher.traversedir(dir)

    if self._ui.warn:
        for item in warnings:
            if isinstance(item, tuple):
                # (file_path, syntax) pair: an ignore file with a bad
                # syntax declaration.
                file_path, syntax = item
                msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
                    file_path,
                    syntax,
                )
                self._ui.warn(msg)
            else:
                # Bare path: an ignore/pattern file that could not be
                # read at all.
                msg = _(b"skipping unreadable pattern file '%s': %s\n")
                self._ui.warn(
                    msg
                    % (
                        pathutil.canonpath(
                            self._rootdir, self._rootdir, item
                        ),
                        b"No such file or directory",
                    )
                )

    for (fn, message) in bad:
        matcher.bad(fn, encoding.strtolocal(message))

    status = scmutil.status(
        modified=modified,
        added=added,
        removed=removed,
        deleted=deleted,
        unknown=unknown,
        ignored=ignored,
        clean=clean,
    )
    return (lookup, status)
1215
1219
def status(self, match, subrepos, ignored, clean, unknown):
    """Determine the status of the working copy relative to the
    dirstate and return a pair of (unsure, status), where status is of type
    scmutil.status and:

      unsure:
        files that might have been modified since the dirstate was
        written, but need to be read to be sure (size is the same
        but mtime differs)
      status.modified:
        files that have definitely been modified since the dirstate
        was written (different size or mode)
      status.clean:
        files that have definitely not been modified since the
        dirstate was written
    """
    # NOTE: the incoming ignored/clean/unknown flags are saved under
    # list* names because the bare names are immediately reused as the
    # result accumulator lists below.
    listignored, listclean, listunknown = ignored, clean, unknown
    lookup, modified, added, unknown, ignored = [], [], [], [], []
    removed, deleted, clean = [], [], []

    dmap = self._map
    dmap.preload()

    use_rust = True

    allowed_matchers = (
        matchmod.alwaysmatcher,
        matchmod.exactmatcher,
        matchmod.includematcher,
    )

    # Decide whether the Rust fast path can handle this request.
    if rustmod is None:
        use_rust = False
    elif self._checkcase:
        # Case-insensitive filesystems are not handled yet
        use_rust = False
    elif subrepos:
        use_rust = False
    elif sparse.enabled:
        use_rust = False
    elif not isinstance(match, allowed_matchers):
        # Some matchers have yet to be implemented
        use_rust = False

    if use_rust:
        try:
            return self._rust_status(
                match, listclean, listignored, listunknown
            )
        except rustmod.FallbackError:
            # Rust declined; fall through to the pure-Python path.
            pass

    # Sink used in place of list.append when a bucket is not requested.
    def noop(f):
        pass

    # Bind hot-loop lookups to locals for speed.
    dcontains = dmap.__contains__
    dget = dmap.__getitem__
    ladd = lookup.append  # aka "unsure"
    madd = modified.append
    aadd = added.append
    uadd = unknown.append if listunknown else noop
    iadd = ignored.append if listignored else noop
    radd = removed.append
    dadd = deleted.append
    cadd = clean.append if listclean else noop
    mexact = match.exact
    dirignore = self._dirignore
    checkexec = self._checkexec
    copymap = self._map.copymap
    lastnormaltime = self._lastnormaltime

    # We need to do full walks when either
    # - we're listing all clean files, or
    # - match.traversedir does something, because match.traversedir should
    #   be called for every dir in the working dir
    full = listclean or match.traversedir is not None
    for fn, st in pycompat.iteritems(
        self.walk(match, subrepos, listunknown, listignored, full=full)
    ):
        if not dcontains(fn):
            # File exists on disk but is not tracked: ignored or unknown.
            if (listignored or mexact(fn)) and dirignore(fn):
                if listignored:
                    iadd(fn)
            else:
                uadd(fn)
            continue

        # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
        # written like that for performance reasons. dmap[fn] is not a
        # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
        # opcode has fast paths when the value to be unpacked is a tuple or
        # a list, but falls back to creating a full-fledged iterator in
        # general. That is much slower than simply accessing and storing the
        # tuple members one by one.
        t = dget(fn)
        mode = t.mode
        size = t.size
        time = t.mtime

        if not st and t.tracked:
            # Tracked but absent on disk: deleted.
            dadd(fn)
        elif t.merged:
            madd(fn)
        elif t.added:
            aadd(fn)
        elif t.removed:
            radd(fn)
        elif t.tracked:
            if (
                size >= 0
                and (
                    (size != st.st_size and size != st.st_size & _rangemask)
                    or ((mode ^ st.st_mode) & 0o100 and checkexec)
                )
                or t.from_p2
                or fn in copymap
            ):
                if stat.S_ISLNK(st.st_mode) and size != st.st_size:
                    # issue6456: Size returned may be longer due to
                    # encryption on EXT-4 fscrypt, undecided.
                    ladd(fn)
                else:
                    madd(fn)
            elif (
                time != st[stat.ST_MTIME]
                and time != st[stat.ST_MTIME] & _rangemask
            ):
                ladd(fn)
            elif st[stat.ST_MTIME] == lastnormaltime:
                # fn may have just been marked as normal and it may have
                # changed in the same second without changing its size.
                # This can happen if we quickly do multiple commits.
                # Force lookup, so we don't miss such a racy file change.
                ladd(fn)
            elif listclean:
                cadd(fn)
    status = scmutil.status(
        modified, added, removed, deleted, unknown, ignored, clean
    )
    return (lookup, status)
1356
1360
def matches(self, match):
    """Return every file recorded in the dirstate (in whatever state)
    that the given matcher selects."""
    # Prefer the Rust-backed map when the Rust extensions are present.
    entries = self._map if rustmod is None else self._map._rustmap

    if match.always():
        # Everything matches: hand back the key view directly.
        return entries.keys()

    wanted = match.files()
    if match.isexact():
        # fast path -- filter the explicit file list against the dirstate,
        # since the explicit list is typically much smaller than the map
        return [name for name in wanted if name in entries]
    if match.prefix() and all(name in entries for name in wanted):
        # fast path -- all the patterns are known to be files, so the
        # match set is exactly that list
        return list(wanted)
    return [name for name in entries if match(name)]
1377
1381
1378 def _actualfilename(self, tr):
1382 def _actualfilename(self, tr):
1379 if tr:
1383 if tr:
1380 return self._pendingfilename
1384 return self._pendingfilename
1381 else:
1385 else:
1382 return self._filename
1386 return self._filename
1383
1387
def savebackup(self, tr, backupname):
    '''Save current dirstate into backup file

    ``tr`` is the active transaction (or None); ``backupname`` must
    differ from the live dirstate file name.
    '''
    filename = self._actualfilename(tr)
    assert backupname != filename

    # use '_writedirstate' instead of 'write' to write changes certainly,
    # because the latter omits writing out if transaction is running.
    # output file will be used to create backup of dirstate at this point.
    if self._dirty or not self._opener.exists(filename):
        self._writedirstate(
            self._opener(filename, b"w", atomictemp=True, checkambig=True)
        )

    if tr:
        # ensure that subsequent tr.writepending returns True for
        # changes written out above, even if dirstate is never
        # changed after this
        tr.addfilegenerator(
            b'dirstate',
            (self._filename,),
            self._writedirstate,
            location=b'plain',
        )

        # ensure that pending file written above is unlinked at
        # failure, even if tr.writepending isn't invoked until the
        # end of this transaction
        tr.registertmp(filename, location=b'plain')

    self._opener.tryunlink(backupname)
    # hardlink backup is okay because _writedirstate is always called
    # with an "atomictemp=True" file.
    util.copyfile(
        self._opener.join(filename),
        self._opener.join(backupname),
        hardlink=True,
    )
1421
1425
def restorebackup(self, tr, backupname):
    """Restore the dirstate from the named backup file."""
    # Drop in-memory state first: this prevents a later "wlock.release()"
    # from writing stale dirstate changes over the restored file.
    self.invalidate()
    target = self._actualfilename(tr)
    opener = self._opener
    if not util.samefile(opener.join(backupname), opener.join(target)):
        opener.rename(backupname, target, checkambig=True)
    else:
        # Backup and live file are already the same file on disk, so
        # removing the backup name is sufficient.
        opener.unlink(backupname)
1433
1437
def clearbackup(self, tr, backupname):
    """Delete the named dirstate backup file.

    ``tr`` is unused; it is accepted for interface symmetry with
    ``savebackup`` and ``restorebackup``.
    """
    self._opener.unlink(backupname)
General Comments 0
You need to be logged in to leave comments. Login now