##// END OF EJS Templates
dirstate: drop the now unused magic constants for the dirstate module...
marmoute -
r48311:587bb99e default
parent child Browse files
Show More
@@ -1,1445 +1,1436 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .pycompat import delattr
17 from .pycompat import delattr
18
18
19 from hgdemandimport import tracing
19 from hgdemandimport import tracing
20
20
21 from . import (
21 from . import (
22 dirstatemap,
22 dirstatemap,
23 encoding,
23 encoding,
24 error,
24 error,
25 match as matchmod,
25 match as matchmod,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 sparse,
30 sparse,
31 util,
31 util,
32 )
32 )
33
33
34 from .interfaces import (
34 from .interfaces import (
35 dirstate as intdirstate,
35 dirstate as intdirstate,
36 util as interfaceutil,
36 util as interfaceutil,
37 )
37 )
38
38
39 parsers = policy.importmod('parsers')
39 parsers = policy.importmod('parsers')
40 rustmod = policy.importrust('dirstate')
40 rustmod = policy.importrust('dirstate')
41
41
42 SUPPORTS_DIRSTATE_V2 = rustmod is not None
42 SUPPORTS_DIRSTATE_V2 = rustmod is not None
43
43
44 propertycache = util.propertycache
44 propertycache = util.propertycache
45 filecache = scmutil.filecache
45 filecache = scmutil.filecache
46 _rangemask = dirstatemap.rangemask
46 _rangemask = dirstatemap.rangemask
47
47
48 dirstatetuple = parsers.dirstatetuple
48 dirstatetuple = parsers.dirstatetuple
49
49
50 # a special value used internally for `size` if the file come from the other parent
51 FROM_P2 = dirstatemap.FROM_P2
52
53 # a special value used internally for `size` if the file is modified/merged/added
54 NONNORMAL = dirstatemap.NONNORMAL
55
56 # a special value used internally for `time` if the time is ambigeous
57 AMBIGUOUS_TIME = dirstatemap.AMBIGUOUS_TIME
58
59
50
60 class repocache(filecache):
51 class repocache(filecache):
61 """filecache for files in .hg/"""
52 """filecache for files in .hg/"""
62
53
63 def join(self, obj, fname):
54 def join(self, obj, fname):
64 return obj._opener.join(fname)
55 return obj._opener.join(fname)
65
56
66
57
67 class rootcache(filecache):
58 class rootcache(filecache):
68 """filecache for files in the repository root"""
59 """filecache for files in the repository root"""
69
60
70 def join(self, obj, fname):
61 def join(self, obj, fname):
71 return obj._join(fname)
62 return obj._join(fname)
72
63
73
64
74 def _getfsnow(vfs):
65 def _getfsnow(vfs):
75 '''Get "now" timestamp on filesystem'''
66 '''Get "now" timestamp on filesystem'''
76 tmpfd, tmpname = vfs.mkstemp()
67 tmpfd, tmpname = vfs.mkstemp()
77 try:
68 try:
78 return os.fstat(tmpfd)[stat.ST_MTIME]
69 return os.fstat(tmpfd)[stat.ST_MTIME]
79 finally:
70 finally:
80 os.close(tmpfd)
71 os.close(tmpfd)
81 vfs.unlink(tmpname)
72 vfs.unlink(tmpname)
82
73
83
74
84 @interfaceutil.implementer(intdirstate.idirstate)
75 @interfaceutil.implementer(intdirstate.idirstate)
85 class dirstate(object):
76 class dirstate(object):
86 def __init__(
77 def __init__(
87 self,
78 self,
88 opener,
79 opener,
89 ui,
80 ui,
90 root,
81 root,
91 validate,
82 validate,
92 sparsematchfn,
83 sparsematchfn,
93 nodeconstants,
84 nodeconstants,
94 use_dirstate_v2,
85 use_dirstate_v2,
95 ):
86 ):
96 """Create a new dirstate object.
87 """Create a new dirstate object.
97
88
98 opener is an open()-like callable that can be used to open the
89 opener is an open()-like callable that can be used to open the
99 dirstate file; root is the root of the directory tracked by
90 dirstate file; root is the root of the directory tracked by
100 the dirstate.
91 the dirstate.
101 """
92 """
102 self._use_dirstate_v2 = use_dirstate_v2
93 self._use_dirstate_v2 = use_dirstate_v2
103 self._nodeconstants = nodeconstants
94 self._nodeconstants = nodeconstants
104 self._opener = opener
95 self._opener = opener
105 self._validate = validate
96 self._validate = validate
106 self._root = root
97 self._root = root
107 self._sparsematchfn = sparsematchfn
98 self._sparsematchfn = sparsematchfn
108 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
99 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
109 # UNC path pointing to root share (issue4557)
100 # UNC path pointing to root share (issue4557)
110 self._rootdir = pathutil.normasprefix(root)
101 self._rootdir = pathutil.normasprefix(root)
111 self._dirty = False
102 self._dirty = False
112 self._lastnormaltime = 0
103 self._lastnormaltime = 0
113 self._ui = ui
104 self._ui = ui
114 self._filecache = {}
105 self._filecache = {}
115 self._parentwriters = 0
106 self._parentwriters = 0
116 self._filename = b'dirstate'
107 self._filename = b'dirstate'
117 self._pendingfilename = b'%s.pending' % self._filename
108 self._pendingfilename = b'%s.pending' % self._filename
118 self._plchangecallbacks = {}
109 self._plchangecallbacks = {}
119 self._origpl = None
110 self._origpl = None
120 self._updatedfiles = set()
111 self._updatedfiles = set()
121 self._mapcls = dirstatemap.dirstatemap
112 self._mapcls = dirstatemap.dirstatemap
122 # Access and cache cwd early, so we don't access it for the first time
113 # Access and cache cwd early, so we don't access it for the first time
123 # after a working-copy update caused it to not exist (accessing it then
114 # after a working-copy update caused it to not exist (accessing it then
124 # raises an exception).
115 # raises an exception).
125 self._cwd
116 self._cwd
126
117
127 def prefetch_parents(self):
118 def prefetch_parents(self):
128 """make sure the parents are loaded
119 """make sure the parents are loaded
129
120
130 Used to avoid a race condition.
121 Used to avoid a race condition.
131 """
122 """
132 self._pl
123 self._pl
133
124
134 @contextlib.contextmanager
125 @contextlib.contextmanager
135 def parentchange(self):
126 def parentchange(self):
136 """Context manager for handling dirstate parents.
127 """Context manager for handling dirstate parents.
137
128
138 If an exception occurs in the scope of the context manager,
129 If an exception occurs in the scope of the context manager,
139 the incoherent dirstate won't be written when wlock is
130 the incoherent dirstate won't be written when wlock is
140 released.
131 released.
141 """
132 """
142 self._parentwriters += 1
133 self._parentwriters += 1
143 yield
134 yield
144 # Typically we want the "undo" step of a context manager in a
135 # Typically we want the "undo" step of a context manager in a
145 # finally block so it happens even when an exception
136 # finally block so it happens even when an exception
146 # occurs. In this case, however, we only want to decrement
137 # occurs. In this case, however, we only want to decrement
147 # parentwriters if the code in the with statement exits
138 # parentwriters if the code in the with statement exits
148 # normally, so we don't have a try/finally here on purpose.
139 # normally, so we don't have a try/finally here on purpose.
149 self._parentwriters -= 1
140 self._parentwriters -= 1
150
141
151 def pendingparentchange(self):
142 def pendingparentchange(self):
152 """Returns true if the dirstate is in the middle of a set of changes
143 """Returns true if the dirstate is in the middle of a set of changes
153 that modify the dirstate parent.
144 that modify the dirstate parent.
154 """
145 """
155 return self._parentwriters > 0
146 return self._parentwriters > 0
156
147
157 @propertycache
148 @propertycache
158 def _map(self):
149 def _map(self):
159 """Return the dirstate contents (see documentation for dirstatemap)."""
150 """Return the dirstate contents (see documentation for dirstatemap)."""
160 self._map = self._mapcls(
151 self._map = self._mapcls(
161 self._ui,
152 self._ui,
162 self._opener,
153 self._opener,
163 self._root,
154 self._root,
164 self._nodeconstants,
155 self._nodeconstants,
165 self._use_dirstate_v2,
156 self._use_dirstate_v2,
166 )
157 )
167 return self._map
158 return self._map
168
159
169 @property
160 @property
170 def _sparsematcher(self):
161 def _sparsematcher(self):
171 """The matcher for the sparse checkout.
162 """The matcher for the sparse checkout.
172
163
173 The working directory may not include every file from a manifest. The
164 The working directory may not include every file from a manifest. The
174 matcher obtained by this property will match a path if it is to be
165 matcher obtained by this property will match a path if it is to be
175 included in the working directory.
166 included in the working directory.
176 """
167 """
177 # TODO there is potential to cache this property. For now, the matcher
168 # TODO there is potential to cache this property. For now, the matcher
178 # is resolved on every access. (But the called function does use a
169 # is resolved on every access. (But the called function does use a
179 # cache to keep the lookup fast.)
170 # cache to keep the lookup fast.)
180 return self._sparsematchfn()
171 return self._sparsematchfn()
181
172
182 @repocache(b'branch')
173 @repocache(b'branch')
183 def _branch(self):
174 def _branch(self):
184 try:
175 try:
185 return self._opener.read(b"branch").strip() or b"default"
176 return self._opener.read(b"branch").strip() or b"default"
186 except IOError as inst:
177 except IOError as inst:
187 if inst.errno != errno.ENOENT:
178 if inst.errno != errno.ENOENT:
188 raise
179 raise
189 return b"default"
180 return b"default"
190
181
191 @property
182 @property
192 def _pl(self):
183 def _pl(self):
193 return self._map.parents()
184 return self._map.parents()
194
185
195 def hasdir(self, d):
186 def hasdir(self, d):
196 return self._map.hastrackeddir(d)
187 return self._map.hastrackeddir(d)
197
188
198 @rootcache(b'.hgignore')
189 @rootcache(b'.hgignore')
199 def _ignore(self):
190 def _ignore(self):
200 files = self._ignorefiles()
191 files = self._ignorefiles()
201 if not files:
192 if not files:
202 return matchmod.never()
193 return matchmod.never()
203
194
204 pats = [b'include:%s' % f for f in files]
195 pats = [b'include:%s' % f for f in files]
205 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
196 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
206
197
207 @propertycache
198 @propertycache
208 def _slash(self):
199 def _slash(self):
209 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
200 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
210
201
211 @propertycache
202 @propertycache
212 def _checklink(self):
203 def _checklink(self):
213 return util.checklink(self._root)
204 return util.checklink(self._root)
214
205
215 @propertycache
206 @propertycache
216 def _checkexec(self):
207 def _checkexec(self):
217 return bool(util.checkexec(self._root))
208 return bool(util.checkexec(self._root))
218
209
219 @propertycache
210 @propertycache
220 def _checkcase(self):
211 def _checkcase(self):
221 return not util.fscasesensitive(self._join(b'.hg'))
212 return not util.fscasesensitive(self._join(b'.hg'))
222
213
223 def _join(self, f):
214 def _join(self, f):
224 # much faster than os.path.join()
215 # much faster than os.path.join()
225 # it's safe because f is always a relative path
216 # it's safe because f is always a relative path
226 return self._rootdir + f
217 return self._rootdir + f
227
218
228 def flagfunc(self, buildfallback):
219 def flagfunc(self, buildfallback):
229 if self._checklink and self._checkexec:
220 if self._checklink and self._checkexec:
230
221
231 def f(x):
222 def f(x):
232 try:
223 try:
233 st = os.lstat(self._join(x))
224 st = os.lstat(self._join(x))
234 if util.statislink(st):
225 if util.statislink(st):
235 return b'l'
226 return b'l'
236 if util.statisexec(st):
227 if util.statisexec(st):
237 return b'x'
228 return b'x'
238 except OSError:
229 except OSError:
239 pass
230 pass
240 return b''
231 return b''
241
232
242 return f
233 return f
243
234
244 fallback = buildfallback()
235 fallback = buildfallback()
245 if self._checklink:
236 if self._checklink:
246
237
247 def f(x):
238 def f(x):
248 if os.path.islink(self._join(x)):
239 if os.path.islink(self._join(x)):
249 return b'l'
240 return b'l'
250 if b'x' in fallback(x):
241 if b'x' in fallback(x):
251 return b'x'
242 return b'x'
252 return b''
243 return b''
253
244
254 return f
245 return f
255 if self._checkexec:
246 if self._checkexec:
256
247
257 def f(x):
248 def f(x):
258 if b'l' in fallback(x):
249 if b'l' in fallback(x):
259 return b'l'
250 return b'l'
260 if util.isexec(self._join(x)):
251 if util.isexec(self._join(x)):
261 return b'x'
252 return b'x'
262 return b''
253 return b''
263
254
264 return f
255 return f
265 else:
256 else:
266 return fallback
257 return fallback
267
258
268 @propertycache
259 @propertycache
269 def _cwd(self):
260 def _cwd(self):
270 # internal config: ui.forcecwd
261 # internal config: ui.forcecwd
271 forcecwd = self._ui.config(b'ui', b'forcecwd')
262 forcecwd = self._ui.config(b'ui', b'forcecwd')
272 if forcecwd:
263 if forcecwd:
273 return forcecwd
264 return forcecwd
274 return encoding.getcwd()
265 return encoding.getcwd()
275
266
276 def getcwd(self):
267 def getcwd(self):
277 """Return the path from which a canonical path is calculated.
268 """Return the path from which a canonical path is calculated.
278
269
279 This path should be used to resolve file patterns or to convert
270 This path should be used to resolve file patterns or to convert
280 canonical paths back to file paths for display. It shouldn't be
271 canonical paths back to file paths for display. It shouldn't be
281 used to get real file paths. Use vfs functions instead.
272 used to get real file paths. Use vfs functions instead.
282 """
273 """
283 cwd = self._cwd
274 cwd = self._cwd
284 if cwd == self._root:
275 if cwd == self._root:
285 return b''
276 return b''
286 # self._root ends with a path separator if self._root is '/' or 'C:\'
277 # self._root ends with a path separator if self._root is '/' or 'C:\'
287 rootsep = self._root
278 rootsep = self._root
288 if not util.endswithsep(rootsep):
279 if not util.endswithsep(rootsep):
289 rootsep += pycompat.ossep
280 rootsep += pycompat.ossep
290 if cwd.startswith(rootsep):
281 if cwd.startswith(rootsep):
291 return cwd[len(rootsep) :]
282 return cwd[len(rootsep) :]
292 else:
283 else:
293 # we're outside the repo. return an absolute path.
284 # we're outside the repo. return an absolute path.
294 return cwd
285 return cwd
295
286
296 def pathto(self, f, cwd=None):
287 def pathto(self, f, cwd=None):
297 if cwd is None:
288 if cwd is None:
298 cwd = self.getcwd()
289 cwd = self.getcwd()
299 path = util.pathto(self._root, cwd, f)
290 path = util.pathto(self._root, cwd, f)
300 if self._slash:
291 if self._slash:
301 return util.pconvert(path)
292 return util.pconvert(path)
302 return path
293 return path
303
294
304 def __getitem__(self, key):
295 def __getitem__(self, key):
305 """Return the current state of key (a filename) in the dirstate.
296 """Return the current state of key (a filename) in the dirstate.
306
297
307 States are:
298 States are:
308 n normal
299 n normal
309 m needs merging
300 m needs merging
310 r marked for removal
301 r marked for removal
311 a marked for addition
302 a marked for addition
312 ? not tracked
303 ? not tracked
313
304
314 XXX The "state" is a bit obscure to be in the "public" API. we should
305 XXX The "state" is a bit obscure to be in the "public" API. we should
315 consider migrating all user of this to going through the dirstate entry
306 consider migrating all user of this to going through the dirstate entry
316 instead.
307 instead.
317 """
308 """
318 entry = self._map.get(key)
309 entry = self._map.get(key)
319 if entry is not None:
310 if entry is not None:
320 return entry.state
311 return entry.state
321 return b'?'
312 return b'?'
322
313
323 def __contains__(self, key):
314 def __contains__(self, key):
324 return key in self._map
315 return key in self._map
325
316
326 def __iter__(self):
317 def __iter__(self):
327 return iter(sorted(self._map))
318 return iter(sorted(self._map))
328
319
329 def items(self):
320 def items(self):
330 return pycompat.iteritems(self._map)
321 return pycompat.iteritems(self._map)
331
322
332 iteritems = items
323 iteritems = items
333
324
334 def directories(self):
325 def directories(self):
335 return self._map.directories()
326 return self._map.directories()
336
327
337 def parents(self):
328 def parents(self):
338 return [self._validate(p) for p in self._pl]
329 return [self._validate(p) for p in self._pl]
339
330
340 def p1(self):
331 def p1(self):
341 return self._validate(self._pl[0])
332 return self._validate(self._pl[0])
342
333
343 def p2(self):
334 def p2(self):
344 return self._validate(self._pl[1])
335 return self._validate(self._pl[1])
345
336
346 @property
337 @property
347 def in_merge(self):
338 def in_merge(self):
348 """True if a merge is in progress"""
339 """True if a merge is in progress"""
349 return self._pl[1] != self._nodeconstants.nullid
340 return self._pl[1] != self._nodeconstants.nullid
350
341
351 def branch(self):
342 def branch(self):
352 return encoding.tolocal(self._branch)
343 return encoding.tolocal(self._branch)
353
344
354 def setparents(self, p1, p2=None):
345 def setparents(self, p1, p2=None):
355 """Set dirstate parents to p1 and p2.
346 """Set dirstate parents to p1 and p2.
356
347
357 When moving from two parents to one, "merged" entries a
348 When moving from two parents to one, "merged" entries a
358 adjusted to normal and previous copy records discarded and
349 adjusted to normal and previous copy records discarded and
359 returned by the call.
350 returned by the call.
360
351
361 See localrepo.setparents()
352 See localrepo.setparents()
362 """
353 """
363 if p2 is None:
354 if p2 is None:
364 p2 = self._nodeconstants.nullid
355 p2 = self._nodeconstants.nullid
365 if self._parentwriters == 0:
356 if self._parentwriters == 0:
366 raise ValueError(
357 raise ValueError(
367 b"cannot set dirstate parent outside of "
358 b"cannot set dirstate parent outside of "
368 b"dirstate.parentchange context manager"
359 b"dirstate.parentchange context manager"
369 )
360 )
370
361
371 self._dirty = True
362 self._dirty = True
372 oldp2 = self._pl[1]
363 oldp2 = self._pl[1]
373 if self._origpl is None:
364 if self._origpl is None:
374 self._origpl = self._pl
365 self._origpl = self._pl
375 self._map.setparents(p1, p2)
366 self._map.setparents(p1, p2)
376 copies = {}
367 copies = {}
377 if (
368 if (
378 oldp2 != self._nodeconstants.nullid
369 oldp2 != self._nodeconstants.nullid
379 and p2 == self._nodeconstants.nullid
370 and p2 == self._nodeconstants.nullid
380 ):
371 ):
381 candidatefiles = self._map.non_normal_or_other_parent_paths()
372 candidatefiles = self._map.non_normal_or_other_parent_paths()
382
373
383 for f in candidatefiles:
374 for f in candidatefiles:
384 s = self._map.get(f)
375 s = self._map.get(f)
385 if s is None:
376 if s is None:
386 continue
377 continue
387
378
388 # Discard "merged" markers when moving away from a merge state
379 # Discard "merged" markers when moving away from a merge state
389 if s.merged:
380 if s.merged:
390 source = self._map.copymap.get(f)
381 source = self._map.copymap.get(f)
391 if source:
382 if source:
392 copies[f] = source
383 copies[f] = source
393 self.normallookup(f)
384 self.normallookup(f)
394 # Also fix up otherparent markers
385 # Also fix up otherparent markers
395 elif s.from_p2:
386 elif s.from_p2:
396 source = self._map.copymap.get(f)
387 source = self._map.copymap.get(f)
397 if source:
388 if source:
398 copies[f] = source
389 copies[f] = source
399 self.add(f)
390 self.add(f)
400 return copies
391 return copies
401
392
402 def setbranch(self, branch):
393 def setbranch(self, branch):
403 self.__class__._branch.set(self, encoding.fromlocal(branch))
394 self.__class__._branch.set(self, encoding.fromlocal(branch))
404 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
395 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
405 try:
396 try:
406 f.write(self._branch + b'\n')
397 f.write(self._branch + b'\n')
407 f.close()
398 f.close()
408
399
409 # make sure filecache has the correct stat info for _branch after
400 # make sure filecache has the correct stat info for _branch after
410 # replacing the underlying file
401 # replacing the underlying file
411 ce = self._filecache[b'_branch']
402 ce = self._filecache[b'_branch']
412 if ce:
403 if ce:
413 ce.refresh()
404 ce.refresh()
414 except: # re-raises
405 except: # re-raises
415 f.discard()
406 f.discard()
416 raise
407 raise
417
408
418 def invalidate(self):
409 def invalidate(self):
419 """Causes the next access to reread the dirstate.
410 """Causes the next access to reread the dirstate.
420
411
421 This is different from localrepo.invalidatedirstate() because it always
412 This is different from localrepo.invalidatedirstate() because it always
422 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
413 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
423 check whether the dirstate has changed before rereading it."""
414 check whether the dirstate has changed before rereading it."""
424
415
425 for a in ("_map", "_branch", "_ignore"):
416 for a in ("_map", "_branch", "_ignore"):
426 if a in self.__dict__:
417 if a in self.__dict__:
427 delattr(self, a)
418 delattr(self, a)
428 self._lastnormaltime = 0
419 self._lastnormaltime = 0
429 self._dirty = False
420 self._dirty = False
430 self._updatedfiles.clear()
421 self._updatedfiles.clear()
431 self._parentwriters = 0
422 self._parentwriters = 0
432 self._origpl = None
423 self._origpl = None
433
424
434 def copy(self, source, dest):
425 def copy(self, source, dest):
435 """Mark dest as a copy of source. Unmark dest if source is None."""
426 """Mark dest as a copy of source. Unmark dest if source is None."""
436 if source == dest:
427 if source == dest:
437 return
428 return
438 self._dirty = True
429 self._dirty = True
439 if source is not None:
430 if source is not None:
440 self._map.copymap[dest] = source
431 self._map.copymap[dest] = source
441 self._updatedfiles.add(source)
432 self._updatedfiles.add(source)
442 self._updatedfiles.add(dest)
433 self._updatedfiles.add(dest)
443 elif self._map.copymap.pop(dest, None):
434 elif self._map.copymap.pop(dest, None):
444 self._updatedfiles.add(dest)
435 self._updatedfiles.add(dest)
445
436
446 def copied(self, file):
437 def copied(self, file):
447 return self._map.copymap.get(file, None)
438 return self._map.copymap.get(file, None)
448
439
449 def copies(self):
440 def copies(self):
450 return self._map.copymap
441 return self._map.copymap
451
442
452 def _addpath(
443 def _addpath(
453 self,
444 self,
454 f,
445 f,
455 state,
446 state,
456 mode,
447 mode,
457 size=None,
448 size=None,
458 mtime=None,
449 mtime=None,
459 from_p2=False,
450 from_p2=False,
460 possibly_dirty=False,
451 possibly_dirty=False,
461 ):
452 ):
462 oldstate = self[f]
453 oldstate = self[f]
463 if state == b'a' or oldstate == b'r':
454 if state == b'a' or oldstate == b'r':
464 scmutil.checkfilename(f)
455 scmutil.checkfilename(f)
465 if self._map.hastrackeddir(f):
456 if self._map.hastrackeddir(f):
466 msg = _(b'directory %r already in dirstate')
457 msg = _(b'directory %r already in dirstate')
467 msg %= pycompat.bytestr(f)
458 msg %= pycompat.bytestr(f)
468 raise error.Abort(msg)
459 raise error.Abort(msg)
469 # shadows
460 # shadows
470 for d in pathutil.finddirs(f):
461 for d in pathutil.finddirs(f):
471 if self._map.hastrackeddir(d):
462 if self._map.hastrackeddir(d):
472 break
463 break
473 entry = self._map.get(d)
464 entry = self._map.get(d)
474 if entry is not None and not entry.removed:
465 if entry is not None and not entry.removed:
475 msg = _(b'file %r in dirstate clashes with %r')
466 msg = _(b'file %r in dirstate clashes with %r')
476 msg %= (pycompat.bytestr(d), pycompat.bytestr(f))
467 msg %= (pycompat.bytestr(d), pycompat.bytestr(f))
477 raise error.Abort(msg)
468 raise error.Abort(msg)
478 self._dirty = True
469 self._dirty = True
479 self._updatedfiles.add(f)
470 self._updatedfiles.add(f)
480 self._map.addfile(
471 self._map.addfile(
481 f,
472 f,
482 oldstate,
473 oldstate,
483 state=state,
474 state=state,
484 mode=mode,
475 mode=mode,
485 size=size,
476 size=size,
486 mtime=mtime,
477 mtime=mtime,
487 from_p2=from_p2,
478 from_p2=from_p2,
488 possibly_dirty=possibly_dirty,
479 possibly_dirty=possibly_dirty,
489 )
480 )
490
481
491 def normal(self, f, parentfiledata=None):
482 def normal(self, f, parentfiledata=None):
492 """Mark a file normal and clean.
483 """Mark a file normal and clean.
493
484
494 parentfiledata: (mode, size, mtime) of the clean file
485 parentfiledata: (mode, size, mtime) of the clean file
495
486
496 parentfiledata should be computed from memory (for mode,
487 parentfiledata should be computed from memory (for mode,
497 size), as or close as possible from the point where we
488 size), as or close as possible from the point where we
498 determined the file was clean, to limit the risk of the
489 determined the file was clean, to limit the risk of the
499 file having been changed by an external process between the
490 file having been changed by an external process between the
500 moment where the file was determined to be clean and now."""
491 moment where the file was determined to be clean and now."""
501 if parentfiledata:
492 if parentfiledata:
502 (mode, size, mtime) = parentfiledata
493 (mode, size, mtime) = parentfiledata
503 else:
494 else:
504 s = os.lstat(self._join(f))
495 s = os.lstat(self._join(f))
505 mode = s.st_mode
496 mode = s.st_mode
506 size = s.st_size
497 size = s.st_size
507 mtime = s[stat.ST_MTIME]
498 mtime = s[stat.ST_MTIME]
508 self._addpath(f, b'n', mode, size, mtime)
499 self._addpath(f, b'n', mode, size, mtime)
509 self._map.copymap.pop(f, None)
500 self._map.copymap.pop(f, None)
510 if f in self._map.nonnormalset:
501 if f in self._map.nonnormalset:
511 self._map.nonnormalset.remove(f)
502 self._map.nonnormalset.remove(f)
512 if mtime > self._lastnormaltime:
503 if mtime > self._lastnormaltime:
513 # Remember the most recent modification timeslot for status(),
504 # Remember the most recent modification timeslot for status(),
514 # to make sure we won't miss future size-preserving file content
505 # to make sure we won't miss future size-preserving file content
515 # modifications that happen within the same timeslot.
506 # modifications that happen within the same timeslot.
516 self._lastnormaltime = mtime
507 self._lastnormaltime = mtime
517
508
518 def normallookup(self, f):
509 def normallookup(self, f):
519 '''Mark a file normal, but possibly dirty.'''
510 '''Mark a file normal, but possibly dirty.'''
520 if self.in_merge:
511 if self.in_merge:
521 # if there is a merge going on and the file was either
512 # if there is a merge going on and the file was either
522 # "merged" or coming from other parent (-2) before
513 # "merged" or coming from other parent (-2) before
523 # being removed, restore that state.
514 # being removed, restore that state.
524 entry = self._map.get(f)
515 entry = self._map.get(f)
525 if entry is not None:
516 if entry is not None:
526 # XXX this should probably be dealt with a a lower level
517 # XXX this should probably be dealt with a a lower level
527 # (see `merged_removed` and `from_p2_removed`)
518 # (see `merged_removed` and `from_p2_removed`)
528 if entry.merged_removed or entry.from_p2_removed:
519 if entry.merged_removed or entry.from_p2_removed:
529 source = self._map.copymap.get(f)
520 source = self._map.copymap.get(f)
530 if entry.merged_removed:
521 if entry.merged_removed:
531 self.merge(f)
522 self.merge(f)
532 elif entry.from_p2_removed:
523 elif entry.from_p2_removed:
533 self.otherparent(f)
524 self.otherparent(f)
534 if source is not None:
525 if source is not None:
535 self.copy(source, f)
526 self.copy(source, f)
536 return
527 return
537 elif entry.merged or entry.from_p2:
528 elif entry.merged or entry.from_p2:
538 return
529 return
539 self._addpath(f, b'n', 0, possibly_dirty=True)
530 self._addpath(f, b'n', 0, possibly_dirty=True)
540 self._map.copymap.pop(f, None)
531 self._map.copymap.pop(f, None)
541
532
542 def otherparent(self, f):
533 def otherparent(self, f):
543 '''Mark as coming from the other parent, always dirty.'''
534 '''Mark as coming from the other parent, always dirty.'''
544 if not self.in_merge:
535 if not self.in_merge:
545 msg = _(b"setting %r to other parent only allowed in merges") % f
536 msg = _(b"setting %r to other parent only allowed in merges") % f
546 raise error.Abort(msg)
537 raise error.Abort(msg)
547 if f in self and self[f] == b'n':
538 if f in self and self[f] == b'n':
548 # merge-like
539 # merge-like
549 self._addpath(f, b'm', 0, from_p2=True)
540 self._addpath(f, b'm', 0, from_p2=True)
550 else:
541 else:
551 # add-like
542 # add-like
552 self._addpath(f, b'n', 0, from_p2=True)
543 self._addpath(f, b'n', 0, from_p2=True)
553 self._map.copymap.pop(f, None)
544 self._map.copymap.pop(f, None)
554
545
555 def add(self, f):
546 def add(self, f):
556 '''Mark a file added.'''
547 '''Mark a file added.'''
557 self._addpath(f, b'a', 0)
548 self._addpath(f, b'a', 0)
558 self._map.copymap.pop(f, None)
549 self._map.copymap.pop(f, None)
559
550
560 def remove(self, f):
551 def remove(self, f):
561 '''Mark a file removed.'''
552 '''Mark a file removed.'''
562 self._dirty = True
553 self._dirty = True
563 self._updatedfiles.add(f)
554 self._updatedfiles.add(f)
564 self._map.removefile(f, in_merge=self.in_merge)
555 self._map.removefile(f, in_merge=self.in_merge)
565
556
566 def merge(self, f):
557 def merge(self, f):
567 '''Mark a file merged.'''
558 '''Mark a file merged.'''
568 if not self.in_merge:
559 if not self.in_merge:
569 return self.normallookup(f)
560 return self.normallookup(f)
570 return self.otherparent(f)
561 return self.otherparent(f)
571
562
572 def drop(self, f):
563 def drop(self, f):
573 '''Drop a file from the dirstate'''
564 '''Drop a file from the dirstate'''
574 oldstate = self[f]
565 oldstate = self[f]
575 if self._map.dropfile(f, oldstate):
566 if self._map.dropfile(f, oldstate):
576 self._dirty = True
567 self._dirty = True
577 self._updatedfiles.add(f)
568 self._updatedfiles.add(f)
578 self._map.copymap.pop(f, None)
569 self._map.copymap.pop(f, None)
579
570
580 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
571 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
581 if exists is None:
572 if exists is None:
582 exists = os.path.lexists(os.path.join(self._root, path))
573 exists = os.path.lexists(os.path.join(self._root, path))
583 if not exists:
574 if not exists:
584 # Maybe a path component exists
575 # Maybe a path component exists
585 if not ignoremissing and b'/' in path:
576 if not ignoremissing and b'/' in path:
586 d, f = path.rsplit(b'/', 1)
577 d, f = path.rsplit(b'/', 1)
587 d = self._normalize(d, False, ignoremissing, None)
578 d = self._normalize(d, False, ignoremissing, None)
588 folded = d + b"/" + f
579 folded = d + b"/" + f
589 else:
580 else:
590 # No path components, preserve original case
581 # No path components, preserve original case
591 folded = path
582 folded = path
592 else:
583 else:
593 # recursively normalize leading directory components
584 # recursively normalize leading directory components
594 # against dirstate
585 # against dirstate
595 if b'/' in normed:
586 if b'/' in normed:
596 d, f = normed.rsplit(b'/', 1)
587 d, f = normed.rsplit(b'/', 1)
597 d = self._normalize(d, False, ignoremissing, True)
588 d = self._normalize(d, False, ignoremissing, True)
598 r = self._root + b"/" + d
589 r = self._root + b"/" + d
599 folded = d + b"/" + util.fspath(f, r)
590 folded = d + b"/" + util.fspath(f, r)
600 else:
591 else:
601 folded = util.fspath(normed, self._root)
592 folded = util.fspath(normed, self._root)
602 storemap[normed] = folded
593 storemap[normed] = folded
603
594
604 return folded
595 return folded
605
596
606 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
597 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
607 normed = util.normcase(path)
598 normed = util.normcase(path)
608 folded = self._map.filefoldmap.get(normed, None)
599 folded = self._map.filefoldmap.get(normed, None)
609 if folded is None:
600 if folded is None:
610 if isknown:
601 if isknown:
611 folded = path
602 folded = path
612 else:
603 else:
613 folded = self._discoverpath(
604 folded = self._discoverpath(
614 path, normed, ignoremissing, exists, self._map.filefoldmap
605 path, normed, ignoremissing, exists, self._map.filefoldmap
615 )
606 )
616 return folded
607 return folded
617
608
618 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
609 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
619 normed = util.normcase(path)
610 normed = util.normcase(path)
620 folded = self._map.filefoldmap.get(normed, None)
611 folded = self._map.filefoldmap.get(normed, None)
621 if folded is None:
612 if folded is None:
622 folded = self._map.dirfoldmap.get(normed, None)
613 folded = self._map.dirfoldmap.get(normed, None)
623 if folded is None:
614 if folded is None:
624 if isknown:
615 if isknown:
625 folded = path
616 folded = path
626 else:
617 else:
627 # store discovered result in dirfoldmap so that future
618 # store discovered result in dirfoldmap so that future
628 # normalizefile calls don't start matching directories
619 # normalizefile calls don't start matching directories
629 folded = self._discoverpath(
620 folded = self._discoverpath(
630 path, normed, ignoremissing, exists, self._map.dirfoldmap
621 path, normed, ignoremissing, exists, self._map.dirfoldmap
631 )
622 )
632 return folded
623 return folded
633
624
634 def normalize(self, path, isknown=False, ignoremissing=False):
625 def normalize(self, path, isknown=False, ignoremissing=False):
635 """
626 """
636 normalize the case of a pathname when on a casefolding filesystem
627 normalize the case of a pathname when on a casefolding filesystem
637
628
638 isknown specifies whether the filename came from walking the
629 isknown specifies whether the filename came from walking the
639 disk, to avoid extra filesystem access.
630 disk, to avoid extra filesystem access.
640
631
641 If ignoremissing is True, missing path are returned
632 If ignoremissing is True, missing path are returned
642 unchanged. Otherwise, we try harder to normalize possibly
633 unchanged. Otherwise, we try harder to normalize possibly
643 existing path components.
634 existing path components.
644
635
645 The normalized case is determined based on the following precedence:
636 The normalized case is determined based on the following precedence:
646
637
647 - version of name already stored in the dirstate
638 - version of name already stored in the dirstate
648 - version of name stored on disk
639 - version of name stored on disk
649 - version provided via command arguments
640 - version provided via command arguments
650 """
641 """
651
642
652 if self._checkcase:
643 if self._checkcase:
653 return self._normalize(path, isknown, ignoremissing)
644 return self._normalize(path, isknown, ignoremissing)
654 return path
645 return path
655
646
656 def clear(self):
647 def clear(self):
657 self._map.clear()
648 self._map.clear()
658 self._lastnormaltime = 0
649 self._lastnormaltime = 0
659 self._updatedfiles.clear()
650 self._updatedfiles.clear()
660 self._dirty = True
651 self._dirty = True
661
652
662 def rebuild(self, parent, allfiles, changedfiles=None):
653 def rebuild(self, parent, allfiles, changedfiles=None):
663 if changedfiles is None:
654 if changedfiles is None:
664 # Rebuild entire dirstate
655 # Rebuild entire dirstate
665 to_lookup = allfiles
656 to_lookup = allfiles
666 to_drop = []
657 to_drop = []
667 lastnormaltime = self._lastnormaltime
658 lastnormaltime = self._lastnormaltime
668 self.clear()
659 self.clear()
669 self._lastnormaltime = lastnormaltime
660 self._lastnormaltime = lastnormaltime
670 elif len(changedfiles) < 10:
661 elif len(changedfiles) < 10:
671 # Avoid turning allfiles into a set, which can be expensive if it's
662 # Avoid turning allfiles into a set, which can be expensive if it's
672 # large.
663 # large.
673 to_lookup = []
664 to_lookup = []
674 to_drop = []
665 to_drop = []
675 for f in changedfiles:
666 for f in changedfiles:
676 if f in allfiles:
667 if f in allfiles:
677 to_lookup.append(f)
668 to_lookup.append(f)
678 else:
669 else:
679 to_drop.append(f)
670 to_drop.append(f)
680 else:
671 else:
681 changedfilesset = set(changedfiles)
672 changedfilesset = set(changedfiles)
682 to_lookup = changedfilesset & set(allfiles)
673 to_lookup = changedfilesset & set(allfiles)
683 to_drop = changedfilesset - to_lookup
674 to_drop = changedfilesset - to_lookup
684
675
685 if self._origpl is None:
676 if self._origpl is None:
686 self._origpl = self._pl
677 self._origpl = self._pl
687 self._map.setparents(parent, self._nodeconstants.nullid)
678 self._map.setparents(parent, self._nodeconstants.nullid)
688
679
689 for f in to_lookup:
680 for f in to_lookup:
690 self.normallookup(f)
681 self.normallookup(f)
691 for f in to_drop:
682 for f in to_drop:
692 self.drop(f)
683 self.drop(f)
693
684
694 self._dirty = True
685 self._dirty = True
695
686
696 def identity(self):
687 def identity(self):
697 """Return identity of dirstate itself to detect changing in storage
688 """Return identity of dirstate itself to detect changing in storage
698
689
699 If identity of previous dirstate is equal to this, writing
690 If identity of previous dirstate is equal to this, writing
700 changes based on the former dirstate out can keep consistency.
691 changes based on the former dirstate out can keep consistency.
701 """
692 """
702 return self._map.identity
693 return self._map.identity
703
694
704 def write(self, tr):
695 def write(self, tr):
705 if not self._dirty:
696 if not self._dirty:
706 return
697 return
707
698
708 filename = self._filename
699 filename = self._filename
709 if tr:
700 if tr:
710 # 'dirstate.write()' is not only for writing in-memory
701 # 'dirstate.write()' is not only for writing in-memory
711 # changes out, but also for dropping ambiguous timestamp.
702 # changes out, but also for dropping ambiguous timestamp.
712 # delayed writing re-raise "ambiguous timestamp issue".
703 # delayed writing re-raise "ambiguous timestamp issue".
713 # See also the wiki page below for detail:
704 # See also the wiki page below for detail:
714 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
705 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
715
706
716 # emulate dropping timestamp in 'parsers.pack_dirstate'
707 # emulate dropping timestamp in 'parsers.pack_dirstate'
717 now = _getfsnow(self._opener)
708 now = _getfsnow(self._opener)
718 self._map.clearambiguoustimes(self._updatedfiles, now)
709 self._map.clearambiguoustimes(self._updatedfiles, now)
719
710
720 # emulate that all 'dirstate.normal' results are written out
711 # emulate that all 'dirstate.normal' results are written out
721 self._lastnormaltime = 0
712 self._lastnormaltime = 0
722 self._updatedfiles.clear()
713 self._updatedfiles.clear()
723
714
724 # delay writing in-memory changes out
715 # delay writing in-memory changes out
725 tr.addfilegenerator(
716 tr.addfilegenerator(
726 b'dirstate',
717 b'dirstate',
727 (self._filename,),
718 (self._filename,),
728 self._writedirstate,
719 self._writedirstate,
729 location=b'plain',
720 location=b'plain',
730 )
721 )
731 return
722 return
732
723
733 st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
724 st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
734 self._writedirstate(st)
725 self._writedirstate(st)
735
726
736 def addparentchangecallback(self, category, callback):
727 def addparentchangecallback(self, category, callback):
737 """add a callback to be called when the wd parents are changed
728 """add a callback to be called when the wd parents are changed
738
729
739 Callback will be called with the following arguments:
730 Callback will be called with the following arguments:
740 dirstate, (oldp1, oldp2), (newp1, newp2)
731 dirstate, (oldp1, oldp2), (newp1, newp2)
741
732
742 Category is a unique identifier to allow overwriting an old callback
733 Category is a unique identifier to allow overwriting an old callback
743 with a newer callback.
734 with a newer callback.
744 """
735 """
745 self._plchangecallbacks[category] = callback
736 self._plchangecallbacks[category] = callback
746
737
747 def _writedirstate(self, st):
738 def _writedirstate(self, st):
748 # notify callbacks about parents change
739 # notify callbacks about parents change
749 if self._origpl is not None and self._origpl != self._pl:
740 if self._origpl is not None and self._origpl != self._pl:
750 for c, callback in sorted(
741 for c, callback in sorted(
751 pycompat.iteritems(self._plchangecallbacks)
742 pycompat.iteritems(self._plchangecallbacks)
752 ):
743 ):
753 callback(self, self._origpl, self._pl)
744 callback(self, self._origpl, self._pl)
754 self._origpl = None
745 self._origpl = None
755 # use the modification time of the newly created temporary file as the
746 # use the modification time of the newly created temporary file as the
756 # filesystem's notion of 'now'
747 # filesystem's notion of 'now'
757 now = util.fstat(st)[stat.ST_MTIME] & _rangemask
748 now = util.fstat(st)[stat.ST_MTIME] & _rangemask
758
749
759 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
750 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
760 # timestamp of each entries in dirstate, because of 'now > mtime'
751 # timestamp of each entries in dirstate, because of 'now > mtime'
761 delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
752 delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
762 if delaywrite > 0:
753 if delaywrite > 0:
763 # do we have any files to delay for?
754 # do we have any files to delay for?
764 for f, e in pycompat.iteritems(self._map):
755 for f, e in pycompat.iteritems(self._map):
765 if e.state == b'n' and e[3] == now:
756 if e.state == b'n' and e[3] == now:
766 import time # to avoid useless import
757 import time # to avoid useless import
767
758
768 # rather than sleep n seconds, sleep until the next
759 # rather than sleep n seconds, sleep until the next
769 # multiple of n seconds
760 # multiple of n seconds
770 clock = time.time()
761 clock = time.time()
771 start = int(clock) - (int(clock) % delaywrite)
762 start = int(clock) - (int(clock) % delaywrite)
772 end = start + delaywrite
763 end = start + delaywrite
773 time.sleep(end - clock)
764 time.sleep(end - clock)
774 now = end # trust our estimate that the end is near now
765 now = end # trust our estimate that the end is near now
775 break
766 break
776
767
777 self._map.write(st, now)
768 self._map.write(st, now)
778 self._lastnormaltime = 0
769 self._lastnormaltime = 0
779 self._dirty = False
770 self._dirty = False
780
771
781 def _dirignore(self, f):
772 def _dirignore(self, f):
782 if self._ignore(f):
773 if self._ignore(f):
783 return True
774 return True
784 for p in pathutil.finddirs(f):
775 for p in pathutil.finddirs(f):
785 if self._ignore(p):
776 if self._ignore(p):
786 return True
777 return True
787 return False
778 return False
788
779
789 def _ignorefiles(self):
780 def _ignorefiles(self):
790 files = []
781 files = []
791 if os.path.exists(self._join(b'.hgignore')):
782 if os.path.exists(self._join(b'.hgignore')):
792 files.append(self._join(b'.hgignore'))
783 files.append(self._join(b'.hgignore'))
793 for name, path in self._ui.configitems(b"ui"):
784 for name, path in self._ui.configitems(b"ui"):
794 if name == b'ignore' or name.startswith(b'ignore.'):
785 if name == b'ignore' or name.startswith(b'ignore.'):
795 # we need to use os.path.join here rather than self._join
786 # we need to use os.path.join here rather than self._join
796 # because path is arbitrary and user-specified
787 # because path is arbitrary and user-specified
797 files.append(os.path.join(self._rootdir, util.expandpath(path)))
788 files.append(os.path.join(self._rootdir, util.expandpath(path)))
798 return files
789 return files
799
790
800 def _ignorefileandline(self, f):
791 def _ignorefileandline(self, f):
801 files = collections.deque(self._ignorefiles())
792 files = collections.deque(self._ignorefiles())
802 visited = set()
793 visited = set()
803 while files:
794 while files:
804 i = files.popleft()
795 i = files.popleft()
805 patterns = matchmod.readpatternfile(
796 patterns = matchmod.readpatternfile(
806 i, self._ui.warn, sourceinfo=True
797 i, self._ui.warn, sourceinfo=True
807 )
798 )
808 for pattern, lineno, line in patterns:
799 for pattern, lineno, line in patterns:
809 kind, p = matchmod._patsplit(pattern, b'glob')
800 kind, p = matchmod._patsplit(pattern, b'glob')
810 if kind == b"subinclude":
801 if kind == b"subinclude":
811 if p not in visited:
802 if p not in visited:
812 files.append(p)
803 files.append(p)
813 continue
804 continue
814 m = matchmod.match(
805 m = matchmod.match(
815 self._root, b'', [], [pattern], warn=self._ui.warn
806 self._root, b'', [], [pattern], warn=self._ui.warn
816 )
807 )
817 if m(f):
808 if m(f):
818 return (i, lineno, line)
809 return (i, lineno, line)
819 visited.add(i)
810 visited.add(i)
820 return (None, -1, b"")
811 return (None, -1, b"")
821
812
822 def _walkexplicit(self, match, subrepos):
813 def _walkexplicit(self, match, subrepos):
823 """Get stat data about the files explicitly specified by match.
814 """Get stat data about the files explicitly specified by match.
824
815
825 Return a triple (results, dirsfound, dirsnotfound).
816 Return a triple (results, dirsfound, dirsnotfound).
826 - results is a mapping from filename to stat result. It also contains
817 - results is a mapping from filename to stat result. It also contains
827 listings mapping subrepos and .hg to None.
818 listings mapping subrepos and .hg to None.
828 - dirsfound is a list of files found to be directories.
819 - dirsfound is a list of files found to be directories.
829 - dirsnotfound is a list of files that the dirstate thinks are
820 - dirsnotfound is a list of files that the dirstate thinks are
830 directories and that were not found."""
821 directories and that were not found."""
831
822
832 def badtype(mode):
823 def badtype(mode):
833 kind = _(b'unknown')
824 kind = _(b'unknown')
834 if stat.S_ISCHR(mode):
825 if stat.S_ISCHR(mode):
835 kind = _(b'character device')
826 kind = _(b'character device')
836 elif stat.S_ISBLK(mode):
827 elif stat.S_ISBLK(mode):
837 kind = _(b'block device')
828 kind = _(b'block device')
838 elif stat.S_ISFIFO(mode):
829 elif stat.S_ISFIFO(mode):
839 kind = _(b'fifo')
830 kind = _(b'fifo')
840 elif stat.S_ISSOCK(mode):
831 elif stat.S_ISSOCK(mode):
841 kind = _(b'socket')
832 kind = _(b'socket')
842 elif stat.S_ISDIR(mode):
833 elif stat.S_ISDIR(mode):
843 kind = _(b'directory')
834 kind = _(b'directory')
844 return _(b'unsupported file type (type is %s)') % kind
835 return _(b'unsupported file type (type is %s)') % kind
845
836
846 badfn = match.bad
837 badfn = match.bad
847 dmap = self._map
838 dmap = self._map
848 lstat = os.lstat
839 lstat = os.lstat
849 getkind = stat.S_IFMT
840 getkind = stat.S_IFMT
850 dirkind = stat.S_IFDIR
841 dirkind = stat.S_IFDIR
851 regkind = stat.S_IFREG
842 regkind = stat.S_IFREG
852 lnkkind = stat.S_IFLNK
843 lnkkind = stat.S_IFLNK
853 join = self._join
844 join = self._join
854 dirsfound = []
845 dirsfound = []
855 foundadd = dirsfound.append
846 foundadd = dirsfound.append
856 dirsnotfound = []
847 dirsnotfound = []
857 notfoundadd = dirsnotfound.append
848 notfoundadd = dirsnotfound.append
858
849
859 if not match.isexact() and self._checkcase:
850 if not match.isexact() and self._checkcase:
860 normalize = self._normalize
851 normalize = self._normalize
861 else:
852 else:
862 normalize = None
853 normalize = None
863
854
864 files = sorted(match.files())
855 files = sorted(match.files())
865 subrepos.sort()
856 subrepos.sort()
866 i, j = 0, 0
857 i, j = 0, 0
867 while i < len(files) and j < len(subrepos):
858 while i < len(files) and j < len(subrepos):
868 subpath = subrepos[j] + b"/"
859 subpath = subrepos[j] + b"/"
869 if files[i] < subpath:
860 if files[i] < subpath:
870 i += 1
861 i += 1
871 continue
862 continue
872 while i < len(files) and files[i].startswith(subpath):
863 while i < len(files) and files[i].startswith(subpath):
873 del files[i]
864 del files[i]
874 j += 1
865 j += 1
875
866
876 if not files or b'' in files:
867 if not files or b'' in files:
877 files = [b'']
868 files = [b'']
878 # constructing the foldmap is expensive, so don't do it for the
869 # constructing the foldmap is expensive, so don't do it for the
879 # common case where files is ['']
870 # common case where files is ['']
880 normalize = None
871 normalize = None
881 results = dict.fromkeys(subrepos)
872 results = dict.fromkeys(subrepos)
882 results[b'.hg'] = None
873 results[b'.hg'] = None
883
874
884 for ff in files:
875 for ff in files:
885 if normalize:
876 if normalize:
886 nf = normalize(ff, False, True)
877 nf = normalize(ff, False, True)
887 else:
878 else:
888 nf = ff
879 nf = ff
889 if nf in results:
880 if nf in results:
890 continue
881 continue
891
882
892 try:
883 try:
893 st = lstat(join(nf))
884 st = lstat(join(nf))
894 kind = getkind(st.st_mode)
885 kind = getkind(st.st_mode)
895 if kind == dirkind:
886 if kind == dirkind:
896 if nf in dmap:
887 if nf in dmap:
897 # file replaced by dir on disk but still in dirstate
888 # file replaced by dir on disk but still in dirstate
898 results[nf] = None
889 results[nf] = None
899 foundadd((nf, ff))
890 foundadd((nf, ff))
900 elif kind == regkind or kind == lnkkind:
891 elif kind == regkind or kind == lnkkind:
901 results[nf] = st
892 results[nf] = st
902 else:
893 else:
903 badfn(ff, badtype(kind))
894 badfn(ff, badtype(kind))
904 if nf in dmap:
895 if nf in dmap:
905 results[nf] = None
896 results[nf] = None
906 except OSError as inst: # nf not found on disk - it is dirstate only
897 except OSError as inst: # nf not found on disk - it is dirstate only
907 if nf in dmap: # does it exactly match a missing file?
898 if nf in dmap: # does it exactly match a missing file?
908 results[nf] = None
899 results[nf] = None
909 else: # does it match a missing directory?
900 else: # does it match a missing directory?
910 if self._map.hasdir(nf):
901 if self._map.hasdir(nf):
911 notfoundadd(nf)
902 notfoundadd(nf)
912 else:
903 else:
913 badfn(ff, encoding.strtolocal(inst.strerror))
904 badfn(ff, encoding.strtolocal(inst.strerror))
914
905
915 # match.files() may contain explicitly-specified paths that shouldn't
906 # match.files() may contain explicitly-specified paths that shouldn't
916 # be taken; drop them from the list of files found. dirsfound/notfound
907 # be taken; drop them from the list of files found. dirsfound/notfound
917 # aren't filtered here because they will be tested later.
908 # aren't filtered here because they will be tested later.
918 if match.anypats():
909 if match.anypats():
919 for f in list(results):
910 for f in list(results):
920 if f == b'.hg' or f in subrepos:
911 if f == b'.hg' or f in subrepos:
921 # keep sentinel to disable further out-of-repo walks
912 # keep sentinel to disable further out-of-repo walks
922 continue
913 continue
923 if not match(f):
914 if not match(f):
924 del results[f]
915 del results[f]
925
916
926 # Case insensitive filesystems cannot rely on lstat() failing to detect
917 # Case insensitive filesystems cannot rely on lstat() failing to detect
927 # a case-only rename. Prune the stat object for any file that does not
918 # a case-only rename. Prune the stat object for any file that does not
928 # match the case in the filesystem, if there are multiple files that
919 # match the case in the filesystem, if there are multiple files that
929 # normalize to the same path.
920 # normalize to the same path.
930 if match.isexact() and self._checkcase:
921 if match.isexact() and self._checkcase:
931 normed = {}
922 normed = {}
932
923
933 for f, st in pycompat.iteritems(results):
924 for f, st in pycompat.iteritems(results):
934 if st is None:
925 if st is None:
935 continue
926 continue
936
927
937 nc = util.normcase(f)
928 nc = util.normcase(f)
938 paths = normed.get(nc)
929 paths = normed.get(nc)
939
930
940 if paths is None:
931 if paths is None:
941 paths = set()
932 paths = set()
942 normed[nc] = paths
933 normed[nc] = paths
943
934
944 paths.add(f)
935 paths.add(f)
945
936
946 for norm, paths in pycompat.iteritems(normed):
937 for norm, paths in pycompat.iteritems(normed):
947 if len(paths) > 1:
938 if len(paths) > 1:
948 for path in paths:
939 for path in paths:
949 folded = self._discoverpath(
940 folded = self._discoverpath(
950 path, norm, True, None, self._map.dirfoldmap
941 path, norm, True, None, self._map.dirfoldmap
951 )
942 )
952 if path != folded:
943 if path != folded:
953 results[path] = None
944 results[path] = None
954
945
955 return results, dirsfound, dirsnotfound
946 return results, dirsfound, dirsnotfound
956
947
957 def walk(self, match, subrepos, unknown, ignored, full=True):
948 def walk(self, match, subrepos, unknown, ignored, full=True):
958 """
949 """
959 Walk recursively through the directory tree, finding all files
950 Walk recursively through the directory tree, finding all files
960 matched by match.
951 matched by match.
961
952
962 If full is False, maybe skip some known-clean files.
953 If full is False, maybe skip some known-clean files.
963
954
964 Return a dict mapping filename to stat-like object (either
955 Return a dict mapping filename to stat-like object (either
965 mercurial.osutil.stat instance or return value of os.stat()).
956 mercurial.osutil.stat instance or return value of os.stat()).
966
957
967 """
958 """
968 # full is a flag that extensions that hook into walk can use -- this
959 # full is a flag that extensions that hook into walk can use -- this
969 # implementation doesn't use it at all. This satisfies the contract
960 # implementation doesn't use it at all. This satisfies the contract
970 # because we only guarantee a "maybe".
961 # because we only guarantee a "maybe".
971
962
972 if ignored:
963 if ignored:
973 ignore = util.never
964 ignore = util.never
974 dirignore = util.never
965 dirignore = util.never
975 elif unknown:
966 elif unknown:
976 ignore = self._ignore
967 ignore = self._ignore
977 dirignore = self._dirignore
968 dirignore = self._dirignore
978 else:
969 else:
979 # if not unknown and not ignored, drop dir recursion and step 2
970 # if not unknown and not ignored, drop dir recursion and step 2
980 ignore = util.always
971 ignore = util.always
981 dirignore = util.always
972 dirignore = util.always
982
973
983 matchfn = match.matchfn
974 matchfn = match.matchfn
984 matchalways = match.always()
975 matchalways = match.always()
985 matchtdir = match.traversedir
976 matchtdir = match.traversedir
986 dmap = self._map
977 dmap = self._map
987 listdir = util.listdir
978 listdir = util.listdir
988 lstat = os.lstat
979 lstat = os.lstat
989 dirkind = stat.S_IFDIR
980 dirkind = stat.S_IFDIR
990 regkind = stat.S_IFREG
981 regkind = stat.S_IFREG
991 lnkkind = stat.S_IFLNK
982 lnkkind = stat.S_IFLNK
992 join = self._join
983 join = self._join
993
984
994 exact = skipstep3 = False
985 exact = skipstep3 = False
995 if match.isexact(): # match.exact
986 if match.isexact(): # match.exact
996 exact = True
987 exact = True
997 dirignore = util.always # skip step 2
988 dirignore = util.always # skip step 2
998 elif match.prefix(): # match.match, no patterns
989 elif match.prefix(): # match.match, no patterns
999 skipstep3 = True
990 skipstep3 = True
1000
991
1001 if not exact and self._checkcase:
992 if not exact and self._checkcase:
1002 normalize = self._normalize
993 normalize = self._normalize
1003 normalizefile = self._normalizefile
994 normalizefile = self._normalizefile
1004 skipstep3 = False
995 skipstep3 = False
1005 else:
996 else:
1006 normalize = self._normalize
997 normalize = self._normalize
1007 normalizefile = None
998 normalizefile = None
1008
999
1009 # step 1: find all explicit files
1000 # step 1: find all explicit files
1010 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1001 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1011 if matchtdir:
1002 if matchtdir:
1012 for d in work:
1003 for d in work:
1013 matchtdir(d[0])
1004 matchtdir(d[0])
1014 for d in dirsnotfound:
1005 for d in dirsnotfound:
1015 matchtdir(d)
1006 matchtdir(d)
1016
1007
1017 skipstep3 = skipstep3 and not (work or dirsnotfound)
1008 skipstep3 = skipstep3 and not (work or dirsnotfound)
1018 work = [d for d in work if not dirignore(d[0])]
1009 work = [d for d in work if not dirignore(d[0])]
1019
1010
1020 # step 2: visit subdirectories
1011 # step 2: visit subdirectories
1021 def traverse(work, alreadynormed):
1012 def traverse(work, alreadynormed):
1022 wadd = work.append
1013 wadd = work.append
1023 while work:
1014 while work:
1024 tracing.counter('dirstate.walk work', len(work))
1015 tracing.counter('dirstate.walk work', len(work))
1025 nd = work.pop()
1016 nd = work.pop()
1026 visitentries = match.visitchildrenset(nd)
1017 visitentries = match.visitchildrenset(nd)
1027 if not visitentries:
1018 if not visitentries:
1028 continue
1019 continue
1029 if visitentries == b'this' or visitentries == b'all':
1020 if visitentries == b'this' or visitentries == b'all':
1030 visitentries = None
1021 visitentries = None
1031 skip = None
1022 skip = None
1032 if nd != b'':
1023 if nd != b'':
1033 skip = b'.hg'
1024 skip = b'.hg'
1034 try:
1025 try:
1035 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1026 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1036 entries = listdir(join(nd), stat=True, skip=skip)
1027 entries = listdir(join(nd), stat=True, skip=skip)
1037 except OSError as inst:
1028 except OSError as inst:
1038 if inst.errno in (errno.EACCES, errno.ENOENT):
1029 if inst.errno in (errno.EACCES, errno.ENOENT):
1039 match.bad(
1030 match.bad(
1040 self.pathto(nd), encoding.strtolocal(inst.strerror)
1031 self.pathto(nd), encoding.strtolocal(inst.strerror)
1041 )
1032 )
1042 continue
1033 continue
1043 raise
1034 raise
1044 for f, kind, st in entries:
1035 for f, kind, st in entries:
1045 # Some matchers may return files in the visitentries set,
1036 # Some matchers may return files in the visitentries set,
1046 # instead of 'this', if the matcher explicitly mentions them
1037 # instead of 'this', if the matcher explicitly mentions them
1047 # and is not an exactmatcher. This is acceptable; we do not
1038 # and is not an exactmatcher. This is acceptable; we do not
1048 # make any hard assumptions about file-or-directory below
1039 # make any hard assumptions about file-or-directory below
1049 # based on the presence of `f` in visitentries. If
1040 # based on the presence of `f` in visitentries. If
1050 # visitchildrenset returned a set, we can always skip the
1041 # visitchildrenset returned a set, we can always skip the
1051 # entries *not* in the set it provided regardless of whether
1042 # entries *not* in the set it provided regardless of whether
1052 # they're actually a file or a directory.
1043 # they're actually a file or a directory.
1053 if visitentries and f not in visitentries:
1044 if visitentries and f not in visitentries:
1054 continue
1045 continue
1055 if normalizefile:
1046 if normalizefile:
1056 # even though f might be a directory, we're only
1047 # even though f might be a directory, we're only
1057 # interested in comparing it to files currently in the
1048 # interested in comparing it to files currently in the
1058 # dmap -- therefore normalizefile is enough
1049 # dmap -- therefore normalizefile is enough
1059 nf = normalizefile(
1050 nf = normalizefile(
1060 nd and (nd + b"/" + f) or f, True, True
1051 nd and (nd + b"/" + f) or f, True, True
1061 )
1052 )
1062 else:
1053 else:
1063 nf = nd and (nd + b"/" + f) or f
1054 nf = nd and (nd + b"/" + f) or f
1064 if nf not in results:
1055 if nf not in results:
1065 if kind == dirkind:
1056 if kind == dirkind:
1066 if not ignore(nf):
1057 if not ignore(nf):
1067 if matchtdir:
1058 if matchtdir:
1068 matchtdir(nf)
1059 matchtdir(nf)
1069 wadd(nf)
1060 wadd(nf)
1070 if nf in dmap and (matchalways or matchfn(nf)):
1061 if nf in dmap and (matchalways or matchfn(nf)):
1071 results[nf] = None
1062 results[nf] = None
1072 elif kind == regkind or kind == lnkkind:
1063 elif kind == regkind or kind == lnkkind:
1073 if nf in dmap:
1064 if nf in dmap:
1074 if matchalways or matchfn(nf):
1065 if matchalways or matchfn(nf):
1075 results[nf] = st
1066 results[nf] = st
1076 elif (matchalways or matchfn(nf)) and not ignore(
1067 elif (matchalways or matchfn(nf)) and not ignore(
1077 nf
1068 nf
1078 ):
1069 ):
1079 # unknown file -- normalize if necessary
1070 # unknown file -- normalize if necessary
1080 if not alreadynormed:
1071 if not alreadynormed:
1081 nf = normalize(nf, False, True)
1072 nf = normalize(nf, False, True)
1082 results[nf] = st
1073 results[nf] = st
1083 elif nf in dmap and (matchalways or matchfn(nf)):
1074 elif nf in dmap and (matchalways or matchfn(nf)):
1084 results[nf] = None
1075 results[nf] = None
1085
1076
1086 for nd, d in work:
1077 for nd, d in work:
1087 # alreadynormed means that processwork doesn't have to do any
1078 # alreadynormed means that processwork doesn't have to do any
1088 # expensive directory normalization
1079 # expensive directory normalization
1089 alreadynormed = not normalize or nd == d
1080 alreadynormed = not normalize or nd == d
1090 traverse([d], alreadynormed)
1081 traverse([d], alreadynormed)
1091
1082
1092 for s in subrepos:
1083 for s in subrepos:
1093 del results[s]
1084 del results[s]
1094 del results[b'.hg']
1085 del results[b'.hg']
1095
1086
1096 # step 3: visit remaining files from dmap
1087 # step 3: visit remaining files from dmap
1097 if not skipstep3 and not exact:
1088 if not skipstep3 and not exact:
1098 # If a dmap file is not in results yet, it was either
1089 # If a dmap file is not in results yet, it was either
1099 # a) not matching matchfn b) ignored, c) missing, or d) under a
1090 # a) not matching matchfn b) ignored, c) missing, or d) under a
1100 # symlink directory.
1091 # symlink directory.
1101 if not results and matchalways:
1092 if not results and matchalways:
1102 visit = [f for f in dmap]
1093 visit = [f for f in dmap]
1103 else:
1094 else:
1104 visit = [f for f in dmap if f not in results and matchfn(f)]
1095 visit = [f for f in dmap if f not in results and matchfn(f)]
1105 visit.sort()
1096 visit.sort()
1106
1097
1107 if unknown:
1098 if unknown:
1108 # unknown == True means we walked all dirs under the roots
1099 # unknown == True means we walked all dirs under the roots
1109 # that wasn't ignored, and everything that matched was stat'ed
1100 # that wasn't ignored, and everything that matched was stat'ed
1110 # and is already in results.
1101 # and is already in results.
1111 # The rest must thus be ignored or under a symlink.
1102 # The rest must thus be ignored or under a symlink.
1112 audit_path = pathutil.pathauditor(self._root, cached=True)
1103 audit_path = pathutil.pathauditor(self._root, cached=True)
1113
1104
1114 for nf in iter(visit):
1105 for nf in iter(visit):
1115 # If a stat for the same file was already added with a
1106 # If a stat for the same file was already added with a
1116 # different case, don't add one for this, since that would
1107 # different case, don't add one for this, since that would
1117 # make it appear as if the file exists under both names
1108 # make it appear as if the file exists under both names
1118 # on disk.
1109 # on disk.
1119 if (
1110 if (
1120 normalizefile
1111 normalizefile
1121 and normalizefile(nf, True, True) in results
1112 and normalizefile(nf, True, True) in results
1122 ):
1113 ):
1123 results[nf] = None
1114 results[nf] = None
1124 # Report ignored items in the dmap as long as they are not
1115 # Report ignored items in the dmap as long as they are not
1125 # under a symlink directory.
1116 # under a symlink directory.
1126 elif audit_path.check(nf):
1117 elif audit_path.check(nf):
1127 try:
1118 try:
1128 results[nf] = lstat(join(nf))
1119 results[nf] = lstat(join(nf))
1129 # file was just ignored, no links, and exists
1120 # file was just ignored, no links, and exists
1130 except OSError:
1121 except OSError:
1131 # file doesn't exist
1122 # file doesn't exist
1132 results[nf] = None
1123 results[nf] = None
1133 else:
1124 else:
1134 # It's either missing or under a symlink directory
1125 # It's either missing or under a symlink directory
1135 # which we in this case report as missing
1126 # which we in this case report as missing
1136 results[nf] = None
1127 results[nf] = None
1137 else:
1128 else:
1138 # We may not have walked the full directory tree above,
1129 # We may not have walked the full directory tree above,
1139 # so stat and check everything we missed.
1130 # so stat and check everything we missed.
1140 iv = iter(visit)
1131 iv = iter(visit)
1141 for st in util.statfiles([join(i) for i in visit]):
1132 for st in util.statfiles([join(i) for i in visit]):
1142 results[next(iv)] = st
1133 results[next(iv)] = st
1143 return results
1134 return results
1144
1135
    def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
        """Compute working-copy status through the Rust implementation.

        Returns the same ``(lookup, status)`` pair as ``status()``:
        ``lookup`` is the list of files needing a content read to be
        classified, and ``status`` is a ``scmutil.status`` object.
        """
        # Force Rayon (Rust parallelism library) to respect the number of
        # workers. This is a temporary workaround until Rust code knows
        # how to read the config file.
        numcpus = self._ui.configint(b"worker", b"numcpus")
        if numcpus is not None:
            encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)

        workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
        if not workers_enabled:
            # a single Rayon thread effectively disables parallelism
            encoding.environ[b"RAYON_NUM_THREADS"] = b"1"

        (
            lookup,
            modified,
            added,
            removed,
            deleted,
            clean,
            ignored,
            unknown,
            warnings,
            bad,
            traversed,
            dirty,
        ) = rustmod.status(
            self._map._rustmap,
            matcher,
            self._rootdir,
            self._ignorefiles(),
            self._checkexec,
            self._lastnormaltime,
            bool(list_clean),
            bool(list_ignored),
            bool(list_unknown),
            bool(matcher.traversedir),
        )

        # the Rust call reports whether it mutated the dirstate map
        self._dirty |= dirty

        if matcher.traversedir:
            # replay the directories visited by the Rust walk for matchers
            # that registered a traversedir callback
            for dir in traversed:
                matcher.traversedir(dir)

        if self._ui.warn:
            for item in warnings:
                if isinstance(item, tuple):
                    # a (file_path, syntax) pair: invalid syntax was found
                    # in an ignore pattern file
                    file_path, syntax = item
                    msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
                        file_path,
                        syntax,
                    )
                    self._ui.warn(msg)
                else:
                    # a bare path: the pattern file could not be read
                    msg = _(b"skipping unreadable pattern file '%s': %s\n")
                    self._ui.warn(
                        msg
                        % (
                            pathutil.canonpath(
                                self._rootdir, self._rootdir, item
                            ),
                            b"No such file or directory",
                        )
                    )

        # forward per-file errors to the matcher's bad-file callback
        for (fn, message) in bad:
            matcher.bad(fn, encoding.strtolocal(message))

        status = scmutil.status(
            modified=modified,
            added=added,
            removed=removed,
            deleted=deleted,
            unknown=unknown,
            ignored=ignored,
            clean=clean,
        )
        return (lookup, status)
1223
1214
    def status(self, match, subrepos, ignored, clean, unknown):
        """Determine the status of the working copy relative to the
        dirstate and return a pair of (unsure, status), where status is of type
        scmutil.status and:

          unsure:
            files that might have been modified since the dirstate was
            written, but need to be read to be sure (size is the same
            but mtime differs)
          status.modified:
            files that have definitely been modified since the dirstate
            was written (different size or mode)
          status.clean:
            files that have definitely not been modified since the
            dirstate was written
        """
        # The incoming 'ignored'/'clean'/'unknown' arguments are booleans
        # saying whether each category should be listed; rebind them so the
        # plain names can hold the result lists below.
        listignored, listclean, listunknown = ignored, clean, unknown
        lookup, modified, added, unknown, ignored = [], [], [], [], []
        removed, deleted, clean = [], [], []

        dmap = self._map
        dmap.preload()

        use_rust = True

        # matcher types the Rust fast path knows how to handle
        allowed_matchers = (
            matchmod.alwaysmatcher,
            matchmod.exactmatcher,
            matchmod.includematcher,
        )

        if rustmod is None:
            use_rust = False
        elif self._checkcase:
            # Case-insensitive filesystems are not handled yet
            use_rust = False
        elif subrepos:
            use_rust = False
        elif sparse.enabled:
            use_rust = False
        elif not isinstance(match, allowed_matchers):
            # Some matchers have yet to be implemented
            use_rust = False

        if use_rust:
            try:
                return self._rust_status(
                    match, listclean, listignored, listunknown
                )
            except rustmod.FallbackError:
                # the Rust implementation declined this request; fall
                # through to the pure-Python path below
                pass

        def noop(f):
            pass

        # Bind bound methods and attributes to locals: each of these runs
        # once per walked file, so avoiding repeated attribute lookups in
        # the loop below matters for performance.
        dcontains = dmap.__contains__
        dget = dmap.__getitem__
        ladd = lookup.append  # aka "unsure"
        madd = modified.append
        aadd = added.append
        uadd = unknown.append if listunknown else noop
        iadd = ignored.append if listignored else noop
        radd = removed.append
        dadd = deleted.append
        cadd = clean.append if listclean else noop
        mexact = match.exact
        dirignore = self._dirignore
        checkexec = self._checkexec
        copymap = self._map.copymap
        lastnormaltime = self._lastnormaltime

        # We need to do full walks when either
        # - we're listing all clean files, or
        # - match.traversedir does something, because match.traversedir should
        #   be called for every dir in the working dir
        full = listclean or match.traversedir is not None
        for fn, st in pycompat.iteritems(
            self.walk(match, subrepos, listunknown, listignored, full=full)
        ):
            if not dcontains(fn):
                # file is on disk but not tracked: ignored or unknown
                if (listignored or mexact(fn)) and dirignore(fn):
                    if listignored:
                        iadd(fn)
                else:
                    uadd(fn)
                continue

            # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
            # written like that for performance reasons. dmap[fn] is not a
            # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
            # opcode has fast paths when the value to be unpacked is a tuple or
            # a list, but falls back to creating a full-fledged iterator in
            # general. That is much slower than simply accessing and storing the
            # tuple members one by one.
            t = dget(fn)
            state = t.state
            mode = t[1]
            size = t[2]
            time = t[3]

            if not st and state in b"nma":
                # tracked in the dirstate but gone from disk
                dadd(fn)
            elif state == b'n':
                # "normal" (presumed clean) entry: compare the recorded
                # size/mode/mtime against the on-disk stat
                if (
                    size >= 0
                    and (
                        (size != st.st_size and size != st.st_size & _rangemask)
                        or ((mode ^ st.st_mode) & 0o100 and checkexec)
                    )
                    or t.from_p2
                    or fn in copymap
                ):
                    if stat.S_ISLNK(st.st_mode) and size != st.st_size:
                        # issue6456: Size returned may be longer due to
                        # encryption on EXT-4 fscrypt, undecided.
                        ladd(fn)
                    else:
                        madd(fn)
                elif (
                    time != st[stat.ST_MTIME]
                    and time != st[stat.ST_MTIME] & _rangemask
                ):
                    # same size but mtime differs: needs a content read
                    ladd(fn)
                elif st[stat.ST_MTIME] == lastnormaltime:
                    # fn may have just been marked as normal and it may have
                    # changed in the same second without changing its size.
                    # This can happen if we quickly do multiple commits.
                    # Force lookup, so we don't miss such a racy file change.
                    ladd(fn)
                elif listclean:
                    cadd(fn)
                elif t.merged:
                    madd(fn)
            elif state == b'a':
                aadd(fn)
            elif t.removed:
                radd(fn)
        status = scmutil.status(
            modified, added, removed, deleted, unknown, ignored, clean
        )
        return (lookup, status)
1365
1356
1366 def matches(self, match):
1357 def matches(self, match):
1367 """
1358 """
1368 return files in the dirstate (in whatever state) filtered by match
1359 return files in the dirstate (in whatever state) filtered by match
1369 """
1360 """
1370 dmap = self._map
1361 dmap = self._map
1371 if rustmod is not None:
1362 if rustmod is not None:
1372 dmap = self._map._rustmap
1363 dmap = self._map._rustmap
1373
1364
1374 if match.always():
1365 if match.always():
1375 return dmap.keys()
1366 return dmap.keys()
1376 files = match.files()
1367 files = match.files()
1377 if match.isexact():
1368 if match.isexact():
1378 # fast path -- filter the other way around, since typically files is
1369 # fast path -- filter the other way around, since typically files is
1379 # much smaller than dmap
1370 # much smaller than dmap
1380 return [f for f in files if f in dmap]
1371 return [f for f in files if f in dmap]
1381 if match.prefix() and all(fn in dmap for fn in files):
1372 if match.prefix() and all(fn in dmap for fn in files):
1382 # fast path -- all the values are known to be files, so just return
1373 # fast path -- all the values are known to be files, so just return
1383 # that
1374 # that
1384 return list(files)
1375 return list(files)
1385 return [f for f in dmap if match(f)]
1376 return [f for f in dmap if match(f)]
1386
1377
1387 def _actualfilename(self, tr):
1378 def _actualfilename(self, tr):
1388 if tr:
1379 if tr:
1389 return self._pendingfilename
1380 return self._pendingfilename
1390 else:
1381 else:
1391 return self._filename
1382 return self._filename
1392
1383
    def savebackup(self, tr, backupname):
        '''Save current dirstate into backup file

        ``backupname`` is created as a hardlink of the (freshly written)
        dirstate file, so it must differ from the active file name.
        '''
        # write to the pending file while a transaction is running,
        # otherwise to the regular dirstate file
        filename = self._actualfilename(tr)
        assert backupname != filename

        # use '_writedirstate' instead of 'write' to write changes certainly,
        # because the latter omits writing out if transaction is running.
        # output file will be used to create backup of dirstate at this point.
        if self._dirty or not self._opener.exists(filename):
            self._writedirstate(
                self._opener(filename, b"w", atomictemp=True, checkambig=True)
            )

        if tr:
            # ensure that subsequent tr.writepending returns True for
            # changes written out above, even if dirstate is never
            # changed after this
            tr.addfilegenerator(
                b'dirstate',
                (self._filename,),
                self._writedirstate,
                location=b'plain',
            )

            # ensure that pending file written above is unlinked at
            # failure, even if tr.writepending isn't invoked until the
            # end of this transaction
            tr.registertmp(filename, location=b'plain')

        # remove any stale backup before creating the new hardlink
        self._opener.tryunlink(backupname)
        # hardlink backup is okay because _writedirstate is always called
        # with an "atomictemp=True" file.
        util.copyfile(
            self._opener.join(filename),
            self._opener.join(backupname),
            hardlink=True,
        )
1430
1421
1431 def restorebackup(self, tr, backupname):
1422 def restorebackup(self, tr, backupname):
1432 '''Restore dirstate by backup file'''
1423 '''Restore dirstate by backup file'''
1433 # this "invalidate()" prevents "wlock.release()" from writing
1424 # this "invalidate()" prevents "wlock.release()" from writing
1434 # changes of dirstate out after restoring from backup file
1425 # changes of dirstate out after restoring from backup file
1435 self.invalidate()
1426 self.invalidate()
1436 filename = self._actualfilename(tr)
1427 filename = self._actualfilename(tr)
1437 o = self._opener
1428 o = self._opener
1438 if util.samefile(o.join(backupname), o.join(filename)):
1429 if util.samefile(o.join(backupname), o.join(filename)):
1439 o.unlink(backupname)
1430 o.unlink(backupname)
1440 else:
1431 else:
1441 o.rename(backupname, filename, checkambig=True)
1432 o.rename(backupname, filename, checkambig=True)
1442
1433
1443 def clearbackup(self, tr, backupname):
1434 def clearbackup(self, tr, backupname):
1444 '''Clear backup file'''
1435 '''Clear backup file'''
1445 self._opener.unlink(backupname)
1436 self._opener.unlink(backupname)
General Comments 0
You need to be logged in to leave comments. Login now