##// END OF EJS Templates
dirstate: add a `in_merge` property...
marmoute -
r48299:94c58f3a default
parent child Browse files
Show More
@@ -1,1452 +1,1457 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .pycompat import delattr
17 from .pycompat import delattr
18
18
19 from hgdemandimport import tracing
19 from hgdemandimport import tracing
20
20
21 from . import (
21 from . import (
22 dirstatemap,
22 dirstatemap,
23 encoding,
23 encoding,
24 error,
24 error,
25 match as matchmod,
25 match as matchmod,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 sparse,
30 sparse,
31 util,
31 util,
32 )
32 )
33
33
34 from .interfaces import (
34 from .interfaces import (
35 dirstate as intdirstate,
35 dirstate as intdirstate,
36 util as interfaceutil,
36 util as interfaceutil,
37 )
37 )
38
38
parsers = policy.importmod('parsers')
rustmod = policy.importrust('dirstate')

# dirstate-v2 support is only available through the Rust extensions
SUPPORTS_DIRSTATE_V2 = rustmod is not None

propertycache = util.propertycache
filecache = scmutil.filecache
_rangemask = 0x7FFFFFFF

dirstatetuple = parsers.dirstatetuple


# a special value used internally for `size` if the file come from the other parent
FROM_P2 = dirstatemap.FROM_P2

# a special value used internally for `size` if the file is modified/merged/added
NONNORMAL = dirstatemap.NONNORMAL

# a special value used internally for `time` if the time is ambigeous
AMBIGUOUS_TIME = dirstatemap.AMBIGUOUS_TIME
59
59
60
60
class repocache(filecache):
    """filecache whose entries are resolved inside the .hg/ directory."""

    def join(self, obj, fname):
        # resolve relative to the repository's .hg/ opener
        return obj._opener.join(fname)
66
66
67
67
class rootcache(filecache):
    """filecache whose entries are resolved from the repository root."""

    def join(self, obj, fname):
        # resolve relative to the working-directory root
        return obj._join(fname)
73
73
74
74
def _getfsnow(vfs):
    """Return "now" as a timestamp taken from the filesystem behind `vfs`.

    A throwaway temporary file is stat'ed so the value reflects the
    filesystem's own clock and granularity.
    """
    fd, fname = vfs.mkstemp()
    try:
        return os.fstat(fd)[stat.ST_MTIME]
    finally:
        # always clean up the probe file, even if fstat fails
        os.close(fd)
        vfs.unlink(fname)
83
83
84
84
85 @interfaceutil.implementer(intdirstate.idirstate)
85 @interfaceutil.implementer(intdirstate.idirstate)
86 class dirstate(object):
86 class dirstate(object):
87 def __init__(
87 def __init__(
88 self,
88 self,
89 opener,
89 opener,
90 ui,
90 ui,
91 root,
91 root,
92 validate,
92 validate,
93 sparsematchfn,
93 sparsematchfn,
94 nodeconstants,
94 nodeconstants,
95 use_dirstate_v2,
95 use_dirstate_v2,
96 ):
96 ):
97 """Create a new dirstate object.
97 """Create a new dirstate object.
98
98
99 opener is an open()-like callable that can be used to open the
99 opener is an open()-like callable that can be used to open the
100 dirstate file; root is the root of the directory tracked by
100 dirstate file; root is the root of the directory tracked by
101 the dirstate.
101 the dirstate.
102 """
102 """
103 self._use_dirstate_v2 = use_dirstate_v2
103 self._use_dirstate_v2 = use_dirstate_v2
104 self._nodeconstants = nodeconstants
104 self._nodeconstants = nodeconstants
105 self._opener = opener
105 self._opener = opener
106 self._validate = validate
106 self._validate = validate
107 self._root = root
107 self._root = root
108 self._sparsematchfn = sparsematchfn
108 self._sparsematchfn = sparsematchfn
109 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
109 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
110 # UNC path pointing to root share (issue4557)
110 # UNC path pointing to root share (issue4557)
111 self._rootdir = pathutil.normasprefix(root)
111 self._rootdir = pathutil.normasprefix(root)
112 self._dirty = False
112 self._dirty = False
113 self._lastnormaltime = 0
113 self._lastnormaltime = 0
114 self._ui = ui
114 self._ui = ui
115 self._filecache = {}
115 self._filecache = {}
116 self._parentwriters = 0
116 self._parentwriters = 0
117 self._filename = b'dirstate'
117 self._filename = b'dirstate'
118 self._pendingfilename = b'%s.pending' % self._filename
118 self._pendingfilename = b'%s.pending' % self._filename
119 self._plchangecallbacks = {}
119 self._plchangecallbacks = {}
120 self._origpl = None
120 self._origpl = None
121 self._updatedfiles = set()
121 self._updatedfiles = set()
122 self._mapcls = dirstatemap.dirstatemap
122 self._mapcls = dirstatemap.dirstatemap
123 # Access and cache cwd early, so we don't access it for the first time
123 # Access and cache cwd early, so we don't access it for the first time
124 # after a working-copy update caused it to not exist (accessing it then
124 # after a working-copy update caused it to not exist (accessing it then
125 # raises an exception).
125 # raises an exception).
126 self._cwd
126 self._cwd
127
127
128 def prefetch_parents(self):
128 def prefetch_parents(self):
129 """make sure the parents are loaded
129 """make sure the parents are loaded
130
130
131 Used to avoid a race condition.
131 Used to avoid a race condition.
132 """
132 """
133 self._pl
133 self._pl
134
134
135 @contextlib.contextmanager
135 @contextlib.contextmanager
136 def parentchange(self):
136 def parentchange(self):
137 """Context manager for handling dirstate parents.
137 """Context manager for handling dirstate parents.
138
138
139 If an exception occurs in the scope of the context manager,
139 If an exception occurs in the scope of the context manager,
140 the incoherent dirstate won't be written when wlock is
140 the incoherent dirstate won't be written when wlock is
141 released.
141 released.
142 """
142 """
143 self._parentwriters += 1
143 self._parentwriters += 1
144 yield
144 yield
145 # Typically we want the "undo" step of a context manager in a
145 # Typically we want the "undo" step of a context manager in a
146 # finally block so it happens even when an exception
146 # finally block so it happens even when an exception
147 # occurs. In this case, however, we only want to decrement
147 # occurs. In this case, however, we only want to decrement
148 # parentwriters if the code in the with statement exits
148 # parentwriters if the code in the with statement exits
149 # normally, so we don't have a try/finally here on purpose.
149 # normally, so we don't have a try/finally here on purpose.
150 self._parentwriters -= 1
150 self._parentwriters -= 1
151
151
152 def pendingparentchange(self):
152 def pendingparentchange(self):
153 """Returns true if the dirstate is in the middle of a set of changes
153 """Returns true if the dirstate is in the middle of a set of changes
154 that modify the dirstate parent.
154 that modify the dirstate parent.
155 """
155 """
156 return self._parentwriters > 0
156 return self._parentwriters > 0
157
157
158 @propertycache
158 @propertycache
159 def _map(self):
159 def _map(self):
160 """Return the dirstate contents (see documentation for dirstatemap)."""
160 """Return the dirstate contents (see documentation for dirstatemap)."""
161 self._map = self._mapcls(
161 self._map = self._mapcls(
162 self._ui,
162 self._ui,
163 self._opener,
163 self._opener,
164 self._root,
164 self._root,
165 self._nodeconstants,
165 self._nodeconstants,
166 self._use_dirstate_v2,
166 self._use_dirstate_v2,
167 )
167 )
168 return self._map
168 return self._map
169
169
170 @property
170 @property
171 def _sparsematcher(self):
171 def _sparsematcher(self):
172 """The matcher for the sparse checkout.
172 """The matcher for the sparse checkout.
173
173
174 The working directory may not include every file from a manifest. The
174 The working directory may not include every file from a manifest. The
175 matcher obtained by this property will match a path if it is to be
175 matcher obtained by this property will match a path if it is to be
176 included in the working directory.
176 included in the working directory.
177 """
177 """
178 # TODO there is potential to cache this property. For now, the matcher
178 # TODO there is potential to cache this property. For now, the matcher
179 # is resolved on every access. (But the called function does use a
179 # is resolved on every access. (But the called function does use a
180 # cache to keep the lookup fast.)
180 # cache to keep the lookup fast.)
181 return self._sparsematchfn()
181 return self._sparsematchfn()
182
182
183 @repocache(b'branch')
183 @repocache(b'branch')
184 def _branch(self):
184 def _branch(self):
185 try:
185 try:
186 return self._opener.read(b"branch").strip() or b"default"
186 return self._opener.read(b"branch").strip() or b"default"
187 except IOError as inst:
187 except IOError as inst:
188 if inst.errno != errno.ENOENT:
188 if inst.errno != errno.ENOENT:
189 raise
189 raise
190 return b"default"
190 return b"default"
191
191
192 @property
192 @property
193 def _pl(self):
193 def _pl(self):
194 return self._map.parents()
194 return self._map.parents()
195
195
196 def hasdir(self, d):
196 def hasdir(self, d):
197 return self._map.hastrackeddir(d)
197 return self._map.hastrackeddir(d)
198
198
199 @rootcache(b'.hgignore')
199 @rootcache(b'.hgignore')
200 def _ignore(self):
200 def _ignore(self):
201 files = self._ignorefiles()
201 files = self._ignorefiles()
202 if not files:
202 if not files:
203 return matchmod.never()
203 return matchmod.never()
204
204
205 pats = [b'include:%s' % f for f in files]
205 pats = [b'include:%s' % f for f in files]
206 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
206 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
207
207
208 @propertycache
208 @propertycache
209 def _slash(self):
209 def _slash(self):
210 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
210 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
211
211
212 @propertycache
212 @propertycache
213 def _checklink(self):
213 def _checklink(self):
214 return util.checklink(self._root)
214 return util.checklink(self._root)
215
215
216 @propertycache
216 @propertycache
217 def _checkexec(self):
217 def _checkexec(self):
218 return bool(util.checkexec(self._root))
218 return bool(util.checkexec(self._root))
219
219
220 @propertycache
220 @propertycache
221 def _checkcase(self):
221 def _checkcase(self):
222 return not util.fscasesensitive(self._join(b'.hg'))
222 return not util.fscasesensitive(self._join(b'.hg'))
223
223
224 def _join(self, f):
224 def _join(self, f):
225 # much faster than os.path.join()
225 # much faster than os.path.join()
226 # it's safe because f is always a relative path
226 # it's safe because f is always a relative path
227 return self._rootdir + f
227 return self._rootdir + f
228
228
229 def flagfunc(self, buildfallback):
229 def flagfunc(self, buildfallback):
230 if self._checklink and self._checkexec:
230 if self._checklink and self._checkexec:
231
231
232 def f(x):
232 def f(x):
233 try:
233 try:
234 st = os.lstat(self._join(x))
234 st = os.lstat(self._join(x))
235 if util.statislink(st):
235 if util.statislink(st):
236 return b'l'
236 return b'l'
237 if util.statisexec(st):
237 if util.statisexec(st):
238 return b'x'
238 return b'x'
239 except OSError:
239 except OSError:
240 pass
240 pass
241 return b''
241 return b''
242
242
243 return f
243 return f
244
244
245 fallback = buildfallback()
245 fallback = buildfallback()
246 if self._checklink:
246 if self._checklink:
247
247
248 def f(x):
248 def f(x):
249 if os.path.islink(self._join(x)):
249 if os.path.islink(self._join(x)):
250 return b'l'
250 return b'l'
251 if b'x' in fallback(x):
251 if b'x' in fallback(x):
252 return b'x'
252 return b'x'
253 return b''
253 return b''
254
254
255 return f
255 return f
256 if self._checkexec:
256 if self._checkexec:
257
257
258 def f(x):
258 def f(x):
259 if b'l' in fallback(x):
259 if b'l' in fallback(x):
260 return b'l'
260 return b'l'
261 if util.isexec(self._join(x)):
261 if util.isexec(self._join(x)):
262 return b'x'
262 return b'x'
263 return b''
263 return b''
264
264
265 return f
265 return f
266 else:
266 else:
267 return fallback
267 return fallback
268
268
269 @propertycache
269 @propertycache
270 def _cwd(self):
270 def _cwd(self):
271 # internal config: ui.forcecwd
271 # internal config: ui.forcecwd
272 forcecwd = self._ui.config(b'ui', b'forcecwd')
272 forcecwd = self._ui.config(b'ui', b'forcecwd')
273 if forcecwd:
273 if forcecwd:
274 return forcecwd
274 return forcecwd
275 return encoding.getcwd()
275 return encoding.getcwd()
276
276
277 def getcwd(self):
277 def getcwd(self):
278 """Return the path from which a canonical path is calculated.
278 """Return the path from which a canonical path is calculated.
279
279
280 This path should be used to resolve file patterns or to convert
280 This path should be used to resolve file patterns or to convert
281 canonical paths back to file paths for display. It shouldn't be
281 canonical paths back to file paths for display. It shouldn't be
282 used to get real file paths. Use vfs functions instead.
282 used to get real file paths. Use vfs functions instead.
283 """
283 """
284 cwd = self._cwd
284 cwd = self._cwd
285 if cwd == self._root:
285 if cwd == self._root:
286 return b''
286 return b''
287 # self._root ends with a path separator if self._root is '/' or 'C:\'
287 # self._root ends with a path separator if self._root is '/' or 'C:\'
288 rootsep = self._root
288 rootsep = self._root
289 if not util.endswithsep(rootsep):
289 if not util.endswithsep(rootsep):
290 rootsep += pycompat.ossep
290 rootsep += pycompat.ossep
291 if cwd.startswith(rootsep):
291 if cwd.startswith(rootsep):
292 return cwd[len(rootsep) :]
292 return cwd[len(rootsep) :]
293 else:
293 else:
294 # we're outside the repo. return an absolute path.
294 # we're outside the repo. return an absolute path.
295 return cwd
295 return cwd
296
296
297 def pathto(self, f, cwd=None):
297 def pathto(self, f, cwd=None):
298 if cwd is None:
298 if cwd is None:
299 cwd = self.getcwd()
299 cwd = self.getcwd()
300 path = util.pathto(self._root, cwd, f)
300 path = util.pathto(self._root, cwd, f)
301 if self._slash:
301 if self._slash:
302 return util.pconvert(path)
302 return util.pconvert(path)
303 return path
303 return path
304
304
305 def __getitem__(self, key):
305 def __getitem__(self, key):
306 """Return the current state of key (a filename) in the dirstate.
306 """Return the current state of key (a filename) in the dirstate.
307
307
308 States are:
308 States are:
309 n normal
309 n normal
310 m needs merging
310 m needs merging
311 r marked for removal
311 r marked for removal
312 a marked for addition
312 a marked for addition
313 ? not tracked
313 ? not tracked
314 """
314 """
315 return self._map.get(key, (b"?",))[0]
315 return self._map.get(key, (b"?",))[0]
316
316
317 def __contains__(self, key):
317 def __contains__(self, key):
318 return key in self._map
318 return key in self._map
319
319
320 def __iter__(self):
320 def __iter__(self):
321 return iter(sorted(self._map))
321 return iter(sorted(self._map))
322
322
323 def items(self):
323 def items(self):
324 return pycompat.iteritems(self._map)
324 return pycompat.iteritems(self._map)
325
325
326 iteritems = items
326 iteritems = items
327
327
328 def directories(self):
328 def directories(self):
329 return self._map.directories()
329 return self._map.directories()
330
330
331 def parents(self):
331 def parents(self):
332 return [self._validate(p) for p in self._pl]
332 return [self._validate(p) for p in self._pl]
333
333
334 def p1(self):
334 def p1(self):
335 return self._validate(self._pl[0])
335 return self._validate(self._pl[0])
336
336
337 def p2(self):
337 def p2(self):
338 return self._validate(self._pl[1])
338 return self._validate(self._pl[1])
339
339
340 @property
341 def in_merge(self):
342 """True if a merge is in progress"""
343 return self._pl[1] != self._nodeconstants.nullid
344
340 def branch(self):
345 def branch(self):
341 return encoding.tolocal(self._branch)
346 return encoding.tolocal(self._branch)
342
347
343 def setparents(self, p1, p2=None):
348 def setparents(self, p1, p2=None):
344 """Set dirstate parents to p1 and p2.
349 """Set dirstate parents to p1 and p2.
345
350
346 When moving from two parents to one, 'm' merged entries a
351 When moving from two parents to one, 'm' merged entries a
347 adjusted to normal and previous copy records discarded and
352 adjusted to normal and previous copy records discarded and
348 returned by the call.
353 returned by the call.
349
354
350 See localrepo.setparents()
355 See localrepo.setparents()
351 """
356 """
352 if p2 is None:
357 if p2 is None:
353 p2 = self._nodeconstants.nullid
358 p2 = self._nodeconstants.nullid
354 if self._parentwriters == 0:
359 if self._parentwriters == 0:
355 raise ValueError(
360 raise ValueError(
356 b"cannot set dirstate parent outside of "
361 b"cannot set dirstate parent outside of "
357 b"dirstate.parentchange context manager"
362 b"dirstate.parentchange context manager"
358 )
363 )
359
364
360 self._dirty = True
365 self._dirty = True
361 oldp2 = self._pl[1]
366 oldp2 = self._pl[1]
362 if self._origpl is None:
367 if self._origpl is None:
363 self._origpl = self._pl
368 self._origpl = self._pl
364 self._map.setparents(p1, p2)
369 self._map.setparents(p1, p2)
365 copies = {}
370 copies = {}
366 if (
371 if (
367 oldp2 != self._nodeconstants.nullid
372 oldp2 != self._nodeconstants.nullid
368 and p2 == self._nodeconstants.nullid
373 and p2 == self._nodeconstants.nullid
369 ):
374 ):
370 candidatefiles = self._map.non_normal_or_other_parent_paths()
375 candidatefiles = self._map.non_normal_or_other_parent_paths()
371
376
372 for f in candidatefiles:
377 for f in candidatefiles:
373 s = self._map.get(f)
378 s = self._map.get(f)
374 if s is None:
379 if s is None:
375 continue
380 continue
376
381
377 # Discard 'm' markers when moving away from a merge state
382 # Discard 'm' markers when moving away from a merge state
378 if s[0] == b'm':
383 if s[0] == b'm':
379 source = self._map.copymap.get(f)
384 source = self._map.copymap.get(f)
380 if source:
385 if source:
381 copies[f] = source
386 copies[f] = source
382 self.normallookup(f)
387 self.normallookup(f)
383 # Also fix up otherparent markers
388 # Also fix up otherparent markers
384 elif s[0] == b'n' and s[2] == FROM_P2:
389 elif s[0] == b'n' and s[2] == FROM_P2:
385 source = self._map.copymap.get(f)
390 source = self._map.copymap.get(f)
386 if source:
391 if source:
387 copies[f] = source
392 copies[f] = source
388 self.add(f)
393 self.add(f)
389 return copies
394 return copies
390
395
391 def setbranch(self, branch):
396 def setbranch(self, branch):
392 self.__class__._branch.set(self, encoding.fromlocal(branch))
397 self.__class__._branch.set(self, encoding.fromlocal(branch))
393 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
398 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
394 try:
399 try:
395 f.write(self._branch + b'\n')
400 f.write(self._branch + b'\n')
396 f.close()
401 f.close()
397
402
398 # make sure filecache has the correct stat info for _branch after
403 # make sure filecache has the correct stat info for _branch after
399 # replacing the underlying file
404 # replacing the underlying file
400 ce = self._filecache[b'_branch']
405 ce = self._filecache[b'_branch']
401 if ce:
406 if ce:
402 ce.refresh()
407 ce.refresh()
403 except: # re-raises
408 except: # re-raises
404 f.discard()
409 f.discard()
405 raise
410 raise
406
411
407 def invalidate(self):
412 def invalidate(self):
408 """Causes the next access to reread the dirstate.
413 """Causes the next access to reread the dirstate.
409
414
410 This is different from localrepo.invalidatedirstate() because it always
415 This is different from localrepo.invalidatedirstate() because it always
411 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
416 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
412 check whether the dirstate has changed before rereading it."""
417 check whether the dirstate has changed before rereading it."""
413
418
414 for a in ("_map", "_branch", "_ignore"):
419 for a in ("_map", "_branch", "_ignore"):
415 if a in self.__dict__:
420 if a in self.__dict__:
416 delattr(self, a)
421 delattr(self, a)
417 self._lastnormaltime = 0
422 self._lastnormaltime = 0
418 self._dirty = False
423 self._dirty = False
419 self._updatedfiles.clear()
424 self._updatedfiles.clear()
420 self._parentwriters = 0
425 self._parentwriters = 0
421 self._origpl = None
426 self._origpl = None
422
427
423 def copy(self, source, dest):
428 def copy(self, source, dest):
424 """Mark dest as a copy of source. Unmark dest if source is None."""
429 """Mark dest as a copy of source. Unmark dest if source is None."""
425 if source == dest:
430 if source == dest:
426 return
431 return
427 self._dirty = True
432 self._dirty = True
428 if source is not None:
433 if source is not None:
429 self._map.copymap[dest] = source
434 self._map.copymap[dest] = source
430 self._updatedfiles.add(source)
435 self._updatedfiles.add(source)
431 self._updatedfiles.add(dest)
436 self._updatedfiles.add(dest)
432 elif self._map.copymap.pop(dest, None):
437 elif self._map.copymap.pop(dest, None):
433 self._updatedfiles.add(dest)
438 self._updatedfiles.add(dest)
434
439
435 def copied(self, file):
440 def copied(self, file):
436 return self._map.copymap.get(file, None)
441 return self._map.copymap.get(file, None)
437
442
438 def copies(self):
443 def copies(self):
439 return self._map.copymap
444 return self._map.copymap
440
445
441 def _addpath(
446 def _addpath(
442 self,
447 self,
443 f,
448 f,
444 state,
449 state,
445 mode,
450 mode,
446 size=NONNORMAL,
451 size=NONNORMAL,
447 mtime=AMBIGUOUS_TIME,
452 mtime=AMBIGUOUS_TIME,
448 from_p2=False,
453 from_p2=False,
449 possibly_dirty=False,
454 possibly_dirty=False,
450 ):
455 ):
451 oldstate = self[f]
456 oldstate = self[f]
452 if state == b'a' or oldstate == b'r':
457 if state == b'a' or oldstate == b'r':
453 scmutil.checkfilename(f)
458 scmutil.checkfilename(f)
454 if self._map.hastrackeddir(f):
459 if self._map.hastrackeddir(f):
455 msg = _(b'directory %r already in dirstate')
460 msg = _(b'directory %r already in dirstate')
456 msg %= pycompat.bytestr(f)
461 msg %= pycompat.bytestr(f)
457 raise error.Abort(msg)
462 raise error.Abort(msg)
458 # shadows
463 # shadows
459 for d in pathutil.finddirs(f):
464 for d in pathutil.finddirs(f):
460 if self._map.hastrackeddir(d):
465 if self._map.hastrackeddir(d):
461 break
466 break
462 entry = self._map.get(d)
467 entry = self._map.get(d)
463 if entry is not None and entry[0] != b'r':
468 if entry is not None and entry[0] != b'r':
464 msg = _(b'file %r in dirstate clashes with %r')
469 msg = _(b'file %r in dirstate clashes with %r')
465 msg %= (pycompat.bytestr(d), pycompat.bytestr(f))
470 msg %= (pycompat.bytestr(d), pycompat.bytestr(f))
466 raise error.Abort(msg)
471 raise error.Abort(msg)
467 if state == b'a':
472 if state == b'a':
468 assert not possibly_dirty
473 assert not possibly_dirty
469 assert not from_p2
474 assert not from_p2
470 size = NONNORMAL
475 size = NONNORMAL
471 mtime = AMBIGUOUS_TIME
476 mtime = AMBIGUOUS_TIME
472 elif from_p2:
477 elif from_p2:
473 assert not possibly_dirty
478 assert not possibly_dirty
474 size = FROM_P2
479 size = FROM_P2
475 mtime = AMBIGUOUS_TIME
480 mtime = AMBIGUOUS_TIME
476 elif possibly_dirty:
481 elif possibly_dirty:
477 mtime = AMBIGUOUS_TIME
482 mtime = AMBIGUOUS_TIME
478 else:
483 else:
479 assert size != FROM_P2
484 assert size != FROM_P2
480 assert size != NONNORMAL
485 assert size != NONNORMAL
481 size = size & _rangemask
486 size = size & _rangemask
482 mtime = mtime & _rangemask
487 mtime = mtime & _rangemask
483 self._dirty = True
488 self._dirty = True
484 self._updatedfiles.add(f)
489 self._updatedfiles.add(f)
485 self._map.addfile(f, oldstate, state, mode, size, mtime)
490 self._map.addfile(f, oldstate, state, mode, size, mtime)
486
491
487 def normal(self, f, parentfiledata=None):
492 def normal(self, f, parentfiledata=None):
488 """Mark a file normal and clean.
493 """Mark a file normal and clean.
489
494
490 parentfiledata: (mode, size, mtime) of the clean file
495 parentfiledata: (mode, size, mtime) of the clean file
491
496
492 parentfiledata should be computed from memory (for mode,
497 parentfiledata should be computed from memory (for mode,
493 size), as or close as possible from the point where we
498 size), as or close as possible from the point where we
494 determined the file was clean, to limit the risk of the
499 determined the file was clean, to limit the risk of the
495 file having been changed by an external process between the
500 file having been changed by an external process between the
496 moment where the file was determined to be clean and now."""
501 moment where the file was determined to be clean and now."""
497 if parentfiledata:
502 if parentfiledata:
498 (mode, size, mtime) = parentfiledata
503 (mode, size, mtime) = parentfiledata
499 else:
504 else:
500 s = os.lstat(self._join(f))
505 s = os.lstat(self._join(f))
501 mode = s.st_mode
506 mode = s.st_mode
502 size = s.st_size
507 size = s.st_size
503 mtime = s[stat.ST_MTIME]
508 mtime = s[stat.ST_MTIME]
504 self._addpath(f, b'n', mode, size, mtime)
509 self._addpath(f, b'n', mode, size, mtime)
505 self._map.copymap.pop(f, None)
510 self._map.copymap.pop(f, None)
506 if f in self._map.nonnormalset:
511 if f in self._map.nonnormalset:
507 self._map.nonnormalset.remove(f)
512 self._map.nonnormalset.remove(f)
508 if mtime > self._lastnormaltime:
513 if mtime > self._lastnormaltime:
509 # Remember the most recent modification timeslot for status(),
514 # Remember the most recent modification timeslot for status(),
510 # to make sure we won't miss future size-preserving file content
515 # to make sure we won't miss future size-preserving file content
511 # modifications that happen within the same timeslot.
516 # modifications that happen within the same timeslot.
512 self._lastnormaltime = mtime
517 self._lastnormaltime = mtime
513
518
514 def normallookup(self, f):
519 def normallookup(self, f):
515 '''Mark a file normal, but possibly dirty.'''
520 '''Mark a file normal, but possibly dirty.'''
516 if self._pl[1] != self._nodeconstants.nullid:
521 if self.in_merge:
517 # if there is a merge going on and the file was either
522 # if there is a merge going on and the file was either
518 # in state 'm' (-1) or coming from other parent (-2) before
523 # in state 'm' (-1) or coming from other parent (-2) before
519 # being removed, restore that state.
524 # being removed, restore that state.
520 entry = self._map.get(f)
525 entry = self._map.get(f)
521 if entry is not None:
526 if entry is not None:
522 if entry[0] == b'r' and entry[2] in (NONNORMAL, FROM_P2):
527 if entry[0] == b'r' and entry[2] in (NONNORMAL, FROM_P2):
523 source = self._map.copymap.get(f)
528 source = self._map.copymap.get(f)
524 if entry[2] == NONNORMAL:
529 if entry[2] == NONNORMAL:
525 self.merge(f)
530 self.merge(f)
526 elif entry[2] == FROM_P2:
531 elif entry[2] == FROM_P2:
527 self.otherparent(f)
532 self.otherparent(f)
528 if source:
533 if source:
529 self.copy(source, f)
534 self.copy(source, f)
530 return
535 return
531 if entry[0] == b'm' or entry[0] == b'n' and entry[2] == FROM_P2:
536 if entry[0] == b'm' or entry[0] == b'n' and entry[2] == FROM_P2:
532 return
537 return
533 self._addpath(f, b'n', 0, possibly_dirty=True)
538 self._addpath(f, b'n', 0, possibly_dirty=True)
534 self._map.copymap.pop(f, None)
539 self._map.copymap.pop(f, None)
535
540
536 def otherparent(self, f):
541 def otherparent(self, f):
537 '''Mark as coming from the other parent, always dirty.'''
542 '''Mark as coming from the other parent, always dirty.'''
538 if self._pl[1] == self._nodeconstants.nullid:
543 if not self.in_merge:
539 msg = _(b"setting %r to other parent only allowed in merges") % f
544 msg = _(b"setting %r to other parent only allowed in merges") % f
540 raise error.Abort(msg)
545 raise error.Abort(msg)
541 if f in self and self[f] == b'n':
546 if f in self and self[f] == b'n':
542 # merge-like
547 # merge-like
543 self._addpath(f, b'm', 0, from_p2=True)
548 self._addpath(f, b'm', 0, from_p2=True)
544 else:
549 else:
545 # add-like
550 # add-like
546 self._addpath(f, b'n', 0, from_p2=True)
551 self._addpath(f, b'n', 0, from_p2=True)
547 self._map.copymap.pop(f, None)
552 self._map.copymap.pop(f, None)
548
553
549 def add(self, f):
554 def add(self, f):
550 '''Mark a file added.'''
555 '''Mark a file added.'''
551 self._addpath(f, b'a', 0)
556 self._addpath(f, b'a', 0)
552 self._map.copymap.pop(f, None)
557 self._map.copymap.pop(f, None)
553
558
554 def remove(self, f):
559 def remove(self, f):
555 '''Mark a file removed.'''
560 '''Mark a file removed.'''
556 self._dirty = True
561 self._dirty = True
557 oldstate = self[f]
562 oldstate = self[f]
558 size = 0
563 size = 0
559 if self._pl[1] != self._nodeconstants.nullid:
564 if self.in_merge:
560 entry = self._map.get(f)
565 entry = self._map.get(f)
561 if entry is not None:
566 if entry is not None:
562 # backup the previous state
567 # backup the previous state
563 if entry[0] == b'm': # merge
568 if entry[0] == b'm': # merge
564 size = NONNORMAL
569 size = NONNORMAL
565 elif entry[0] == b'n' and entry[2] == FROM_P2: # other parent
570 elif entry[0] == b'n' and entry[2] == FROM_P2: # other parent
566 size = FROM_P2
571 size = FROM_P2
567 self._map.otherparentset.add(f)
572 self._map.otherparentset.add(f)
568 self._updatedfiles.add(f)
573 self._updatedfiles.add(f)
569 self._map.removefile(f, oldstate, size)
574 self._map.removefile(f, oldstate, size)
570 if size == 0:
575 if size == 0:
571 self._map.copymap.pop(f, None)
576 self._map.copymap.pop(f, None)
572
577
573 def merge(self, f):
578 def merge(self, f):
574 '''Mark a file merged.'''
579 '''Mark a file merged.'''
575 if self._pl[1] == self._nodeconstants.nullid:
580 if not self.in_merge:
576 return self.normallookup(f)
581 return self.normallookup(f)
577 return self.otherparent(f)
582 return self.otherparent(f)
578
583
def drop(self, f):
    '''Drop a file from the dirstate.

    Only marks the dirstate dirty when the map actually contained an
    entry for ``f``.
    '''
    oldstate = self[f]
    if self._map.dropfile(f, oldstate):
        self._dirty = True
        self._updatedfiles.add(f)
        self._map.copymap.pop(f, None)
586
591
587 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
592 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
588 if exists is None:
593 if exists is None:
589 exists = os.path.lexists(os.path.join(self._root, path))
594 exists = os.path.lexists(os.path.join(self._root, path))
590 if not exists:
595 if not exists:
591 # Maybe a path component exists
596 # Maybe a path component exists
592 if not ignoremissing and b'/' in path:
597 if not ignoremissing and b'/' in path:
593 d, f = path.rsplit(b'/', 1)
598 d, f = path.rsplit(b'/', 1)
594 d = self._normalize(d, False, ignoremissing, None)
599 d = self._normalize(d, False, ignoremissing, None)
595 folded = d + b"/" + f
600 folded = d + b"/" + f
596 else:
601 else:
597 # No path components, preserve original case
602 # No path components, preserve original case
598 folded = path
603 folded = path
599 else:
604 else:
600 # recursively normalize leading directory components
605 # recursively normalize leading directory components
601 # against dirstate
606 # against dirstate
602 if b'/' in normed:
607 if b'/' in normed:
603 d, f = normed.rsplit(b'/', 1)
608 d, f = normed.rsplit(b'/', 1)
604 d = self._normalize(d, False, ignoremissing, True)
609 d = self._normalize(d, False, ignoremissing, True)
605 r = self._root + b"/" + d
610 r = self._root + b"/" + d
606 folded = d + b"/" + util.fspath(f, r)
611 folded = d + b"/" + util.fspath(f, r)
607 else:
612 else:
608 folded = util.fspath(normed, self._root)
613 folded = util.fspath(normed, self._root)
609 storemap[normed] = folded
614 storemap[normed] = folded
610
615
611 return folded
616 return folded
612
617
def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
    """Return the dirstate's preferred case spelling for file ``path``.

    Consults only the *file* foldmap (not directories); falls back to
    on-disk discovery via ``_discoverpath`` for unknown paths.
    """
    normed = util.normcase(path)
    folded = self._map.filefoldmap.get(normed, None)
    if folded is None:
        if isknown:
            # caller got the name from the filesystem, trust its case
            folded = path
        else:
            folded = self._discoverpath(
                path, normed, ignoremissing, exists, self._map.filefoldmap
            )
    return folded
624
629
def _normalize(self, path, isknown, ignoremissing=False, exists=None):
    """Return the dirstate's preferred case spelling for ``path``.

    Checks the file foldmap first, then the directory foldmap, and only
    then falls back to on-disk discovery.
    """
    normed = util.normcase(path)
    folded = self._map.filefoldmap.get(normed, None)
    if folded is None:
        folded = self._map.dirfoldmap.get(normed, None)
    if folded is None:
        if isknown:
            folded = path
        else:
            # store discovered result in dirfoldmap so that future
            # normalizefile calls don't start matching directories
            folded = self._discoverpath(
                path, normed, ignoremissing, exists, self._map.dirfoldmap
            )
    return folded
640
645
def normalize(self, path, isknown=False, ignoremissing=False):
    """
    normalize the case of a pathname when on a casefolding filesystem

    isknown specifies whether the filename came from walking the
    disk, to avoid extra filesystem access.

    If ignoremissing is True, missing path are returned
    unchanged. Otherwise, we try harder to normalize possibly
    existing path components.

    The normalized case is determined based on the following precedence:

    - version of name already stored in the dirstate
    - version of name stored on disk
    - version provided via command arguments
    """

    if self._checkcase:
        return self._normalize(path, isknown, ignoremissing)
    # case-sensitive filesystem: nothing to normalize
    return path
662
667
def clear(self):
    """Reset the in-memory dirstate and mark it dirty."""
    self._map.clear()
    self._lastnormaltime = 0
    self._updatedfiles.clear()
    self._dirty = True
668
673
def rebuild(self, parent, allfiles, changedfiles=None):
    """Rebuild dirstate entries against ``parent``.

    With ``changedfiles=None`` the whole dirstate is rebuilt from
    ``allfiles``; otherwise only the listed files are re-looked-up (if
    still in ``allfiles``) or dropped.
    """
    if changedfiles is None:
        # Rebuild entire dirstate
        to_lookup = allfiles
        to_drop = []
        lastnormaltime = self._lastnormaltime
        self.clear()
        self._lastnormaltime = lastnormaltime
    elif len(changedfiles) < 10:
        # Avoid turning allfiles into a set, which can be expensive if it's
        # large.
        to_lookup = []
        to_drop = []
        for f in changedfiles:
            if f in allfiles:
                to_lookup.append(f)
            else:
                to_drop.append(f)
    else:
        changedfilesset = set(changedfiles)
        to_lookup = changedfilesset & set(allfiles)
        to_drop = changedfilesset - to_lookup

    if self._origpl is None:
        # remember the pre-rebuild parents for change callbacks
        self._origpl = self._pl
    self._map.setparents(parent, self._nodeconstants.nullid)

    for f in to_lookup:
        self.normallookup(f)
    for f in to_drop:
        self.drop(f)

    self._dirty = True
702
707
def identity(self):
    """Return identity of dirstate itself to detect changing in storage

    If identity of previous dirstate is equal to this, writing
    changes based on the former dirstate out can keep consistency.
    """
    return self._map.identity
710
715
def write(self, tr):
    """Write dirstate changes to disk, or schedule them on transaction ``tr``.

    A no-op when nothing is dirty.  With a transaction the actual write
    is delayed via a file generator; ambiguous timestamps are dropped
    eagerly so the delayed write stays consistent.
    """
    if not self._dirty:
        return

    filename = self._filename
    if tr:
        # 'dirstate.write()' is not only for writing in-memory
        # changes out, but also for dropping ambiguous timestamp.
        # delayed writing re-raise "ambiguous timestamp issue".
        # See also the wiki page below for detail:
        # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan

        # emulate dropping timestamp in 'parsers.pack_dirstate'
        now = _getfsnow(self._opener)
        self._map.clearambiguoustimes(self._updatedfiles, now)

        # emulate that all 'dirstate.normal' results are written out
        self._lastnormaltime = 0
        self._updatedfiles.clear()

        # delay writing in-memory changes out
        tr.addfilegenerator(
            b'dirstate',
            (self._filename,),
            self._writedirstate,
            location=b'plain',
        )
        return

    st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
    self._writedirstate(st)
742
747
def addparentchangecallback(self, category, callback):
    """add a callback to be called when the wd parents are changed

    Callback will be called with the following arguments:
    dirstate, (oldp1, oldp2), (newp1, newp2)

    Category is a unique identifier to allow overwriting an old callback
    with a newer callback.
    """
    self._plchangecallbacks[category] = callback
753
758
def _writedirstate(self, st):
    """Serialize the dirstate map to the open file object ``st``.

    Fires parent-change callbacks first, then optionally sleeps
    (debug.dirstate.delaywrite) so that no entry shares its mtime with
    'now', which would make it ambiguous.
    """
    # notify callbacks about parents change
    if self._origpl is not None and self._origpl != self._pl:
        for c, callback in sorted(
            pycompat.iteritems(self._plchangecallbacks)
        ):
            callback(self, self._origpl, self._pl)
        self._origpl = None
    # use the modification time of the newly created temporary file as the
    # filesystem's notion of 'now'
    now = util.fstat(st)[stat.ST_MTIME] & _rangemask

    # enough 'delaywrite' prevents 'pack_dirstate' from dropping
    # timestamp of each entries in dirstate, because of 'now > mtime'
    delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
    if delaywrite > 0:
        # do we have any files to delay for?
        for f, e in pycompat.iteritems(self._map):
            if e[0] == b'n' and e[3] == now:
                import time  # to avoid useless import

                # rather than sleep n seconds, sleep until the next
                # multiple of n seconds
                clock = time.time()
                start = int(clock) - (int(clock) % delaywrite)
                end = start + delaywrite
                time.sleep(end - clock)
                now = end  # trust our estimate that the end is near now
                break

    self._map.write(st, now)
    self._lastnormaltime = 0
    self._dirty = False
787
792
788 def _dirignore(self, f):
793 def _dirignore(self, f):
789 if self._ignore(f):
794 if self._ignore(f):
790 return True
795 return True
791 for p in pathutil.finddirs(f):
796 for p in pathutil.finddirs(f):
792 if self._ignore(p):
797 if self._ignore(p):
793 return True
798 return True
794 return False
799 return False
795
800
796 def _ignorefiles(self):
801 def _ignorefiles(self):
797 files = []
802 files = []
798 if os.path.exists(self._join(b'.hgignore')):
803 if os.path.exists(self._join(b'.hgignore')):
799 files.append(self._join(b'.hgignore'))
804 files.append(self._join(b'.hgignore'))
800 for name, path in self._ui.configitems(b"ui"):
805 for name, path in self._ui.configitems(b"ui"):
801 if name == b'ignore' or name.startswith(b'ignore.'):
806 if name == b'ignore' or name.startswith(b'ignore.'):
802 # we need to use os.path.join here rather than self._join
807 # we need to use os.path.join here rather than self._join
803 # because path is arbitrary and user-specified
808 # because path is arbitrary and user-specified
804 files.append(os.path.join(self._rootdir, util.expandpath(path)))
809 files.append(os.path.join(self._rootdir, util.expandpath(path)))
805 return files
810 return files
806
811
def _ignorefileandline(self, f):
    """Return ``(file, lineno, line)`` of the first ignore pattern matching ``f``.

    Walks all ignore files (following ``subinclude`` directives, once
    each) and returns ``(None, -1, b"")`` when nothing matches.
    """
    files = collections.deque(self._ignorefiles())
    visited = set()
    while files:
        i = files.popleft()
        patterns = matchmod.readpatternfile(
            i, self._ui.warn, sourceinfo=True
        )
        for pattern, lineno, line in patterns:
            kind, p = matchmod._patsplit(pattern, b'glob')
            if kind == b"subinclude":
                if p not in visited:
                    files.append(p)
                continue
            m = matchmod.match(
                self._root, b'', [], [pattern], warn=self._ui.warn
            )
            if m(f):
                return (i, lineno, line)
        visited.add(i)
    return (None, -1, b"")
828
833
def _walkexplicit(self, match, subrepos):
    """Get stat data about the files explicitly specified by match.

    Return a triple (results, dirsfound, dirsnotfound).
    - results is a mapping from filename to stat result. It also contains
      listings mapping subrepos and .hg to None.
    - dirsfound is a list of files found to be directories.
    - dirsnotfound is a list of files that the dirstate thinks are
      directories and that were not found."""

    def badtype(mode):
        # human-readable description for the unsupported file kinds
        kind = _(b'unknown')
        if stat.S_ISCHR(mode):
            kind = _(b'character device')
        elif stat.S_ISBLK(mode):
            kind = _(b'block device')
        elif stat.S_ISFIFO(mode):
            kind = _(b'fifo')
        elif stat.S_ISSOCK(mode):
            kind = _(b'socket')
        elif stat.S_ISDIR(mode):
            kind = _(b'directory')
        return _(b'unsupported file type (type is %s)') % kind

    badfn = match.bad
    dmap = self._map
    lstat = os.lstat
    getkind = stat.S_IFMT
    dirkind = stat.S_IFDIR
    regkind = stat.S_IFREG
    lnkkind = stat.S_IFLNK
    join = self._join
    dirsfound = []
    foundadd = dirsfound.append
    dirsnotfound = []
    notfoundadd = dirsnotfound.append

    if not match.isexact() and self._checkcase:
        normalize = self._normalize
    else:
        normalize = None

    # drop explicit files that live inside a subrepo; the subrepo walk
    # handles them (both lists are sorted, so this is a single pass)
    files = sorted(match.files())
    subrepos.sort()
    i, j = 0, 0
    while i < len(files) and j < len(subrepos):
        subpath = subrepos[j] + b"/"
        if files[i] < subpath:
            i += 1
            continue
        while i < len(files) and files[i].startswith(subpath):
            del files[i]
        j += 1

    if not files or b'' in files:
        files = [b'']
        # constructing the foldmap is expensive, so don't do it for the
        # common case where files is ['']
        normalize = None
    results = dict.fromkeys(subrepos)
    results[b'.hg'] = None

    for ff in files:
        if normalize:
            nf = normalize(ff, False, True)
        else:
            nf = ff
        if nf in results:
            continue

        try:
            st = lstat(join(nf))
            kind = getkind(st.st_mode)
            if kind == dirkind:
                if nf in dmap:
                    # file replaced by dir on disk but still in dirstate
                    results[nf] = None
                foundadd((nf, ff))
            elif kind == regkind or kind == lnkkind:
                results[nf] = st
            else:
                badfn(ff, badtype(kind))
                if nf in dmap:
                    results[nf] = None
        except OSError as inst:  # nf not found on disk - it is dirstate only
            if nf in dmap:  # does it exactly match a missing file?
                results[nf] = None
            else:  # does it match a missing directory?
                if self._map.hasdir(nf):
                    notfoundadd(nf)
                else:
                    badfn(ff, encoding.strtolocal(inst.strerror))

    # match.files() may contain explicitly-specified paths that shouldn't
    # be taken; drop them from the list of files found. dirsfound/notfound
    # aren't filtered here because they will be tested later.
    if match.anypats():
        for f in list(results):
            if f == b'.hg' or f in subrepos:
                # keep sentinel to disable further out-of-repo walks
                continue
            if not match(f):
                del results[f]

    # Case insensitive filesystems cannot rely on lstat() failing to detect
    # a case-only rename. Prune the stat object for any file that does not
    # match the case in the filesystem, if there are multiple files that
    # normalize to the same path.
    if match.isexact() and self._checkcase:
        normed = {}

        for f, st in pycompat.iteritems(results):
            if st is None:
                continue

            nc = util.normcase(f)
            paths = normed.get(nc)

            if paths is None:
                paths = set()
                normed[nc] = paths

            paths.add(f)

        for norm, paths in pycompat.iteritems(normed):
            if len(paths) > 1:
                for path in paths:
                    folded = self._discoverpath(
                        path, norm, True, None, self._map.dirfoldmap
                    )
                    if path != folded:
                        results[path] = None

    return results, dirsfound, dirsnotfound
963
968
964 def walk(self, match, subrepos, unknown, ignored, full=True):
969 def walk(self, match, subrepos, unknown, ignored, full=True):
965 """
970 """
966 Walk recursively through the directory tree, finding all files
971 Walk recursively through the directory tree, finding all files
967 matched by match.
972 matched by match.
968
973
969 If full is False, maybe skip some known-clean files.
974 If full is False, maybe skip some known-clean files.
970
975
971 Return a dict mapping filename to stat-like object (either
976 Return a dict mapping filename to stat-like object (either
972 mercurial.osutil.stat instance or return value of os.stat()).
977 mercurial.osutil.stat instance or return value of os.stat()).
973
978
974 """
979 """
975 # full is a flag that extensions that hook into walk can use -- this
980 # full is a flag that extensions that hook into walk can use -- this
976 # implementation doesn't use it at all. This satisfies the contract
981 # implementation doesn't use it at all. This satisfies the contract
977 # because we only guarantee a "maybe".
982 # because we only guarantee a "maybe".
978
983
979 if ignored:
984 if ignored:
980 ignore = util.never
985 ignore = util.never
981 dirignore = util.never
986 dirignore = util.never
982 elif unknown:
987 elif unknown:
983 ignore = self._ignore
988 ignore = self._ignore
984 dirignore = self._dirignore
989 dirignore = self._dirignore
985 else:
990 else:
986 # if not unknown and not ignored, drop dir recursion and step 2
991 # if not unknown and not ignored, drop dir recursion and step 2
987 ignore = util.always
992 ignore = util.always
988 dirignore = util.always
993 dirignore = util.always
989
994
990 matchfn = match.matchfn
995 matchfn = match.matchfn
991 matchalways = match.always()
996 matchalways = match.always()
992 matchtdir = match.traversedir
997 matchtdir = match.traversedir
993 dmap = self._map
998 dmap = self._map
994 listdir = util.listdir
999 listdir = util.listdir
995 lstat = os.lstat
1000 lstat = os.lstat
996 dirkind = stat.S_IFDIR
1001 dirkind = stat.S_IFDIR
997 regkind = stat.S_IFREG
1002 regkind = stat.S_IFREG
998 lnkkind = stat.S_IFLNK
1003 lnkkind = stat.S_IFLNK
999 join = self._join
1004 join = self._join
1000
1005
1001 exact = skipstep3 = False
1006 exact = skipstep3 = False
1002 if match.isexact(): # match.exact
1007 if match.isexact(): # match.exact
1003 exact = True
1008 exact = True
1004 dirignore = util.always # skip step 2
1009 dirignore = util.always # skip step 2
1005 elif match.prefix(): # match.match, no patterns
1010 elif match.prefix(): # match.match, no patterns
1006 skipstep3 = True
1011 skipstep3 = True
1007
1012
1008 if not exact and self._checkcase:
1013 if not exact and self._checkcase:
1009 normalize = self._normalize
1014 normalize = self._normalize
1010 normalizefile = self._normalizefile
1015 normalizefile = self._normalizefile
1011 skipstep3 = False
1016 skipstep3 = False
1012 else:
1017 else:
1013 normalize = self._normalize
1018 normalize = self._normalize
1014 normalizefile = None
1019 normalizefile = None
1015
1020
1016 # step 1: find all explicit files
1021 # step 1: find all explicit files
1017 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1022 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1018 if matchtdir:
1023 if matchtdir:
1019 for d in work:
1024 for d in work:
1020 matchtdir(d[0])
1025 matchtdir(d[0])
1021 for d in dirsnotfound:
1026 for d in dirsnotfound:
1022 matchtdir(d)
1027 matchtdir(d)
1023
1028
1024 skipstep3 = skipstep3 and not (work or dirsnotfound)
1029 skipstep3 = skipstep3 and not (work or dirsnotfound)
1025 work = [d for d in work if not dirignore(d[0])]
1030 work = [d for d in work if not dirignore(d[0])]
1026
1031
1027 # step 2: visit subdirectories
1032 # step 2: visit subdirectories
1028 def traverse(work, alreadynormed):
1033 def traverse(work, alreadynormed):
1029 wadd = work.append
1034 wadd = work.append
1030 while work:
1035 while work:
1031 tracing.counter('dirstate.walk work', len(work))
1036 tracing.counter('dirstate.walk work', len(work))
1032 nd = work.pop()
1037 nd = work.pop()
1033 visitentries = match.visitchildrenset(nd)
1038 visitentries = match.visitchildrenset(nd)
1034 if not visitentries:
1039 if not visitentries:
1035 continue
1040 continue
1036 if visitentries == b'this' or visitentries == b'all':
1041 if visitentries == b'this' or visitentries == b'all':
1037 visitentries = None
1042 visitentries = None
1038 skip = None
1043 skip = None
1039 if nd != b'':
1044 if nd != b'':
1040 skip = b'.hg'
1045 skip = b'.hg'
1041 try:
1046 try:
1042 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1047 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1043 entries = listdir(join(nd), stat=True, skip=skip)
1048 entries = listdir(join(nd), stat=True, skip=skip)
1044 except OSError as inst:
1049 except OSError as inst:
1045 if inst.errno in (errno.EACCES, errno.ENOENT):
1050 if inst.errno in (errno.EACCES, errno.ENOENT):
1046 match.bad(
1051 match.bad(
1047 self.pathto(nd), encoding.strtolocal(inst.strerror)
1052 self.pathto(nd), encoding.strtolocal(inst.strerror)
1048 )
1053 )
1049 continue
1054 continue
1050 raise
1055 raise
1051 for f, kind, st in entries:
1056 for f, kind, st in entries:
1052 # Some matchers may return files in the visitentries set,
1057 # Some matchers may return files in the visitentries set,
1053 # instead of 'this', if the matcher explicitly mentions them
1058 # instead of 'this', if the matcher explicitly mentions them
1054 # and is not an exactmatcher. This is acceptable; we do not
1059 # and is not an exactmatcher. This is acceptable; we do not
1055 # make any hard assumptions about file-or-directory below
1060 # make any hard assumptions about file-or-directory below
1056 # based on the presence of `f` in visitentries. If
1061 # based on the presence of `f` in visitentries. If
1057 # visitchildrenset returned a set, we can always skip the
1062 # visitchildrenset returned a set, we can always skip the
1058 # entries *not* in the set it provided regardless of whether
1063 # entries *not* in the set it provided regardless of whether
1059 # they're actually a file or a directory.
1064 # they're actually a file or a directory.
1060 if visitentries and f not in visitentries:
1065 if visitentries and f not in visitentries:
1061 continue
1066 continue
1062 if normalizefile:
1067 if normalizefile:
1063 # even though f might be a directory, we're only
1068 # even though f might be a directory, we're only
1064 # interested in comparing it to files currently in the
1069 # interested in comparing it to files currently in the
1065 # dmap -- therefore normalizefile is enough
1070 # dmap -- therefore normalizefile is enough
1066 nf = normalizefile(
1071 nf = normalizefile(
1067 nd and (nd + b"/" + f) or f, True, True
1072 nd and (nd + b"/" + f) or f, True, True
1068 )
1073 )
1069 else:
1074 else:
1070 nf = nd and (nd + b"/" + f) or f
1075 nf = nd and (nd + b"/" + f) or f
1071 if nf not in results:
1076 if nf not in results:
1072 if kind == dirkind:
1077 if kind == dirkind:
1073 if not ignore(nf):
1078 if not ignore(nf):
1074 if matchtdir:
1079 if matchtdir:
1075 matchtdir(nf)
1080 matchtdir(nf)
1076 wadd(nf)
1081 wadd(nf)
1077 if nf in dmap and (matchalways or matchfn(nf)):
1082 if nf in dmap and (matchalways or matchfn(nf)):
1078 results[nf] = None
1083 results[nf] = None
1079 elif kind == regkind or kind == lnkkind:
1084 elif kind == regkind or kind == lnkkind:
1080 if nf in dmap:
1085 if nf in dmap:
1081 if matchalways or matchfn(nf):
1086 if matchalways or matchfn(nf):
1082 results[nf] = st
1087 results[nf] = st
1083 elif (matchalways or matchfn(nf)) and not ignore(
1088 elif (matchalways or matchfn(nf)) and not ignore(
1084 nf
1089 nf
1085 ):
1090 ):
1086 # unknown file -- normalize if necessary
1091 # unknown file -- normalize if necessary
1087 if not alreadynormed:
1092 if not alreadynormed:
1088 nf = normalize(nf, False, True)
1093 nf = normalize(nf, False, True)
1089 results[nf] = st
1094 results[nf] = st
1090 elif nf in dmap and (matchalways or matchfn(nf)):
1095 elif nf in dmap and (matchalways or matchfn(nf)):
1091 results[nf] = None
1096 results[nf] = None
1092
1097
1093 for nd, d in work:
1098 for nd, d in work:
1094 # alreadynormed means that processwork doesn't have to do any
1099 # alreadynormed means that processwork doesn't have to do any
1095 # expensive directory normalization
1100 # expensive directory normalization
1096 alreadynormed = not normalize or nd == d
1101 alreadynormed = not normalize or nd == d
1097 traverse([d], alreadynormed)
1102 traverse([d], alreadynormed)
1098
1103
1099 for s in subrepos:
1104 for s in subrepos:
1100 del results[s]
1105 del results[s]
1101 del results[b'.hg']
1106 del results[b'.hg']
1102
1107
1103 # step 3: visit remaining files from dmap
1108 # step 3: visit remaining files from dmap
1104 if not skipstep3 and not exact:
1109 if not skipstep3 and not exact:
1105 # If a dmap file is not in results yet, it was either
1110 # If a dmap file is not in results yet, it was either
1106 # a) not matching matchfn b) ignored, c) missing, or d) under a
1111 # a) not matching matchfn b) ignored, c) missing, or d) under a
1107 # symlink directory.
1112 # symlink directory.
1108 if not results and matchalways:
1113 if not results and matchalways:
1109 visit = [f for f in dmap]
1114 visit = [f for f in dmap]
1110 else:
1115 else:
1111 visit = [f for f in dmap if f not in results and matchfn(f)]
1116 visit = [f for f in dmap if f not in results and matchfn(f)]
1112 visit.sort()
1117 visit.sort()
1113
1118
1114 if unknown:
1119 if unknown:
1115 # unknown == True means we walked all dirs under the roots
1120 # unknown == True means we walked all dirs under the roots
1116 # that wasn't ignored, and everything that matched was stat'ed
1121 # that wasn't ignored, and everything that matched was stat'ed
1117 # and is already in results.
1122 # and is already in results.
1118 # The rest must thus be ignored or under a symlink.
1123 # The rest must thus be ignored or under a symlink.
1119 audit_path = pathutil.pathauditor(self._root, cached=True)
1124 audit_path = pathutil.pathauditor(self._root, cached=True)
1120
1125
1121 for nf in iter(visit):
1126 for nf in iter(visit):
1122 # If a stat for the same file was already added with a
1127 # If a stat for the same file was already added with a
1123 # different case, don't add one for this, since that would
1128 # different case, don't add one for this, since that would
1124 # make it appear as if the file exists under both names
1129 # make it appear as if the file exists under both names
1125 # on disk.
1130 # on disk.
1126 if (
1131 if (
1127 normalizefile
1132 normalizefile
1128 and normalizefile(nf, True, True) in results
1133 and normalizefile(nf, True, True) in results
1129 ):
1134 ):
1130 results[nf] = None
1135 results[nf] = None
1131 # Report ignored items in the dmap as long as they are not
1136 # Report ignored items in the dmap as long as they are not
1132 # under a symlink directory.
1137 # under a symlink directory.
1133 elif audit_path.check(nf):
1138 elif audit_path.check(nf):
1134 try:
1139 try:
1135 results[nf] = lstat(join(nf))
1140 results[nf] = lstat(join(nf))
1136 # file was just ignored, no links, and exists
1141 # file was just ignored, no links, and exists
1137 except OSError:
1142 except OSError:
1138 # file doesn't exist
1143 # file doesn't exist
1139 results[nf] = None
1144 results[nf] = None
1140 else:
1145 else:
1141 # It's either missing or under a symlink directory
1146 # It's either missing or under a symlink directory
1142 # which we in this case report as missing
1147 # which we in this case report as missing
1143 results[nf] = None
1148 results[nf] = None
1144 else:
1149 else:
1145 # We may not have walked the full directory tree above,
1150 # We may not have walked the full directory tree above,
1146 # so stat and check everything we missed.
1151 # so stat and check everything we missed.
1147 iv = iter(visit)
1152 iv = iter(visit)
1148 for st in util.statfiles([join(i) for i in visit]):
1153 for st in util.statfiles([join(i) for i in visit]):
1149 results[next(iv)] = st
1154 results[next(iv)] = st
1150 return results
1155 return results
1151
1156
def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
    """Compute status through the Rust implementation.

    Returns the same ``(lookup, status)`` pair as ``status()``.
    """
    # Rayon (the Rust parallelism library) does not know how to read the
    # hg config yet, so propagate the worker settings through the
    # environment as a temporary workaround.
    numcpus = self._ui.configint(b"worker", b"numcpus")
    if numcpus is not None:
        encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
    if not self._ui.configbool(b"worker", b"enabled", True):
        encoding.environ[b"RAYON_NUM_THREADS"] = b"1"

    (
        lookup,
        modified,
        added,
        removed,
        deleted,
        clean,
        ignored,
        unknown,
        warnings,
        bad,
        traversed,
        dirty,
    ) = rustmod.status(
        self._map._rustmap,
        matcher,
        self._rootdir,
        self._ignorefiles(),
        self._checkexec,
        self._lastnormaltime,
        bool(list_clean),
        bool(list_ignored),
        bool(list_unknown),
        bool(matcher.traversedir),
    )

    # the Rust call reports whether it left the map in a modified state
    self._dirty |= dirty

    if matcher.traversedir:
        for dir in traversed:
            matcher.traversedir(dir)

    if self._ui.warn:
        for item in warnings:
            if isinstance(item, tuple):
                # (file path, offending syntax) from an ignore file
                file_path, syntax = item
                msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
                    file_path,
                    syntax,
                )
                self._ui.warn(msg)
            else:
                # `item` is the path of a pattern file that could not be
                # read
                msg = _(b"skipping unreadable pattern file '%s': %s\n")
                self._ui.warn(
                    msg
                    % (
                        pathutil.canonpath(
                            self._rootdir, self._rootdir, item
                        ),
                        b"No such file or directory",
                    )
                )

    for fn, message in bad:
        matcher.bad(fn, encoding.strtolocal(message))

    status = scmutil.status(
        modified=modified,
        added=added,
        removed=removed,
        deleted=deleted,
        unknown=unknown,
        ignored=ignored,
        clean=clean,
    )
    return (lookup, status)
1230
1235
def status(self, match, subrepos, ignored, clean, unknown):
    """Determine the status of the working copy relative to the
    dirstate and return a pair of (unsure, status), where status is of type
    scmutil.status and:

      unsure:
        files that might have been modified since the dirstate was
        written, but need to be read to be sure (size is the same
        but mtime differs)
      status.modified:
        files that have definitely been modified since the dirstate
        was written (different size or mode)
      status.clean:
        files that have definitely not been modified since the
        dirstate was written
    """
    listignored, listclean, listunknown = ignored, clean, unknown
    lookup, modified, added, unknown, ignored = [], [], [], [], []
    removed, deleted, clean = [], [], []

    dmap = self._map
    dmap.preload()

    # The Rust fast path only covers a subset of configurations:
    # case-sensitive filesystems, no subrepos, no sparse checkout, and a
    # limited set of matcher types.
    allowed_matchers = (
        matchmod.alwaysmatcher,
        matchmod.exactmatcher,
        matchmod.includematcher,
    )
    use_rust = not (
        rustmod is None
        or self._checkcase  # case-insensitive filesystems not handled yet
        or subrepos
        or sparse.enabled
        or not isinstance(match, allowed_matchers)
    )

    if use_rust:
        try:
            return self._rust_status(
                match, listclean, listignored, listunknown
            )
        except rustmod.FallbackError:
            # the Rust side refused this request; fall back to Python
            pass

    def _drop(f):
        pass

    # bind hot callables/attributes to locals for speed in the walk loop
    dcontains = dmap.__contains__
    dget = dmap.__getitem__
    ladd = lookup.append  # aka "unsure"
    madd = modified.append
    aadd = added.append
    uadd = unknown.append if listunknown else _drop
    iadd = ignored.append if listignored else _drop
    radd = removed.append
    dadd = deleted.append
    cadd = clean.append if listclean else _drop
    mexact = match.exact
    dirignore = self._dirignore
    checkexec = self._checkexec
    copymap = self._map.copymap
    lastnormaltime = self._lastnormaltime

    # We need to do full walks when either
    # - we're listing all clean files, or
    # - match.traversedir does something, because match.traversedir should
    #   be called for every dir in the working dir
    full = listclean or match.traversedir is not None
    for fn, st in pycompat.iteritems(
        self.walk(match, subrepos, listunknown, listignored, full=full)
    ):
        if not dcontains(fn):
            # untracked: either ignored or unknown
            if (listignored or mexact(fn)) and dirignore(fn):
                if listignored:
                    iadd(fn)
            else:
                uadd(fn)
            continue

        # This is equivalent to 'state, mode, size, time = dmap[fn]' but
        # not written like that for performance reasons. dmap[fn] is not
        # a Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
        # opcode has fast paths when the value to be unpacked is a tuple
        # or a list, but falls back to creating a full-fledged iterator
        # in general. That is much slower than simply accessing and
        # storing the tuple members one by one.
        entry = dget(fn)
        state = entry[0]
        mode = entry[1]
        size = entry[2]
        time = entry[3]

        if not st and state in b"nma":
            # tracked but missing on disk
            dadd(fn)
        elif state == b'n':
            if (
                size >= 0
                and (
                    (size != st.st_size and size != st.st_size & _rangemask)
                    or ((mode ^ st.st_mode) & 0o100 and checkexec)
                )
                or size == FROM_P2  # other parent
                or fn in copymap
            ):
                if stat.S_ISLNK(st.st_mode) and size != st.st_size:
                    # issue6456: Size returned may be longer due to
                    # encryption on EXT-4 fscrypt, undecided.
                    ladd(fn)
                else:
                    madd(fn)
            elif (
                time != st[stat.ST_MTIME]
                and time != st[stat.ST_MTIME] & _rangemask
            ):
                ladd(fn)
            elif st[stat.ST_MTIME] == lastnormaltime:
                # fn may have just been marked as normal and it may have
                # changed in the same second without changing its size.
                # This can happen if we quickly do multiple commits.
                # Force lookup, so we don't miss such a racy file change.
                ladd(fn)
            elif listclean:
                cadd(fn)
        elif state == b'm':
            madd(fn)
        elif state == b'a':
            aadd(fn)
        elif state == b'r':
            radd(fn)
    status = scmutil.status(
        modified, added, removed, deleted, unknown, ignored, clean
    )
    return (lookup, status)
1372
1377
def matches(self, match):
    """
    return files in the dirstate (in whatever state) filtered by match
    """
    # when the Rust extension is available, query its map directly
    dmap = self._map._rustmap if rustmod is not None else self._map

    if match.always():
        return dmap.keys()
    files = match.files()
    if match.isexact():
        # fast path -- iterate the (typically small) named-file list
        # instead of the whole dirstate map
        return [f for f in files if f in dmap]
    if match.prefix() and all(fn in dmap for fn in files):
        # fast path -- all the values are known to be files, so just
        # return that
        return list(files)
    return [f for f in dmap if match(f)]
1393
1398
def _actualfilename(self, tr):
    """Return the dirstate filename to operate on: the pending file when
    a transaction is running, the regular file otherwise."""
    return self._pendingfilename if tr else self._filename
1399
1404
def savebackup(self, tr, backupname):
    '''Save current dirstate into backup file'''
    fname = self._actualfilename(tr)
    assert backupname != fname

    # Use '_writedirstate' instead of 'write' so changes really hit the
    # disk, because the latter omits writing out if a transaction is
    # running. The output file is used to create the backup below.
    if self._dirty or not self._opener.exists(fname):
        self._writedirstate(
            self._opener(fname, b"w", atomictemp=True, checkambig=True)
        )

    if tr:
        # ensure that subsequent tr.writepending returns True for
        # changes written out above, even if dirstate is never
        # changed after this
        tr.addfilegenerator(
            b'dirstate',
            (self._filename,),
            self._writedirstate,
            location=b'plain',
        )

        # ensure that the pending file written above is unlinked at
        # failure, even if tr.writepending isn't invoked until the
        # end of this transaction
        tr.registertmp(fname, location=b'plain')

    self._opener.tryunlink(backupname)
    # hardlink backup is okay because _writedirstate is always called
    # with an "atomictemp=True" file.
    util.copyfile(
        self._opener.join(fname),
        self._opener.join(backupname),
        hardlink=True,
    )
1437
1442
def restorebackup(self, tr, backupname):
    '''Restore dirstate by backup file'''
    # invalidate first: this prevents "wlock.release()" from writing
    # pending dirstate changes over the freshly restored file
    self.invalidate()
    fname = self._actualfilename(tr)
    vfs = self._opener
    if util.samefile(vfs.join(backupname), vfs.join(fname)):
        # backup and target are already the same file; just drop the
        # backup name
        vfs.unlink(backupname)
    else:
        vfs.rename(backupname, fname, checkambig=True)
1449
1454
def clearbackup(self, tr, backupname):
    '''Clear backup file'''
    # `tr` is accepted for interface symmetry with savebackup and
    # restorebackup; it is not consulted here
    self._opener.unlink(backupname)
General Comments 0
You need to be logged in to leave comments. Login now