##// END OF EJS Templates
dirstate: add a `get_entry` method to the dirstate...
marmoute -
r48897:6a78715e default
parent child Browse files
Show More
@@ -1,1520 +1,1527 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .pycompat import delattr
17 from .pycompat import delattr
18
18
19 from hgdemandimport import tracing
19 from hgdemandimport import tracing
20
20
21 from . import (
21 from . import (
22 dirstatemap,
22 dirstatemap,
23 encoding,
23 encoding,
24 error,
24 error,
25 match as matchmod,
25 match as matchmod,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 sparse,
30 sparse,
31 util,
31 util,
32 )
32 )
33
33
34 from .interfaces import (
34 from .interfaces import (
35 dirstate as intdirstate,
35 dirstate as intdirstate,
36 util as interfaceutil,
36 util as interfaceutil,
37 )
37 )
38
38
39 parsers = policy.importmod('parsers')
39 parsers = policy.importmod('parsers')
40 rustmod = policy.importrust('dirstate')
40 rustmod = policy.importrust('dirstate')
41
41
42 SUPPORTS_DIRSTATE_V2 = rustmod is not None
42 SUPPORTS_DIRSTATE_V2 = rustmod is not None
43
43
44 propertycache = util.propertycache
44 propertycache = util.propertycache
45 filecache = scmutil.filecache
45 filecache = scmutil.filecache
46 _rangemask = dirstatemap.rangemask
46 _rangemask = dirstatemap.rangemask
47
47
48 DirstateItem = dirstatemap.DirstateItem
48 DirstateItem = dirstatemap.DirstateItem
49
49
50
50
class repocache(filecache):
    """filecache for files in .hg/

    Overrides join() so cache-entry paths resolve through the dirstate's
    opener (the .hg directory) rather than the working-directory root.
    """

    def join(self, obj, fname):
        # obj is the dirstate instance that owns this cache entry
        return obj._opener.join(fname)
56
56
57
57
class rootcache(filecache):
    """filecache for files in the repository root

    Overrides join() so cache-entry paths resolve against the working
    directory root (via dirstate._join) instead of .hg/.
    """

    def join(self, obj, fname):
        # obj is the dirstate instance that owns this cache entry
        return obj._join(fname)
63
63
64
64
65 def _getfsnow(vfs):
65 def _getfsnow(vfs):
66 '''Get "now" timestamp on filesystem'''
66 '''Get "now" timestamp on filesystem'''
67 tmpfd, tmpname = vfs.mkstemp()
67 tmpfd, tmpname = vfs.mkstemp()
68 try:
68 try:
69 return os.fstat(tmpfd)[stat.ST_MTIME]
69 return os.fstat(tmpfd)[stat.ST_MTIME]
70 finally:
70 finally:
71 os.close(tmpfd)
71 os.close(tmpfd)
72 vfs.unlink(tmpname)
72 vfs.unlink(tmpname)
73
73
74
74
def requires_parents_change(func):
    """Decorator restricting *func* to within a parentchange context.

    The wrapped method raises error.ProgrammingError when invoked while
    self.pendingparentchange() is false.
    """

    def wrap(self, *args, **kwargs):
        if self.pendingparentchange():
            return func(self, *args, **kwargs)
        msg = 'calling `%s` outside of a parentchange context' % func.__name__
        raise error.ProgrammingError(msg)

    return wrap
84
84
85
85
def requires_no_parents_change(func):
    """Decorator restricting *func* to outside any parentchange context.

    The wrapped method raises error.ProgrammingError when invoked while
    self.pendingparentchange() is true.
    """

    def wrap(self, *args, **kwargs):
        if not self.pendingparentchange():
            return func(self, *args, **kwargs)
        msg = 'calling `%s` inside of a parentchange context' % func.__name__
        raise error.ProgrammingError(msg)

    return wrap
95
95
96
96
97 @interfaceutil.implementer(intdirstate.idirstate)
97 @interfaceutil.implementer(intdirstate.idirstate)
98 class dirstate(object):
98 class dirstate(object):
    def __init__(
        self,
        opener,
        ui,
        root,
        validate,
        sparsematchfn,
        nodeconstants,
        use_dirstate_v2,
    ):
        """Create a new dirstate object.

        opener is an open()-like callable that can be used to open the
        dirstate file; root is the root of the directory tracked by
        the dirstate.
        """
        self._use_dirstate_v2 = use_dirstate_v2
        self._nodeconstants = nodeconstants
        self._opener = opener
        self._validate = validate
        self._root = root
        self._sparsematchfn = sparsematchfn
        # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
        # UNC path pointing to root share (issue4557)
        self._rootdir = pathutil.normasprefix(root)
        # True while the in-memory state diverges from what is on disk
        self._dirty = False
        self._lastnormaltime = 0
        self._ui = ui
        self._filecache = {}
        # count of currently-open parentchange() contexts
        self._parentwriters = 0
        self._filename = b'dirstate'
        self._pendingfilename = b'%s.pending' % self._filename
        self._plchangecallbacks = {}
        self._origpl = None
        self._mapcls = dirstatemap.dirstatemap
        # Access and cache cwd early, so we don't access it for the first time
        # after a working-copy update caused it to not exist (accessing it then
        # raises an exception).
        self._cwd
138
138
    def prefetch_parents(self):
        """make sure the parents are loaded

        Used to avoid a race condition.
        """
        # merely touching the property populates the parents from the map
        self._pl
145
145
    @contextlib.contextmanager
    def parentchange(self):
        """Context manager for handling dirstate parents.

        If an exception occurs in the scope of the context manager,
        the incoherent dirstate won't be written when wlock is
        released.
        """
        self._parentwriters += 1
        yield
        # Typically we want the "undo" step of a context manager in a
        # finally block so it happens even when an exception
        # occurs. In this case, however, we only want to decrement
        # parentwriters if the code in the with statement exits
        # normally, so we don't have a try/finally here on purpose.
        self._parentwriters -= 1
162
162
163 def pendingparentchange(self):
163 def pendingparentchange(self):
164 """Returns true if the dirstate is in the middle of a set of changes
164 """Returns true if the dirstate is in the middle of a set of changes
165 that modify the dirstate parent.
165 that modify the dirstate parent.
166 """
166 """
167 return self._parentwriters > 0
167 return self._parentwriters > 0
168
168
    @propertycache
    def _map(self):
        """Return the dirstate contents (see documentation for dirstatemap)."""
        # assigning to self._map stores the instance in __dict__, which is
        # also what @propertycache relies on for subsequent fast lookups
        self._map = self._mapcls(
            self._ui,
            self._opener,
            self._root,
            self._nodeconstants,
            self._use_dirstate_v2,
        )
        return self._map
180
180
    @property
    def _sparsematcher(self):
        """The matcher for the sparse checkout.

        The working directory may not include every file from a manifest. The
        matcher obtained by this property will match a path if it is to be
        included in the working directory.
        """
        # TODO there is potential to cache this property. For now, the matcher
        # is resolved on every access. (But the called function does use a
        # cache to keep the lookup fast.)
        return self._sparsematchfn()
193
193
194 @repocache(b'branch')
194 @repocache(b'branch')
195 def _branch(self):
195 def _branch(self):
196 try:
196 try:
197 return self._opener.read(b"branch").strip() or b"default"
197 return self._opener.read(b"branch").strip() or b"default"
198 except IOError as inst:
198 except IOError as inst:
199 if inst.errno != errno.ENOENT:
199 if inst.errno != errno.ENOENT:
200 raise
200 raise
201 return b"default"
201 return b"default"
202
202
    @property
    def _pl(self):
        # (p1, p2) parent nodes of the working directory, as stored in the map
        return self._map.parents()
206
206
    def hasdir(self, d):
        """True if `d` is a directory of some tracked file (per the map)."""
        return self._map.hastrackeddir(d)
209
209
    @rootcache(b'.hgignore')
    def _ignore(self):
        """Matcher for ignored files, built from all configured ignore files."""
        files = self._ignorefiles()
        if not files:
            # no ignore files at all: match nothing
            return matchmod.never()

        pats = [b'include:%s' % f for f in files]
        return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
218
218
    @propertycache
    def _slash(self):
        # True when paths should be shown with '/' even though the OS
        # separator differs (ui.slash config on e.g. Windows)
        return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
222
222
    @propertycache
    def _checklink(self):
        """True if the filesystem at the repo root supports symlinks."""
        return util.checklink(self._root)
226
226
    @propertycache
    def _checkexec(self):
        """True if the filesystem at the repo root honors the exec bit."""
        return bool(util.checkexec(self._root))
230
230
    @propertycache
    def _checkcase(self):
        """True when the filesystem is case-insensitive (probed on '.hg')."""
        return not util.fscasesensitive(self._join(b'.hg'))
234
234
    def _join(self, f):
        """Join the relative path `f` to the repository root."""
        # much faster than os.path.join()
        # it's safe because f is always a relative path
        return self._rootdir + f
239
239
    def flagfunc(self, buildfallback):
        """Return a callable mapping a tracked path to its flags.

        The returned function yields b'l' for symlinks, b'x' for executables
        and b'' otherwise. When the filesystem cannot represent symlinks or
        the exec bit, the corresponding answer is taken (partially or fully)
        from the function produced by ``buildfallback()``.
        """
        if self._checklink and self._checkexec:
            # filesystem is fully capable: stat the file directly

            def f(x):
                try:
                    st = os.lstat(self._join(x))
                    if util.statislink(st):
                        return b'l'
                    if util.statisexec(st):
                        return b'x'
                except OSError:
                    # missing/unreadable file: report no flags
                    pass
                return b''

            return f

        fallback = buildfallback()
        if self._checklink:
            # symlinks supported but not the exec bit: ask fallback for 'x'

            def f(x):
                if os.path.islink(self._join(x)):
                    return b'l'
                if b'x' in fallback(x):
                    return b'x'
                return b''

            return f
        if self._checkexec:
            # exec bit supported but not symlinks: ask fallback for 'l'

            def f(x):
                if b'l' in fallback(x):
                    return b'l'
                if util.isexec(self._join(x)):
                    return b'x'
                return b''

            return f
        else:
            # neither supported: rely entirely on the fallback
            return fallback
279
279
280 @propertycache
280 @propertycache
281 def _cwd(self):
281 def _cwd(self):
282 # internal config: ui.forcecwd
282 # internal config: ui.forcecwd
283 forcecwd = self._ui.config(b'ui', b'forcecwd')
283 forcecwd = self._ui.config(b'ui', b'forcecwd')
284 if forcecwd:
284 if forcecwd:
285 return forcecwd
285 return forcecwd
286 return encoding.getcwd()
286 return encoding.getcwd()
287
287
    def getcwd(self):
        """Return the path from which a canonical path is calculated.

        This path should be used to resolve file patterns or to convert
        canonical paths back to file paths for display. It shouldn't be
        used to get real file paths. Use vfs functions instead.
        """
        cwd = self._cwd
        if cwd == self._root:
            # at the repository root: canonical paths need no prefix
            return b''
        # self._root ends with a path separator if self._root is '/' or 'C:\'
        rootsep = self._root
        if not util.endswithsep(rootsep):
            rootsep += pycompat.ossep
        if cwd.startswith(rootsep):
            # inside the repo: return the path relative to the root
            return cwd[len(rootsep) :]
        else:
            # we're outside the repo. return an absolute path.
            return cwd
307
307
308 def pathto(self, f, cwd=None):
308 def pathto(self, f, cwd=None):
309 if cwd is None:
309 if cwd is None:
310 cwd = self.getcwd()
310 cwd = self.getcwd()
311 path = util.pathto(self._root, cwd, f)
311 path = util.pathto(self._root, cwd, f)
312 if self._slash:
312 if self._slash:
313 return util.pconvert(path)
313 return util.pconvert(path)
314 return path
314 return path
315
315
316 def __getitem__(self, key):
316 def __getitem__(self, key):
317 """Return the current state of key (a filename) in the dirstate.
317 """Return the current state of key (a filename) in the dirstate.
318
318
319 States are:
319 States are:
320 n normal
320 n normal
321 m needs merging
321 m needs merging
322 r marked for removal
322 r marked for removal
323 a marked for addition
323 a marked for addition
324 ? not tracked
324 ? not tracked
325
325
326 XXX The "state" is a bit obscure to be in the "public" API. we should
326 XXX The "state" is a bit obscure to be in the "public" API. we should
327 consider migrating all user of this to going through the dirstate entry
327 consider migrating all user of this to going through the dirstate entry
328 instead.
328 instead.
329 """
329 """
330 entry = self._map.get(key)
330 entry = self._map.get(key)
331 if entry is not None:
331 if entry is not None:
332 return entry.state
332 return entry.state
333 return b'?'
333 return b'?'
334
334
335 def get_entry(self, path):
336 """return a DirstateItem for the associated path"""
337 entry = self._map.get(path)
338 if entry is None:
339 return DirstateItem()
340 return entry
341
    def __contains__(self, key):
        """True if `key` (a filename) has an entry in the dirstate map."""
        return key in self._map
337
344
338 def __iter__(self):
345 def __iter__(self):
339 return iter(sorted(self._map))
346 return iter(sorted(self._map))
340
347
    def items(self):
        """Iterate over (filename, entry) pairs from the dirstate map."""
        return pycompat.iteritems(self._map)
343
350
    # Python 2 spelling, kept as an alias for compatibility
    iteritems = items
345
352
346 def parents(self):
353 def parents(self):
347 return [self._validate(p) for p in self._pl]
354 return [self._validate(p) for p in self._pl]
348
355
    def p1(self):
        """Return the validated node of the first working-directory parent."""
        return self._validate(self._pl[0])
351
358
    def p2(self):
        """Return the validated node of the second working-directory parent."""
        return self._validate(self._pl[1])
354
361
    @property
    def in_merge(self):
        """True if a merge is in progress"""
        # a non-null second parent is what defines the merge state
        return self._pl[1] != self._nodeconstants.nullid
359
366
    def branch(self):
        """Return the current branch name in local encoding."""
        return encoding.tolocal(self._branch)
362
369
    def setparents(self, p1, p2=None):
        """Set dirstate parents to p1 and p2.

        When moving from two parents to one, "merged" entries are
        adjusted to normal and previous copy records discarded and
        returned by the call.

        See localrepo.setparents()
        """
        if p2 is None:
            p2 = self._nodeconstants.nullid
        if self._parentwriters == 0:
            raise ValueError(
                b"cannot set dirstate parent outside of "
                b"dirstate.parentchange context manager"
            )

        self._dirty = True
        oldp2 = self._pl[1]
        if self._origpl is None:
            # remember the original parents so they can be restored/compared
            self._origpl = self._pl
        nullid = self._nodeconstants.nullid
        # True if we need to fold p2 related state back to a linear case
        fold_p2 = oldp2 != nullid and p2 == nullid
        return self._map.setparents(p1, p2, fold_p2=fold_p2)
388
395
    def setbranch(self, branch):
        """Persist `branch` (local encoding) to .hg/branch atomically."""
        self.__class__._branch.set(self, encoding.fromlocal(branch))
        f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
        try:
            f.write(self._branch + b'\n')
            f.close()

            # make sure filecache has the correct stat info for _branch after
            # replacing the underlying file
            ce = self._filecache[b'_branch']
            if ce:
                ce.refresh()
        except:  # re-raises
            # discard the atomictemp file so a partial write never lands
            f.discard()
            raise
404
411
405 def invalidate(self):
412 def invalidate(self):
406 """Causes the next access to reread the dirstate.
413 """Causes the next access to reread the dirstate.
407
414
408 This is different from localrepo.invalidatedirstate() because it always
415 This is different from localrepo.invalidatedirstate() because it always
409 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
416 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
410 check whether the dirstate has changed before rereading it."""
417 check whether the dirstate has changed before rereading it."""
411
418
412 for a in ("_map", "_branch", "_ignore"):
419 for a in ("_map", "_branch", "_ignore"):
413 if a in self.__dict__:
420 if a in self.__dict__:
414 delattr(self, a)
421 delattr(self, a)
415 self._lastnormaltime = 0
422 self._lastnormaltime = 0
416 self._dirty = False
423 self._dirty = False
417 self._parentwriters = 0
424 self._parentwriters = 0
418 self._origpl = None
425 self._origpl = None
419
426
420 def copy(self, source, dest):
427 def copy(self, source, dest):
421 """Mark dest as a copy of source. Unmark dest if source is None."""
428 """Mark dest as a copy of source. Unmark dest if source is None."""
422 if source == dest:
429 if source == dest:
423 return
430 return
424 self._dirty = True
431 self._dirty = True
425 if source is not None:
432 if source is not None:
426 self._map.copymap[dest] = source
433 self._map.copymap[dest] = source
427 else:
434 else:
428 self._map.copymap.pop(dest, None)
435 self._map.copymap.pop(dest, None)
429
436
430 def copied(self, file):
437 def copied(self, file):
431 return self._map.copymap.get(file, None)
438 return self._map.copymap.get(file, None)
432
439
    def copies(self):
        """Return the mapping of copy destinations to their sources."""
        return self._map.copymap
435
442
436 @requires_no_parents_change
443 @requires_no_parents_change
437 def set_tracked(self, filename):
444 def set_tracked(self, filename):
438 """a "public" method for generic code to mark a file as tracked
445 """a "public" method for generic code to mark a file as tracked
439
446
440 This function is to be called outside of "update/merge" case. For
447 This function is to be called outside of "update/merge" case. For
441 example by a command like `hg add X`.
448 example by a command like `hg add X`.
442
449
443 return True the file was previously untracked, False otherwise.
450 return True the file was previously untracked, False otherwise.
444 """
451 """
445 self._dirty = True
452 self._dirty = True
446 entry = self._map.get(filename)
453 entry = self._map.get(filename)
447 if entry is None or not entry.tracked:
454 if entry is None or not entry.tracked:
448 self._check_new_tracked_filename(filename)
455 self._check_new_tracked_filename(filename)
449 return self._map.set_tracked(filename)
456 return self._map.set_tracked(filename)
450
457
451 @requires_no_parents_change
458 @requires_no_parents_change
452 def set_untracked(self, filename):
459 def set_untracked(self, filename):
453 """a "public" method for generic code to mark a file as untracked
460 """a "public" method for generic code to mark a file as untracked
454
461
455 This function is to be called outside of "update/merge" case. For
462 This function is to be called outside of "update/merge" case. For
456 example by a command like `hg remove X`.
463 example by a command like `hg remove X`.
457
464
458 return True the file was previously tracked, False otherwise.
465 return True the file was previously tracked, False otherwise.
459 """
466 """
460 ret = self._map.set_untracked(filename)
467 ret = self._map.set_untracked(filename)
461 if ret:
468 if ret:
462 self._dirty = True
469 self._dirty = True
463 return ret
470 return ret
464
471
    @requires_no_parents_change
    def set_clean(self, filename, parentfiledata=None):
        """record that the current state of the file on disk is known to be clean"""
        self._dirty = True
        if parentfiledata:
            # caller already stat'ed the file; trust its (mode, size, mtime)
            (mode, size, mtime) = parentfiledata
        else:
            (mode, size, mtime) = self._get_filedata(filename)
        if not self._map[filename].tracked:
            self._check_new_tracked_filename(filename)
        self._map.set_clean(filename, mode, size, mtime)
        if mtime > self._lastnormaltime:
            # Remember the most recent modification timeslot for status(),
            # to make sure we won't miss future size-preserving file content
            # modifications that happen within the same timeslot.
            self._lastnormaltime = mtime
481
488
    @requires_no_parents_change
    def set_possibly_dirty(self, filename):
        """record that the current state of the file on disk is unknown"""
        self._dirty = True
        self._map.set_possibly_dirty(filename)
487
494
    @requires_parents_change
    def update_file_p1(
        self,
        filename,
        p1_tracked,
    ):
        """Set a file as tracked in the parent (or not)

        This is to be called when adjusting the dirstate to a new parent
        after an history rewriting operation.

        It should not be called during a merge (p2 != nullid) and only within
        a `with dirstate.parentchange():` context.
        """
        if self.in_merge:
            msg = b'update_file_reference should not be called when merging'
            raise error.ProgrammingError(msg)
        entry = self._map.get(filename)
        if entry is None:
            wc_tracked = False
        else:
            wc_tracked = entry.tracked
        possibly_dirty = False
        if p1_tracked and wc_tracked:
            # the underlying reference might have changed, we will have to
            # check it.
            possibly_dirty = True
        elif not (p1_tracked or wc_tracked):
            # the file is no longer relevant to anyone
            if self._map.get(filename) is not None:
                self._map.reset_state(filename)
                self._dirty = True
        elif (not p1_tracked) and wc_tracked:
            if entry is not None and entry.added:
                return  # avoid dropping copy information (maybe?)
        elif p1_tracked and not wc_tracked:
            pass
        else:
            assert False, 'unreachable'

        # this mean we are doing call for file we do not really care about the
        # data (eg: added or removed), however this should be a minor overhead
        # compared to the overall update process calling this.
        parentfiledata = None
        if wc_tracked:
            parentfiledata = self._get_filedata(filename)

        self._map.reset_state(
            filename,
            wc_tracked,
            p1_tracked,
            possibly_dirty=possibly_dirty,
            parentfiledata=parentfiledata,
        )
        if (
            parentfiledata is not None
            and parentfiledata[2] > self._lastnormaltime
        ):
            # Remember the most recent modification timeslot for status(),
            # to make sure we won't miss future size-preserving file content
            # modifications that happen within the same timeslot.
            self._lastnormaltime = parentfiledata[2]
550
557
    @requires_parents_change
    def update_file(
        self,
        filename,
        wc_tracked,
        p1_tracked,
        p2_tracked=False,
        merged=False,
        clean_p1=False,
        clean_p2=False,
        possibly_dirty=False,
        parentfiledata=None,
    ):
        """update the information about a file in the dirstate

        This is to be called when the direstates parent changes to keep track
        of what is the file situation in regards to the working copy and its parent.

        This function must be called within a `dirstate.parentchange` context.

        note: the API is at an early stage and we might need to adjust it
        depending of what information ends up being relevant and useful to
        other processing.

        Parameters (booleans describe the file's situation):
        - wc_tracked/p1_tracked/p2_tracked: tracked in the working copy /
          first parent / second parent.
        - merged: file went through a merge (incompatible with clean_p1 and
          clean_p2, see the ProgrammingError below).
        - clean_p1/clean_p2: file is unmodified relative to that parent.
        - possibly_dirty: the on-disk state may not match the recorded one.
        - parentfiledata: optional pre-computed (mode, size, mtime) triple;
          looked up from disk when needed and not provided.
        """
        if merged and (clean_p1 or clean_p2):
            msg = b'`merged` argument incompatible with `clean_p1`/`clean_p2`'
            raise error.ProgrammingError(msg)

        # note: I do not think we need to double check name clash here since we
        # are in a update/merge case that should already have taken care of
        # this. The test agrees

        self._dirty = True

        # we only need fresh stat data when the entry will be recorded as
        # clean relative to p1 (otherwise mode/size/mtime are irrelevant)
        need_parent_file_data = (
            not (possibly_dirty or clean_p2 or merged)
            and wc_tracked
            and p1_tracked
        )

        # this mean we are doing call for file we do not really care about the
        # data (eg: added or removed), however this should be a minor overhead
        # compared to the overall update process calling this.
        if need_parent_file_data:
            if parentfiledata is None:
                parentfiledata = self._get_filedata(filename)
            mtime = parentfiledata[2]

            if mtime > self._lastnormaltime:
                # Remember the most recent modification timeslot for
                # status(), to make sure we won't miss future
                # size-preserving file content modifications that happen
                # within the same timeslot.
                self._lastnormaltime = mtime

        self._map.reset_state(
            filename,
            wc_tracked,
            p1_tracked,
            p2_tracked=p2_tracked,
            merged=merged,
            clean_p1=clean_p1,
            clean_p2=clean_p2,
            possibly_dirty=possibly_dirty,
            parentfiledata=parentfiledata,
        )
        if (
            parentfiledata is not None
            and parentfiledata[2] > self._lastnormaltime
        ):
            # Remember the most recent modification timeslot for status(),
            # to make sure we won't miss future size-preserving file content
            # modifications that happen within the same timeslot.
            self._lastnormaltime = parentfiledata[2]
625
632
626 def _check_new_tracked_filename(self, filename):
633 def _check_new_tracked_filename(self, filename):
627 scmutil.checkfilename(filename)
634 scmutil.checkfilename(filename)
628 if self._map.hastrackeddir(filename):
635 if self._map.hastrackeddir(filename):
629 msg = _(b'directory %r already in dirstate')
636 msg = _(b'directory %r already in dirstate')
630 msg %= pycompat.bytestr(filename)
637 msg %= pycompat.bytestr(filename)
631 raise error.Abort(msg)
638 raise error.Abort(msg)
632 # shadows
639 # shadows
633 for d in pathutil.finddirs(filename):
640 for d in pathutil.finddirs(filename):
634 if self._map.hastrackeddir(d):
641 if self._map.hastrackeddir(d):
635 break
642 break
636 entry = self._map.get(d)
643 entry = self._map.get(d)
637 if entry is not None and not entry.removed:
644 if entry is not None and not entry.removed:
638 msg = _(b'file %r in dirstate clashes with %r')
645 msg = _(b'file %r in dirstate clashes with %r')
639 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
646 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
640 raise error.Abort(msg)
647 raise error.Abort(msg)
641
648
642 def _get_filedata(self, filename):
649 def _get_filedata(self, filename):
643 """returns"""
650 """returns"""
644 s = os.lstat(self._join(filename))
651 s = os.lstat(self._join(filename))
645 mode = s.st_mode
652 mode = s.st_mode
646 size = s.st_size
653 size = s.st_size
647 mtime = s[stat.ST_MTIME]
654 mtime = s[stat.ST_MTIME]
648 return (mode, size, mtime)
655 return (mode, size, mtime)
649
656
    def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
        """Return the filesystem's case-folded spelling of `path`, caching it.

        `normed` is the case-normalized form of `path` and `storemap` the
        fold map (file or directory map) the result is recorded in.  `exists`
        may be passed to skip the on-disk existence check.  For a missing
        path, only the leading directory components are normalized (or, with
        `ignoremissing`, the path is returned as-is).
        """
        if exists is None:
            exists = os.path.lexists(os.path.join(self._root, path))
        if not exists:
            # Maybe a path component exists
            if not ignoremissing and b'/' in path:
                d, f = path.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, None)
                folded = d + b"/" + f
            else:
                # No path components, preserve original case
                folded = path
        else:
            # recursively normalize leading directory components
            # against dirstate
            if b'/' in normed:
                d, f = normed.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, True)
                r = self._root + b"/" + d
                folded = d + b"/" + util.fspath(f, r)
            else:
                folded = util.fspath(normed, self._root)
        # remember the discovered spelling for subsequent lookups
        storemap[normed] = folded

        return folded
675
682
676 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
683 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
677 normed = util.normcase(path)
684 normed = util.normcase(path)
678 folded = self._map.filefoldmap.get(normed, None)
685 folded = self._map.filefoldmap.get(normed, None)
679 if folded is None:
686 if folded is None:
680 if isknown:
687 if isknown:
681 folded = path
688 folded = path
682 else:
689 else:
683 folded = self._discoverpath(
690 folded = self._discoverpath(
684 path, normed, ignoremissing, exists, self._map.filefoldmap
691 path, normed, ignoremissing, exists, self._map.filefoldmap
685 )
692 )
686 return folded
693 return folded
687
694
688 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
695 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
689 normed = util.normcase(path)
696 normed = util.normcase(path)
690 folded = self._map.filefoldmap.get(normed, None)
697 folded = self._map.filefoldmap.get(normed, None)
691 if folded is None:
698 if folded is None:
692 folded = self._map.dirfoldmap.get(normed, None)
699 folded = self._map.dirfoldmap.get(normed, None)
693 if folded is None:
700 if folded is None:
694 if isknown:
701 if isknown:
695 folded = path
702 folded = path
696 else:
703 else:
697 # store discovered result in dirfoldmap so that future
704 # store discovered result in dirfoldmap so that future
698 # normalizefile calls don't start matching directories
705 # normalizefile calls don't start matching directories
699 folded = self._discoverpath(
706 folded = self._discoverpath(
700 path, normed, ignoremissing, exists, self._map.dirfoldmap
707 path, normed, ignoremissing, exists, self._map.dirfoldmap
701 )
708 )
702 return folded
709 return folded
703
710
704 def normalize(self, path, isknown=False, ignoremissing=False):
711 def normalize(self, path, isknown=False, ignoremissing=False):
705 """
712 """
706 normalize the case of a pathname when on a casefolding filesystem
713 normalize the case of a pathname when on a casefolding filesystem
707
714
708 isknown specifies whether the filename came from walking the
715 isknown specifies whether the filename came from walking the
709 disk, to avoid extra filesystem access.
716 disk, to avoid extra filesystem access.
710
717
711 If ignoremissing is True, missing path are returned
718 If ignoremissing is True, missing path are returned
712 unchanged. Otherwise, we try harder to normalize possibly
719 unchanged. Otherwise, we try harder to normalize possibly
713 existing path components.
720 existing path components.
714
721
715 The normalized case is determined based on the following precedence:
722 The normalized case is determined based on the following precedence:
716
723
717 - version of name already stored in the dirstate
724 - version of name already stored in the dirstate
718 - version of name stored on disk
725 - version of name stored on disk
719 - version provided via command arguments
726 - version provided via command arguments
720 """
727 """
721
728
722 if self._checkcase:
729 if self._checkcase:
723 return self._normalize(path, isknown, ignoremissing)
730 return self._normalize(path, isknown, ignoremissing)
724 return path
731 return path
725
732
726 def clear(self):
733 def clear(self):
727 self._map.clear()
734 self._map.clear()
728 self._lastnormaltime = 0
735 self._lastnormaltime = 0
729 self._dirty = True
736 self._dirty = True
730
737
    def rebuild(self, parent, allfiles, changedfiles=None):
        """Reset the dirstate so that `parent` becomes its single parent.

        `allfiles` is the set/list of files tracked in `parent`.  When
        `changedfiles` is given, only those files are touched: the ones also
        in `allfiles` are re-marked for lookup, the others are dropped.
        Without it, the whole dirstate is rebuilt from `allfiles`.
        """
        if changedfiles is None:
            # Rebuild entire dirstate
            to_lookup = allfiles
            to_drop = []
            # preserve the mtime hint across the full clear
            lastnormaltime = self._lastnormaltime
            self.clear()
            self._lastnormaltime = lastnormaltime
        elif len(changedfiles) < 10:
            # Avoid turning allfiles into a set, which can be expensive if it's
            # large.
            to_lookup = []
            to_drop = []
            for f in changedfiles:
                if f in allfiles:
                    to_lookup.append(f)
                else:
                    to_drop.append(f)
        else:
            # many changed files: set operations beat repeated membership tests
            changedfilesset = set(changedfiles)
            to_lookup = changedfilesset & set(allfiles)
            to_drop = changedfilesset - to_lookup

        if self._origpl is None:
            self._origpl = self._pl
        self._map.setparents(parent, self._nodeconstants.nullid)

        for f in to_lookup:

            if self.in_merge:
                self.set_tracked(f)
            else:
                self._map.reset_state(
                    f,
                    wc_tracked=True,
                    p1_tracked=True,
                    possibly_dirty=True,
                )
        for f in to_drop:
            self._map.reset_state(f)

        self._dirty = True
773
780
774 def identity(self):
781 def identity(self):
775 """Return identity of dirstate itself to detect changing in storage
782 """Return identity of dirstate itself to detect changing in storage
776
783
777 If identity of previous dirstate is equal to this, writing
784 If identity of previous dirstate is equal to this, writing
778 changes based on the former dirstate out can keep consistency.
785 changes based on the former dirstate out can keep consistency.
779 """
786 """
780 return self._map.identity
787 return self._map.identity
781
788
    def write(self, tr):
        """Write pending dirstate changes to disk.

        No-op when nothing is dirty.  With a transaction `tr`, the actual
        write is registered as a file generator and deferred to transaction
        close; without one, the dirstate file is written immediately.
        """
        if not self._dirty:
            return

        filename = self._filename
        if tr:
            # 'dirstate.write()' is not only for writing in-memory
            # changes out, but also for dropping ambiguous timestamp.
            # delayed writing re-raise "ambiguous timestamp issue".
            # See also the wiki page below for detail:
            # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan

            # record when mtime start to be ambiguous
            now = _getfsnow(self._opener)

            # delay writing in-memory changes out
            tr.addfilegenerator(
                b'dirstate',
                (self._filename,),
                lambda f: self._writedirstate(tr, f, now=now),
                location=b'plain',
            )
            return

        # immediate write path: atomictemp + checkambig guard against
        # readers seeing a half-written or mtime-ambiguous file
        st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
        self._writedirstate(tr, st)
808
815
809 def addparentchangecallback(self, category, callback):
816 def addparentchangecallback(self, category, callback):
810 """add a callback to be called when the wd parents are changed
817 """add a callback to be called when the wd parents are changed
811
818
812 Callback will be called with the following arguments:
819 Callback will be called with the following arguments:
813 dirstate, (oldp1, oldp2), (newp1, newp2)
820 dirstate, (oldp1, oldp2), (newp1, newp2)
814
821
815 Category is a unique identifier to allow overwriting an old callback
822 Category is a unique identifier to allow overwriting an old callback
816 with a newer callback.
823 with a newer callback.
817 """
824 """
818 self._plchangecallbacks[category] = callback
825 self._plchangecallbacks[category] = callback
819
826
    def _writedirstate(self, tr, st, now=None):
        """Serialize the dirstate map to the open file object `st`.

        `now` is the filesystem's notion of the current time, used to detect
        entries with an ambiguous mtime; when None it is derived from the
        mtime of the freshly created temporary file.  Parent-change
        callbacks are notified first when the parents changed since the
        last write.
        """
        # notify callbacks about parents change
        if self._origpl is not None and self._origpl != self._pl:
            for c, callback in sorted(
                pycompat.iteritems(self._plchangecallbacks)
            ):
                callback(self, self._origpl, self._pl)
            self._origpl = None

        if now is None:
            # use the modification time of the newly created temporary file as the
            # filesystem's notion of 'now'
            now = util.fstat(st)[stat.ST_MTIME] & _rangemask

        # enough 'delaywrite' prevents 'pack_dirstate' from dropping
        # timestamp of each entries in dirstate, because of 'now > mtime'
        delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
        if delaywrite > 0:
            # do we have any files to delay for?
            for f, e in pycompat.iteritems(self._map):
                if e.need_delay(now):
                    import time  # to avoid useless import

                    # rather than sleep n seconds, sleep until the next
                    # multiple of n seconds
                    clock = time.time()
                    start = int(clock) - (int(clock) % delaywrite)
                    end = start + delaywrite
                    time.sleep(end - clock)
                    now = end  # trust our estimate that the end is near now
                    break

        self._map.write(tr, st, now)
        # the on-disk state now matches memory: reset dirty tracking
        self._lastnormaltime = 0
        self._dirty = False
855
862
856 def _dirignore(self, f):
863 def _dirignore(self, f):
857 if self._ignore(f):
864 if self._ignore(f):
858 return True
865 return True
859 for p in pathutil.finddirs(f):
866 for p in pathutil.finddirs(f):
860 if self._ignore(p):
867 if self._ignore(p):
861 return True
868 return True
862 return False
869 return False
863
870
864 def _ignorefiles(self):
871 def _ignorefiles(self):
865 files = []
872 files = []
866 if os.path.exists(self._join(b'.hgignore')):
873 if os.path.exists(self._join(b'.hgignore')):
867 files.append(self._join(b'.hgignore'))
874 files.append(self._join(b'.hgignore'))
868 for name, path in self._ui.configitems(b"ui"):
875 for name, path in self._ui.configitems(b"ui"):
869 if name == b'ignore' or name.startswith(b'ignore.'):
876 if name == b'ignore' or name.startswith(b'ignore.'):
870 # we need to use os.path.join here rather than self._join
877 # we need to use os.path.join here rather than self._join
871 # because path is arbitrary and user-specified
878 # because path is arbitrary and user-specified
872 files.append(os.path.join(self._rootdir, util.expandpath(path)))
879 files.append(os.path.join(self._rootdir, util.expandpath(path)))
873 return files
880 return files
874
881
    def _ignorefileandline(self, f):
        """Return ``(file, lineno, line)`` of the ignore rule matching `f`.

        Walks every ignore file (following ``subinclude`` directives, each
        file visited at most once) and returns the first pattern that
        matches `f`.  Returns ``(None, -1, b"")`` when nothing ignores `f`.
        """
        files = collections.deque(self._ignorefiles())
        visited = set()
        while files:
            i = files.popleft()
            patterns = matchmod.readpatternfile(
                i, self._ui.warn, sourceinfo=True
            )
            for pattern, lineno, line in patterns:
                kind, p = matchmod._patsplit(pattern, b'glob')
                if kind == b"subinclude":
                    # queue the referenced file instead of matching it here
                    if p not in visited:
                        files.append(p)
                    continue
                m = matchmod.match(
                    self._root, b'', [], [pattern], warn=self._ui.warn
                )
                if m(f):
                    return (i, lineno, line)
            visited.add(i)
        return (None, -1, b"")
896
903
    def _walkexplicit(self, match, subrepos):
        """Get stat data about the files explicitly specified by match.

        Return a triple (results, dirsfound, dirsnotfound).
        - results is a mapping from filename to stat result. It also contains
          listings mapping subrepos and .hg to None.
        - dirsfound is a list of files found to be directories.
        - dirsnotfound is a list of files that the dirstate thinks are
          directories and that were not found."""

        def badtype(mode):
            # human-readable description for a file type we cannot track
            kind = _(b'unknown')
            if stat.S_ISCHR(mode):
                kind = _(b'character device')
            elif stat.S_ISBLK(mode):
                kind = _(b'block device')
            elif stat.S_ISFIFO(mode):
                kind = _(b'fifo')
            elif stat.S_ISSOCK(mode):
                kind = _(b'socket')
            elif stat.S_ISDIR(mode):
                kind = _(b'directory')
            return _(b'unsupported file type (type is %s)') % kind

        # bind frequently used attributes to locals for the loops below
        badfn = match.bad
        dmap = self._map
        lstat = os.lstat
        getkind = stat.S_IFMT
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join
        dirsfound = []
        foundadd = dirsfound.append
        dirsnotfound = []
        notfoundadd = dirsnotfound.append

        if not match.isexact() and self._checkcase:
            normalize = self._normalize
        else:
            normalize = None

        # drop explicit file arguments that live inside a subrepo; both
        # lists are sorted so a single merge-style pass suffices
        files = sorted(match.files())
        subrepos.sort()
        i, j = 0, 0
        while i < len(files) and j < len(subrepos):
            subpath = subrepos[j] + b"/"
            if files[i] < subpath:
                i += 1
                continue
            while i < len(files) and files[i].startswith(subpath):
                del files[i]
            j += 1

        if not files or b'' in files:
            files = [b'']
            # constructing the foldmap is expensive, so don't do it for the
            # common case where files is ['']
            normalize = None
        results = dict.fromkeys(subrepos)
        results[b'.hg'] = None

        for ff in files:
            if normalize:
                nf = normalize(ff, False, True)
            else:
                nf = ff
            if nf in results:
                continue

            try:
                st = lstat(join(nf))
                kind = getkind(st.st_mode)
                if kind == dirkind:
                    if nf in dmap:
                        # file replaced by dir on disk but still in dirstate
                        results[nf] = None
                    foundadd((nf, ff))
                elif kind == regkind or kind == lnkkind:
                    results[nf] = st
                else:
                    badfn(ff, badtype(kind))
                    if nf in dmap:
                        results[nf] = None
            except OSError as inst:  # nf not found on disk - it is dirstate only
                if nf in dmap:  # does it exactly match a missing file?
                    results[nf] = None
                else:  # does it match a missing directory?
                    if self._map.hasdir(nf):
                        notfoundadd(nf)
                    else:
                        badfn(ff, encoding.strtolocal(inst.strerror))

        # match.files() may contain explicitly-specified paths that shouldn't
        # be taken; drop them from the list of files found. dirsfound/notfound
        # aren't filtered here because they will be tested later.
        if match.anypats():
            for f in list(results):
                if f == b'.hg' or f in subrepos:
                    # keep sentinel to disable further out-of-repo walks
                    continue
                if not match(f):
                    del results[f]

        # Case insensitive filesystems cannot rely on lstat() failing to detect
        # a case-only rename. Prune the stat object for any file that does not
        # match the case in the filesystem, if there are multiple files that
        # normalize to the same path.
        if match.isexact() and self._checkcase:
            normed = {}

            # group results by case-normalized path
            for f, st in pycompat.iteritems(results):
                if st is None:
                    continue

                nc = util.normcase(f)
                paths = normed.get(nc)

                if paths is None:
                    paths = set()
                    normed[nc] = paths

                paths.add(f)

            for norm, paths in pycompat.iteritems(normed):
                if len(paths) > 1:
                    for path in paths:
                        folded = self._discoverpath(
                            path, norm, True, None, self._map.dirfoldmap
                        )
                        if path != folded:
                            results[path] = None

        return results, dirsfound, dirsnotfound
1031
1038
1032 def walk(self, match, subrepos, unknown, ignored, full=True):
1039 def walk(self, match, subrepos, unknown, ignored, full=True):
1033 """
1040 """
1034 Walk recursively through the directory tree, finding all files
1041 Walk recursively through the directory tree, finding all files
1035 matched by match.
1042 matched by match.
1036
1043
1037 If full is False, maybe skip some known-clean files.
1044 If full is False, maybe skip some known-clean files.
1038
1045
1039 Return a dict mapping filename to stat-like object (either
1046 Return a dict mapping filename to stat-like object (either
1040 mercurial.osutil.stat instance or return value of os.stat()).
1047 mercurial.osutil.stat instance or return value of os.stat()).
1041
1048
1042 """
1049 """
1043 # full is a flag that extensions that hook into walk can use -- this
1050 # full is a flag that extensions that hook into walk can use -- this
1044 # implementation doesn't use it at all. This satisfies the contract
1051 # implementation doesn't use it at all. This satisfies the contract
1045 # because we only guarantee a "maybe".
1052 # because we only guarantee a "maybe".
1046
1053
1047 if ignored:
1054 if ignored:
1048 ignore = util.never
1055 ignore = util.never
1049 dirignore = util.never
1056 dirignore = util.never
1050 elif unknown:
1057 elif unknown:
1051 ignore = self._ignore
1058 ignore = self._ignore
1052 dirignore = self._dirignore
1059 dirignore = self._dirignore
1053 else:
1060 else:
1054 # if not unknown and not ignored, drop dir recursion and step 2
1061 # if not unknown and not ignored, drop dir recursion and step 2
1055 ignore = util.always
1062 ignore = util.always
1056 dirignore = util.always
1063 dirignore = util.always
1057
1064
1058 matchfn = match.matchfn
1065 matchfn = match.matchfn
1059 matchalways = match.always()
1066 matchalways = match.always()
1060 matchtdir = match.traversedir
1067 matchtdir = match.traversedir
1061 dmap = self._map
1068 dmap = self._map
1062 listdir = util.listdir
1069 listdir = util.listdir
1063 lstat = os.lstat
1070 lstat = os.lstat
1064 dirkind = stat.S_IFDIR
1071 dirkind = stat.S_IFDIR
1065 regkind = stat.S_IFREG
1072 regkind = stat.S_IFREG
1066 lnkkind = stat.S_IFLNK
1073 lnkkind = stat.S_IFLNK
1067 join = self._join
1074 join = self._join
1068
1075
1069 exact = skipstep3 = False
1076 exact = skipstep3 = False
1070 if match.isexact(): # match.exact
1077 if match.isexact(): # match.exact
1071 exact = True
1078 exact = True
1072 dirignore = util.always # skip step 2
1079 dirignore = util.always # skip step 2
1073 elif match.prefix(): # match.match, no patterns
1080 elif match.prefix(): # match.match, no patterns
1074 skipstep3 = True
1081 skipstep3 = True
1075
1082
1076 if not exact and self._checkcase:
1083 if not exact and self._checkcase:
1077 normalize = self._normalize
1084 normalize = self._normalize
1078 normalizefile = self._normalizefile
1085 normalizefile = self._normalizefile
1079 skipstep3 = False
1086 skipstep3 = False
1080 else:
1087 else:
1081 normalize = self._normalize
1088 normalize = self._normalize
1082 normalizefile = None
1089 normalizefile = None
1083
1090
1084 # step 1: find all explicit files
1091 # step 1: find all explicit files
1085 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1092 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1086 if matchtdir:
1093 if matchtdir:
1087 for d in work:
1094 for d in work:
1088 matchtdir(d[0])
1095 matchtdir(d[0])
1089 for d in dirsnotfound:
1096 for d in dirsnotfound:
1090 matchtdir(d)
1097 matchtdir(d)
1091
1098
1092 skipstep3 = skipstep3 and not (work or dirsnotfound)
1099 skipstep3 = skipstep3 and not (work or dirsnotfound)
1093 work = [d for d in work if not dirignore(d[0])]
1100 work = [d for d in work if not dirignore(d[0])]
1094
1101
1095 # step 2: visit subdirectories
1102 # step 2: visit subdirectories
1096 def traverse(work, alreadynormed):
1103 def traverse(work, alreadynormed):
1097 wadd = work.append
1104 wadd = work.append
1098 while work:
1105 while work:
1099 tracing.counter('dirstate.walk work', len(work))
1106 tracing.counter('dirstate.walk work', len(work))
1100 nd = work.pop()
1107 nd = work.pop()
1101 visitentries = match.visitchildrenset(nd)
1108 visitentries = match.visitchildrenset(nd)
1102 if not visitentries:
1109 if not visitentries:
1103 continue
1110 continue
1104 if visitentries == b'this' or visitentries == b'all':
1111 if visitentries == b'this' or visitentries == b'all':
1105 visitentries = None
1112 visitentries = None
1106 skip = None
1113 skip = None
1107 if nd != b'':
1114 if nd != b'':
1108 skip = b'.hg'
1115 skip = b'.hg'
1109 try:
1116 try:
1110 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1117 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1111 entries = listdir(join(nd), stat=True, skip=skip)
1118 entries = listdir(join(nd), stat=True, skip=skip)
1112 except OSError as inst:
1119 except OSError as inst:
1113 if inst.errno in (errno.EACCES, errno.ENOENT):
1120 if inst.errno in (errno.EACCES, errno.ENOENT):
1114 match.bad(
1121 match.bad(
1115 self.pathto(nd), encoding.strtolocal(inst.strerror)
1122 self.pathto(nd), encoding.strtolocal(inst.strerror)
1116 )
1123 )
1117 continue
1124 continue
1118 raise
1125 raise
1119 for f, kind, st in entries:
1126 for f, kind, st in entries:
1120 # Some matchers may return files in the visitentries set,
1127 # Some matchers may return files in the visitentries set,
1121 # instead of 'this', if the matcher explicitly mentions them
1128 # instead of 'this', if the matcher explicitly mentions them
1122 # and is not an exactmatcher. This is acceptable; we do not
1129 # and is not an exactmatcher. This is acceptable; we do not
1123 # make any hard assumptions about file-or-directory below
1130 # make any hard assumptions about file-or-directory below
1124 # based on the presence of `f` in visitentries. If
1131 # based on the presence of `f` in visitentries. If
1125 # visitchildrenset returned a set, we can always skip the
1132 # visitchildrenset returned a set, we can always skip the
1126 # entries *not* in the set it provided regardless of whether
1133 # entries *not* in the set it provided regardless of whether
1127 # they're actually a file or a directory.
1134 # they're actually a file or a directory.
1128 if visitentries and f not in visitentries:
1135 if visitentries and f not in visitentries:
1129 continue
1136 continue
1130 if normalizefile:
1137 if normalizefile:
1131 # even though f might be a directory, we're only
1138 # even though f might be a directory, we're only
1132 # interested in comparing it to files currently in the
1139 # interested in comparing it to files currently in the
1133 # dmap -- therefore normalizefile is enough
1140 # dmap -- therefore normalizefile is enough
1134 nf = normalizefile(
1141 nf = normalizefile(
1135 nd and (nd + b"/" + f) or f, True, True
1142 nd and (nd + b"/" + f) or f, True, True
1136 )
1143 )
1137 else:
1144 else:
1138 nf = nd and (nd + b"/" + f) or f
1145 nf = nd and (nd + b"/" + f) or f
1139 if nf not in results:
1146 if nf not in results:
1140 if kind == dirkind:
1147 if kind == dirkind:
1141 if not ignore(nf):
1148 if not ignore(nf):
1142 if matchtdir:
1149 if matchtdir:
1143 matchtdir(nf)
1150 matchtdir(nf)
1144 wadd(nf)
1151 wadd(nf)
1145 if nf in dmap and (matchalways or matchfn(nf)):
1152 if nf in dmap and (matchalways or matchfn(nf)):
1146 results[nf] = None
1153 results[nf] = None
1147 elif kind == regkind or kind == lnkkind:
1154 elif kind == regkind or kind == lnkkind:
1148 if nf in dmap:
1155 if nf in dmap:
1149 if matchalways or matchfn(nf):
1156 if matchalways or matchfn(nf):
1150 results[nf] = st
1157 results[nf] = st
1151 elif (matchalways or matchfn(nf)) and not ignore(
1158 elif (matchalways or matchfn(nf)) and not ignore(
1152 nf
1159 nf
1153 ):
1160 ):
1154 # unknown file -- normalize if necessary
1161 # unknown file -- normalize if necessary
1155 if not alreadynormed:
1162 if not alreadynormed:
1156 nf = normalize(nf, False, True)
1163 nf = normalize(nf, False, True)
1157 results[nf] = st
1164 results[nf] = st
1158 elif nf in dmap and (matchalways or matchfn(nf)):
1165 elif nf in dmap and (matchalways or matchfn(nf)):
1159 results[nf] = None
1166 results[nf] = None
1160
1167
1161 for nd, d in work:
1168 for nd, d in work:
1162 # alreadynormed means that processwork doesn't have to do any
1169 # alreadynormed means that processwork doesn't have to do any
1163 # expensive directory normalization
1170 # expensive directory normalization
1164 alreadynormed = not normalize or nd == d
1171 alreadynormed = not normalize or nd == d
1165 traverse([d], alreadynormed)
1172 traverse([d], alreadynormed)
1166
1173
1167 for s in subrepos:
1174 for s in subrepos:
1168 del results[s]
1175 del results[s]
1169 del results[b'.hg']
1176 del results[b'.hg']
1170
1177
1171 # step 3: visit remaining files from dmap
1178 # step 3: visit remaining files from dmap
1172 if not skipstep3 and not exact:
1179 if not skipstep3 and not exact:
1173 # If a dmap file is not in results yet, it was either
1180 # If a dmap file is not in results yet, it was either
1174 # a) not matching matchfn b) ignored, c) missing, or d) under a
1181 # a) not matching matchfn b) ignored, c) missing, or d) under a
1175 # symlink directory.
1182 # symlink directory.
1176 if not results and matchalways:
1183 if not results and matchalways:
1177 visit = [f for f in dmap]
1184 visit = [f for f in dmap]
1178 else:
1185 else:
1179 visit = [f for f in dmap if f not in results and matchfn(f)]
1186 visit = [f for f in dmap if f not in results and matchfn(f)]
1180 visit.sort()
1187 visit.sort()
1181
1188
1182 if unknown:
1189 if unknown:
1183 # unknown == True means we walked all dirs under the roots
1190 # unknown == True means we walked all dirs under the roots
1184 # that wasn't ignored, and everything that matched was stat'ed
1191 # that wasn't ignored, and everything that matched was stat'ed
1185 # and is already in results.
1192 # and is already in results.
1186 # The rest must thus be ignored or under a symlink.
1193 # The rest must thus be ignored or under a symlink.
1187 audit_path = pathutil.pathauditor(self._root, cached=True)
1194 audit_path = pathutil.pathauditor(self._root, cached=True)
1188
1195
1189 for nf in iter(visit):
1196 for nf in iter(visit):
1190 # If a stat for the same file was already added with a
1197 # If a stat for the same file was already added with a
1191 # different case, don't add one for this, since that would
1198 # different case, don't add one for this, since that would
1192 # make it appear as if the file exists under both names
1199 # make it appear as if the file exists under both names
1193 # on disk.
1200 # on disk.
1194 if (
1201 if (
1195 normalizefile
1202 normalizefile
1196 and normalizefile(nf, True, True) in results
1203 and normalizefile(nf, True, True) in results
1197 ):
1204 ):
1198 results[nf] = None
1205 results[nf] = None
1199 # Report ignored items in the dmap as long as they are not
1206 # Report ignored items in the dmap as long as they are not
1200 # under a symlink directory.
1207 # under a symlink directory.
1201 elif audit_path.check(nf):
1208 elif audit_path.check(nf):
1202 try:
1209 try:
1203 results[nf] = lstat(join(nf))
1210 results[nf] = lstat(join(nf))
1204 # file was just ignored, no links, and exists
1211 # file was just ignored, no links, and exists
1205 except OSError:
1212 except OSError:
1206 # file doesn't exist
1213 # file doesn't exist
1207 results[nf] = None
1214 results[nf] = None
1208 else:
1215 else:
1209 # It's either missing or under a symlink directory
1216 # It's either missing or under a symlink directory
1210 # which we in this case report as missing
1217 # which we in this case report as missing
1211 results[nf] = None
1218 results[nf] = None
1212 else:
1219 else:
1213 # We may not have walked the full directory tree above,
1220 # We may not have walked the full directory tree above,
1214 # so stat and check everything we missed.
1221 # so stat and check everything we missed.
1215 iv = iter(visit)
1222 iv = iter(visit)
1216 for st in util.statfiles([join(i) for i in visit]):
1223 for st in util.statfiles([join(i) for i in visit]):
1217 results[next(iv)] = st
1224 results[next(iv)] = st
1218 return results
1225 return results
1219
1226
1220 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1227 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1221 # Force Rayon (Rust parallelism library) to respect the number of
1228 # Force Rayon (Rust parallelism library) to respect the number of
1222 # workers. This is a temporary workaround until Rust code knows
1229 # workers. This is a temporary workaround until Rust code knows
1223 # how to read the config file.
1230 # how to read the config file.
1224 numcpus = self._ui.configint(b"worker", b"numcpus")
1231 numcpus = self._ui.configint(b"worker", b"numcpus")
1225 if numcpus is not None:
1232 if numcpus is not None:
1226 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1233 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1227
1234
1228 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1235 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1229 if not workers_enabled:
1236 if not workers_enabled:
1230 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1237 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1231
1238
1232 (
1239 (
1233 lookup,
1240 lookup,
1234 modified,
1241 modified,
1235 added,
1242 added,
1236 removed,
1243 removed,
1237 deleted,
1244 deleted,
1238 clean,
1245 clean,
1239 ignored,
1246 ignored,
1240 unknown,
1247 unknown,
1241 warnings,
1248 warnings,
1242 bad,
1249 bad,
1243 traversed,
1250 traversed,
1244 dirty,
1251 dirty,
1245 ) = rustmod.status(
1252 ) = rustmod.status(
1246 self._map._rustmap,
1253 self._map._rustmap,
1247 matcher,
1254 matcher,
1248 self._rootdir,
1255 self._rootdir,
1249 self._ignorefiles(),
1256 self._ignorefiles(),
1250 self._checkexec,
1257 self._checkexec,
1251 self._lastnormaltime,
1258 self._lastnormaltime,
1252 bool(list_clean),
1259 bool(list_clean),
1253 bool(list_ignored),
1260 bool(list_ignored),
1254 bool(list_unknown),
1261 bool(list_unknown),
1255 bool(matcher.traversedir),
1262 bool(matcher.traversedir),
1256 )
1263 )
1257
1264
1258 self._dirty |= dirty
1265 self._dirty |= dirty
1259
1266
1260 if matcher.traversedir:
1267 if matcher.traversedir:
1261 for dir in traversed:
1268 for dir in traversed:
1262 matcher.traversedir(dir)
1269 matcher.traversedir(dir)
1263
1270
1264 if self._ui.warn:
1271 if self._ui.warn:
1265 for item in warnings:
1272 for item in warnings:
1266 if isinstance(item, tuple):
1273 if isinstance(item, tuple):
1267 file_path, syntax = item
1274 file_path, syntax = item
1268 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1275 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1269 file_path,
1276 file_path,
1270 syntax,
1277 syntax,
1271 )
1278 )
1272 self._ui.warn(msg)
1279 self._ui.warn(msg)
1273 else:
1280 else:
1274 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1281 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1275 self._ui.warn(
1282 self._ui.warn(
1276 msg
1283 msg
1277 % (
1284 % (
1278 pathutil.canonpath(
1285 pathutil.canonpath(
1279 self._rootdir, self._rootdir, item
1286 self._rootdir, self._rootdir, item
1280 ),
1287 ),
1281 b"No such file or directory",
1288 b"No such file or directory",
1282 )
1289 )
1283 )
1290 )
1284
1291
1285 for (fn, message) in bad:
1292 for (fn, message) in bad:
1286 matcher.bad(fn, encoding.strtolocal(message))
1293 matcher.bad(fn, encoding.strtolocal(message))
1287
1294
1288 status = scmutil.status(
1295 status = scmutil.status(
1289 modified=modified,
1296 modified=modified,
1290 added=added,
1297 added=added,
1291 removed=removed,
1298 removed=removed,
1292 deleted=deleted,
1299 deleted=deleted,
1293 unknown=unknown,
1300 unknown=unknown,
1294 ignored=ignored,
1301 ignored=ignored,
1295 clean=clean,
1302 clean=clean,
1296 )
1303 )
1297 return (lookup, status)
1304 return (lookup, status)
1298
1305
1299 def status(self, match, subrepos, ignored, clean, unknown):
1306 def status(self, match, subrepos, ignored, clean, unknown):
1300 """Determine the status of the working copy relative to the
1307 """Determine the status of the working copy relative to the
1301 dirstate and return a pair of (unsure, status), where status is of type
1308 dirstate and return a pair of (unsure, status), where status is of type
1302 scmutil.status and:
1309 scmutil.status and:
1303
1310
1304 unsure:
1311 unsure:
1305 files that might have been modified since the dirstate was
1312 files that might have been modified since the dirstate was
1306 written, but need to be read to be sure (size is the same
1313 written, but need to be read to be sure (size is the same
1307 but mtime differs)
1314 but mtime differs)
1308 status.modified:
1315 status.modified:
1309 files that have definitely been modified since the dirstate
1316 files that have definitely been modified since the dirstate
1310 was written (different size or mode)
1317 was written (different size or mode)
1311 status.clean:
1318 status.clean:
1312 files that have definitely not been modified since the
1319 files that have definitely not been modified since the
1313 dirstate was written
1320 dirstate was written
1314 """
1321 """
1315 listignored, listclean, listunknown = ignored, clean, unknown
1322 listignored, listclean, listunknown = ignored, clean, unknown
1316 lookup, modified, added, unknown, ignored = [], [], [], [], []
1323 lookup, modified, added, unknown, ignored = [], [], [], [], []
1317 removed, deleted, clean = [], [], []
1324 removed, deleted, clean = [], [], []
1318
1325
1319 dmap = self._map
1326 dmap = self._map
1320 dmap.preload()
1327 dmap.preload()
1321
1328
1322 use_rust = True
1329 use_rust = True
1323
1330
1324 allowed_matchers = (
1331 allowed_matchers = (
1325 matchmod.alwaysmatcher,
1332 matchmod.alwaysmatcher,
1326 matchmod.exactmatcher,
1333 matchmod.exactmatcher,
1327 matchmod.includematcher,
1334 matchmod.includematcher,
1328 )
1335 )
1329
1336
1330 if rustmod is None:
1337 if rustmod is None:
1331 use_rust = False
1338 use_rust = False
1332 elif self._checkcase:
1339 elif self._checkcase:
1333 # Case-insensitive filesystems are not handled yet
1340 # Case-insensitive filesystems are not handled yet
1334 use_rust = False
1341 use_rust = False
1335 elif subrepos:
1342 elif subrepos:
1336 use_rust = False
1343 use_rust = False
1337 elif sparse.enabled:
1344 elif sparse.enabled:
1338 use_rust = False
1345 use_rust = False
1339 elif not isinstance(match, allowed_matchers):
1346 elif not isinstance(match, allowed_matchers):
1340 # Some matchers have yet to be implemented
1347 # Some matchers have yet to be implemented
1341 use_rust = False
1348 use_rust = False
1342
1349
1343 if use_rust:
1350 if use_rust:
1344 try:
1351 try:
1345 return self._rust_status(
1352 return self._rust_status(
1346 match, listclean, listignored, listunknown
1353 match, listclean, listignored, listunknown
1347 )
1354 )
1348 except rustmod.FallbackError:
1355 except rustmod.FallbackError:
1349 pass
1356 pass
1350
1357
1351 def noop(f):
1358 def noop(f):
1352 pass
1359 pass
1353
1360
1354 dcontains = dmap.__contains__
1361 dcontains = dmap.__contains__
1355 dget = dmap.__getitem__
1362 dget = dmap.__getitem__
1356 ladd = lookup.append # aka "unsure"
1363 ladd = lookup.append # aka "unsure"
1357 madd = modified.append
1364 madd = modified.append
1358 aadd = added.append
1365 aadd = added.append
1359 uadd = unknown.append if listunknown else noop
1366 uadd = unknown.append if listunknown else noop
1360 iadd = ignored.append if listignored else noop
1367 iadd = ignored.append if listignored else noop
1361 radd = removed.append
1368 radd = removed.append
1362 dadd = deleted.append
1369 dadd = deleted.append
1363 cadd = clean.append if listclean else noop
1370 cadd = clean.append if listclean else noop
1364 mexact = match.exact
1371 mexact = match.exact
1365 dirignore = self._dirignore
1372 dirignore = self._dirignore
1366 checkexec = self._checkexec
1373 checkexec = self._checkexec
1367 copymap = self._map.copymap
1374 copymap = self._map.copymap
1368 lastnormaltime = self._lastnormaltime
1375 lastnormaltime = self._lastnormaltime
1369
1376
1370 # We need to do full walks when either
1377 # We need to do full walks when either
1371 # - we're listing all clean files, or
1378 # - we're listing all clean files, or
1372 # - match.traversedir does something, because match.traversedir should
1379 # - match.traversedir does something, because match.traversedir should
1373 # be called for every dir in the working dir
1380 # be called for every dir in the working dir
1374 full = listclean or match.traversedir is not None
1381 full = listclean or match.traversedir is not None
1375 for fn, st in pycompat.iteritems(
1382 for fn, st in pycompat.iteritems(
1376 self.walk(match, subrepos, listunknown, listignored, full=full)
1383 self.walk(match, subrepos, listunknown, listignored, full=full)
1377 ):
1384 ):
1378 if not dcontains(fn):
1385 if not dcontains(fn):
1379 if (listignored or mexact(fn)) and dirignore(fn):
1386 if (listignored or mexact(fn)) and dirignore(fn):
1380 if listignored:
1387 if listignored:
1381 iadd(fn)
1388 iadd(fn)
1382 else:
1389 else:
1383 uadd(fn)
1390 uadd(fn)
1384 continue
1391 continue
1385
1392
1386 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1393 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1387 # written like that for performance reasons. dmap[fn] is not a
1394 # written like that for performance reasons. dmap[fn] is not a
1388 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1395 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1389 # opcode has fast paths when the value to be unpacked is a tuple or
1396 # opcode has fast paths when the value to be unpacked is a tuple or
1390 # a list, but falls back to creating a full-fledged iterator in
1397 # a list, but falls back to creating a full-fledged iterator in
1391 # general. That is much slower than simply accessing and storing the
1398 # general. That is much slower than simply accessing and storing the
1392 # tuple members one by one.
1399 # tuple members one by one.
1393 t = dget(fn)
1400 t = dget(fn)
1394 mode = t.mode
1401 mode = t.mode
1395 size = t.size
1402 size = t.size
1396 time = t.mtime
1403 time = t.mtime
1397
1404
1398 if not st and t.tracked:
1405 if not st and t.tracked:
1399 dadd(fn)
1406 dadd(fn)
1400 elif t.merged:
1407 elif t.merged:
1401 madd(fn)
1408 madd(fn)
1402 elif t.added:
1409 elif t.added:
1403 aadd(fn)
1410 aadd(fn)
1404 elif t.removed:
1411 elif t.removed:
1405 radd(fn)
1412 radd(fn)
1406 elif t.tracked:
1413 elif t.tracked:
1407 if (
1414 if (
1408 size >= 0
1415 size >= 0
1409 and (
1416 and (
1410 (size != st.st_size and size != st.st_size & _rangemask)
1417 (size != st.st_size and size != st.st_size & _rangemask)
1411 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1418 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1412 )
1419 )
1413 or t.from_p2
1420 or t.from_p2
1414 or fn in copymap
1421 or fn in copymap
1415 ):
1422 ):
1416 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1423 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1417 # issue6456: Size returned may be longer due to
1424 # issue6456: Size returned may be longer due to
1418 # encryption on EXT-4 fscrypt, undecided.
1425 # encryption on EXT-4 fscrypt, undecided.
1419 ladd(fn)
1426 ladd(fn)
1420 else:
1427 else:
1421 madd(fn)
1428 madd(fn)
1422 elif (
1429 elif (
1423 time != st[stat.ST_MTIME]
1430 time != st[stat.ST_MTIME]
1424 and time != st[stat.ST_MTIME] & _rangemask
1431 and time != st[stat.ST_MTIME] & _rangemask
1425 ):
1432 ):
1426 ladd(fn)
1433 ladd(fn)
1427 elif st[stat.ST_MTIME] == lastnormaltime:
1434 elif st[stat.ST_MTIME] == lastnormaltime:
1428 # fn may have just been marked as normal and it may have
1435 # fn may have just been marked as normal and it may have
1429 # changed in the same second without changing its size.
1436 # changed in the same second without changing its size.
1430 # This can happen if we quickly do multiple commits.
1437 # This can happen if we quickly do multiple commits.
1431 # Force lookup, so we don't miss such a racy file change.
1438 # Force lookup, so we don't miss such a racy file change.
1432 ladd(fn)
1439 ladd(fn)
1433 elif listclean:
1440 elif listclean:
1434 cadd(fn)
1441 cadd(fn)
1435 status = scmutil.status(
1442 status = scmutil.status(
1436 modified, added, removed, deleted, unknown, ignored, clean
1443 modified, added, removed, deleted, unknown, ignored, clean
1437 )
1444 )
1438 return (lookup, status)
1445 return (lookup, status)
1439
1446
1440 def matches(self, match):
1447 def matches(self, match):
1441 """
1448 """
1442 return files in the dirstate (in whatever state) filtered by match
1449 return files in the dirstate (in whatever state) filtered by match
1443 """
1450 """
1444 dmap = self._map
1451 dmap = self._map
1445 if rustmod is not None:
1452 if rustmod is not None:
1446 dmap = self._map._rustmap
1453 dmap = self._map._rustmap
1447
1454
1448 if match.always():
1455 if match.always():
1449 return dmap.keys()
1456 return dmap.keys()
1450 files = match.files()
1457 files = match.files()
1451 if match.isexact():
1458 if match.isexact():
1452 # fast path -- filter the other way around, since typically files is
1459 # fast path -- filter the other way around, since typically files is
1453 # much smaller than dmap
1460 # much smaller than dmap
1454 return [f for f in files if f in dmap]
1461 return [f for f in files if f in dmap]
1455 if match.prefix() and all(fn in dmap for fn in files):
1462 if match.prefix() and all(fn in dmap for fn in files):
1456 # fast path -- all the values are known to be files, so just return
1463 # fast path -- all the values are known to be files, so just return
1457 # that
1464 # that
1458 return list(files)
1465 return list(files)
1459 return [f for f in dmap if match(f)]
1466 return [f for f in dmap if match(f)]
1460
1467
1461 def _actualfilename(self, tr):
1468 def _actualfilename(self, tr):
1462 if tr:
1469 if tr:
1463 return self._pendingfilename
1470 return self._pendingfilename
1464 else:
1471 else:
1465 return self._filename
1472 return self._filename
1466
1473
1467 def savebackup(self, tr, backupname):
1474 def savebackup(self, tr, backupname):
1468 '''Save current dirstate into backup file'''
1475 '''Save current dirstate into backup file'''
1469 filename = self._actualfilename(tr)
1476 filename = self._actualfilename(tr)
1470 assert backupname != filename
1477 assert backupname != filename
1471
1478
1472 # use '_writedirstate' instead of 'write' to write changes certainly,
1479 # use '_writedirstate' instead of 'write' to write changes certainly,
1473 # because the latter omits writing out if transaction is running.
1480 # because the latter omits writing out if transaction is running.
1474 # output file will be used to create backup of dirstate at this point.
1481 # output file will be used to create backup of dirstate at this point.
1475 if self._dirty or not self._opener.exists(filename):
1482 if self._dirty or not self._opener.exists(filename):
1476 self._writedirstate(
1483 self._writedirstate(
1477 tr,
1484 tr,
1478 self._opener(filename, b"w", atomictemp=True, checkambig=True),
1485 self._opener(filename, b"w", atomictemp=True, checkambig=True),
1479 )
1486 )
1480
1487
1481 if tr:
1488 if tr:
1482 # ensure that subsequent tr.writepending returns True for
1489 # ensure that subsequent tr.writepending returns True for
1483 # changes written out above, even if dirstate is never
1490 # changes written out above, even if dirstate is never
1484 # changed after this
1491 # changed after this
1485 tr.addfilegenerator(
1492 tr.addfilegenerator(
1486 b'dirstate',
1493 b'dirstate',
1487 (self._filename,),
1494 (self._filename,),
1488 lambda f: self._writedirstate(tr, f),
1495 lambda f: self._writedirstate(tr, f),
1489 location=b'plain',
1496 location=b'plain',
1490 )
1497 )
1491
1498
1492 # ensure that pending file written above is unlinked at
1499 # ensure that pending file written above is unlinked at
1493 # failure, even if tr.writepending isn't invoked until the
1500 # failure, even if tr.writepending isn't invoked until the
1494 # end of this transaction
1501 # end of this transaction
1495 tr.registertmp(filename, location=b'plain')
1502 tr.registertmp(filename, location=b'plain')
1496
1503
1497 self._opener.tryunlink(backupname)
1504 self._opener.tryunlink(backupname)
1498 # hardlink backup is okay because _writedirstate is always called
1505 # hardlink backup is okay because _writedirstate is always called
1499 # with an "atomictemp=True" file.
1506 # with an "atomictemp=True" file.
1500 util.copyfile(
1507 util.copyfile(
1501 self._opener.join(filename),
1508 self._opener.join(filename),
1502 self._opener.join(backupname),
1509 self._opener.join(backupname),
1503 hardlink=True,
1510 hardlink=True,
1504 )
1511 )
1505
1512
1506 def restorebackup(self, tr, backupname):
1513 def restorebackup(self, tr, backupname):
1507 '''Restore dirstate by backup file'''
1514 '''Restore dirstate by backup file'''
1508 # this "invalidate()" prevents "wlock.release()" from writing
1515 # this "invalidate()" prevents "wlock.release()" from writing
1509 # changes of dirstate out after restoring from backup file
1516 # changes of dirstate out after restoring from backup file
1510 self.invalidate()
1517 self.invalidate()
1511 filename = self._actualfilename(tr)
1518 filename = self._actualfilename(tr)
1512 o = self._opener
1519 o = self._opener
1513 if util.samefile(o.join(backupname), o.join(filename)):
1520 if util.samefile(o.join(backupname), o.join(filename)):
1514 o.unlink(backupname)
1521 o.unlink(backupname)
1515 else:
1522 else:
1516 o.rename(backupname, filename, checkambig=True)
1523 o.rename(backupname, filename, checkambig=True)
1517
1524
1518 def clearbackup(self, tr, backupname):
1525 def clearbackup(self, tr, backupname):
1519 '''Clear backup file'''
1526 '''Clear backup file'''
1520 self._opener.unlink(backupname)
1527 self._opener.unlink(backupname)
General Comments 0
You need to be logged in to leave comments. Login now