##// END OF EJS Templates
dirstate: drop the `_updatedfiles` set...
marmoute -
r48871:62188e4d default
parent child Browse files
Show More
@@ -1,1568 +1,1550 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .pycompat import delattr
17 from .pycompat import delattr
18
18
19 from hgdemandimport import tracing
19 from hgdemandimport import tracing
20
20
21 from . import (
21 from . import (
22 dirstatemap,
22 dirstatemap,
23 encoding,
23 encoding,
24 error,
24 error,
25 match as matchmod,
25 match as matchmod,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 sparse,
30 sparse,
31 util,
31 util,
32 )
32 )
33
33
34 from .interfaces import (
34 from .interfaces import (
35 dirstate as intdirstate,
35 dirstate as intdirstate,
36 util as interfaceutil,
36 util as interfaceutil,
37 )
37 )
38
38
39 parsers = policy.importmod('parsers')
39 parsers = policy.importmod('parsers')
40 rustmod = policy.importrust('dirstate')
40 rustmod = policy.importrust('dirstate')
41
41
42 SUPPORTS_DIRSTATE_V2 = rustmod is not None
42 SUPPORTS_DIRSTATE_V2 = rustmod is not None
43
43
44 propertycache = util.propertycache
44 propertycache = util.propertycache
45 filecache = scmutil.filecache
45 filecache = scmutil.filecache
46 _rangemask = dirstatemap.rangemask
46 _rangemask = dirstatemap.rangemask
47
47
48 DirstateItem = dirstatemap.DirstateItem
48 DirstateItem = dirstatemap.DirstateItem
49
49
50
50
51 class repocache(filecache):
51 class repocache(filecache):
52 """filecache for files in .hg/"""
52 """filecache for files in .hg/"""
53
53
54 def join(self, obj, fname):
54 def join(self, obj, fname):
55 return obj._opener.join(fname)
55 return obj._opener.join(fname)
56
56
57
57
58 class rootcache(filecache):
58 class rootcache(filecache):
59 """filecache for files in the repository root"""
59 """filecache for files in the repository root"""
60
60
61 def join(self, obj, fname):
61 def join(self, obj, fname):
62 return obj._join(fname)
62 return obj._join(fname)
63
63
64
64
65 def _getfsnow(vfs):
65 def _getfsnow(vfs):
66 '''Get "now" timestamp on filesystem'''
66 '''Get "now" timestamp on filesystem'''
67 tmpfd, tmpname = vfs.mkstemp()
67 tmpfd, tmpname = vfs.mkstemp()
68 try:
68 try:
69 return os.fstat(tmpfd)[stat.ST_MTIME]
69 return os.fstat(tmpfd)[stat.ST_MTIME]
70 finally:
70 finally:
71 os.close(tmpfd)
71 os.close(tmpfd)
72 vfs.unlink(tmpname)
72 vfs.unlink(tmpname)
73
73
74
74
75 def requires_parents_change(func):
75 def requires_parents_change(func):
76 def wrap(self, *args, **kwargs):
76 def wrap(self, *args, **kwargs):
77 if not self.pendingparentchange():
77 if not self.pendingparentchange():
78 msg = 'calling `%s` outside of a parentchange context'
78 msg = 'calling `%s` outside of a parentchange context'
79 msg %= func.__name__
79 msg %= func.__name__
80 raise error.ProgrammingError(msg)
80 raise error.ProgrammingError(msg)
81 return func(self, *args, **kwargs)
81 return func(self, *args, **kwargs)
82
82
83 return wrap
83 return wrap
84
84
85
85
86 def requires_no_parents_change(func):
86 def requires_no_parents_change(func):
87 def wrap(self, *args, **kwargs):
87 def wrap(self, *args, **kwargs):
88 if self.pendingparentchange():
88 if self.pendingparentchange():
89 msg = 'calling `%s` inside of a parentchange context'
89 msg = 'calling `%s` inside of a parentchange context'
90 msg %= func.__name__
90 msg %= func.__name__
91 raise error.ProgrammingError(msg)
91 raise error.ProgrammingError(msg)
92 return func(self, *args, **kwargs)
92 return func(self, *args, **kwargs)
93
93
94 return wrap
94 return wrap
95
95
96
96
97 @interfaceutil.implementer(intdirstate.idirstate)
97 @interfaceutil.implementer(intdirstate.idirstate)
98 class dirstate(object):
98 class dirstate(object):
99 def __init__(
99 def __init__(
100 self,
100 self,
101 opener,
101 opener,
102 ui,
102 ui,
103 root,
103 root,
104 validate,
104 validate,
105 sparsematchfn,
105 sparsematchfn,
106 nodeconstants,
106 nodeconstants,
107 use_dirstate_v2,
107 use_dirstate_v2,
108 ):
108 ):
109 """Create a new dirstate object.
109 """Create a new dirstate object.
110
110
111 opener is an open()-like callable that can be used to open the
111 opener is an open()-like callable that can be used to open the
112 dirstate file; root is the root of the directory tracked by
112 dirstate file; root is the root of the directory tracked by
113 the dirstate.
113 the dirstate.
114 """
114 """
115 self._use_dirstate_v2 = use_dirstate_v2
115 self._use_dirstate_v2 = use_dirstate_v2
116 self._nodeconstants = nodeconstants
116 self._nodeconstants = nodeconstants
117 self._opener = opener
117 self._opener = opener
118 self._validate = validate
118 self._validate = validate
119 self._root = root
119 self._root = root
120 self._sparsematchfn = sparsematchfn
120 self._sparsematchfn = sparsematchfn
121 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
121 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
122 # UNC path pointing to root share (issue4557)
122 # UNC path pointing to root share (issue4557)
123 self._rootdir = pathutil.normasprefix(root)
123 self._rootdir = pathutil.normasprefix(root)
124 self._dirty = False
124 self._dirty = False
125 self._lastnormaltime = 0
125 self._lastnormaltime = 0
126 self._ui = ui
126 self._ui = ui
127 self._filecache = {}
127 self._filecache = {}
128 self._parentwriters = 0
128 self._parentwriters = 0
129 self._filename = b'dirstate'
129 self._filename = b'dirstate'
130 self._pendingfilename = b'%s.pending' % self._filename
130 self._pendingfilename = b'%s.pending' % self._filename
131 self._plchangecallbacks = {}
131 self._plchangecallbacks = {}
132 self._origpl = None
132 self._origpl = None
133 self._updatedfiles = set()
134 self._mapcls = dirstatemap.dirstatemap
133 self._mapcls = dirstatemap.dirstatemap
135 # Access and cache cwd early, so we don't access it for the first time
134 # Access and cache cwd early, so we don't access it for the first time
136 # after a working-copy update caused it to not exist (accessing it then
135 # after a working-copy update caused it to not exist (accessing it then
137 # raises an exception).
136 # raises an exception).
138 self._cwd
137 self._cwd
139
138
140 def prefetch_parents(self):
139 def prefetch_parents(self):
141 """make sure the parents are loaded
140 """make sure the parents are loaded
142
141
143 Used to avoid a race condition.
142 Used to avoid a race condition.
144 """
143 """
145 self._pl
144 self._pl
146
145
147 @contextlib.contextmanager
146 @contextlib.contextmanager
148 def parentchange(self):
147 def parentchange(self):
149 """Context manager for handling dirstate parents.
148 """Context manager for handling dirstate parents.
150
149
151 If an exception occurs in the scope of the context manager,
150 If an exception occurs in the scope of the context manager,
152 the incoherent dirstate won't be written when wlock is
151 the incoherent dirstate won't be written when wlock is
153 released.
152 released.
154 """
153 """
155 self._parentwriters += 1
154 self._parentwriters += 1
156 yield
155 yield
157 # Typically we want the "undo" step of a context manager in a
156 # Typically we want the "undo" step of a context manager in a
158 # finally block so it happens even when an exception
157 # finally block so it happens even when an exception
159 # occurs. In this case, however, we only want to decrement
158 # occurs. In this case, however, we only want to decrement
160 # parentwriters if the code in the with statement exits
159 # parentwriters if the code in the with statement exits
161 # normally, so we don't have a try/finally here on purpose.
160 # normally, so we don't have a try/finally here on purpose.
162 self._parentwriters -= 1
161 self._parentwriters -= 1
163
162
164 def pendingparentchange(self):
163 def pendingparentchange(self):
165 """Returns true if the dirstate is in the middle of a set of changes
164 """Returns true if the dirstate is in the middle of a set of changes
166 that modify the dirstate parent.
165 that modify the dirstate parent.
167 """
166 """
168 return self._parentwriters > 0
167 return self._parentwriters > 0
169
168
170 @propertycache
169 @propertycache
171 def _map(self):
170 def _map(self):
172 """Return the dirstate contents (see documentation for dirstatemap)."""
171 """Return the dirstate contents (see documentation for dirstatemap)."""
173 self._map = self._mapcls(
172 self._map = self._mapcls(
174 self._ui,
173 self._ui,
175 self._opener,
174 self._opener,
176 self._root,
175 self._root,
177 self._nodeconstants,
176 self._nodeconstants,
178 self._use_dirstate_v2,
177 self._use_dirstate_v2,
179 )
178 )
180 return self._map
179 return self._map
181
180
182 @property
181 @property
183 def _sparsematcher(self):
182 def _sparsematcher(self):
184 """The matcher for the sparse checkout.
183 """The matcher for the sparse checkout.
185
184
186 The working directory may not include every file from a manifest. The
185 The working directory may not include every file from a manifest. The
187 matcher obtained by this property will match a path if it is to be
186 matcher obtained by this property will match a path if it is to be
188 included in the working directory.
187 included in the working directory.
189 """
188 """
190 # TODO there is potential to cache this property. For now, the matcher
189 # TODO there is potential to cache this property. For now, the matcher
191 # is resolved on every access. (But the called function does use a
190 # is resolved on every access. (But the called function does use a
192 # cache to keep the lookup fast.)
191 # cache to keep the lookup fast.)
193 return self._sparsematchfn()
192 return self._sparsematchfn()
194
193
195 @repocache(b'branch')
194 @repocache(b'branch')
196 def _branch(self):
195 def _branch(self):
197 try:
196 try:
198 return self._opener.read(b"branch").strip() or b"default"
197 return self._opener.read(b"branch").strip() or b"default"
199 except IOError as inst:
198 except IOError as inst:
200 if inst.errno != errno.ENOENT:
199 if inst.errno != errno.ENOENT:
201 raise
200 raise
202 return b"default"
201 return b"default"
203
202
204 @property
203 @property
205 def _pl(self):
204 def _pl(self):
206 return self._map.parents()
205 return self._map.parents()
207
206
208 def hasdir(self, d):
207 def hasdir(self, d):
209 return self._map.hastrackeddir(d)
208 return self._map.hastrackeddir(d)
210
209
211 @rootcache(b'.hgignore')
210 @rootcache(b'.hgignore')
212 def _ignore(self):
211 def _ignore(self):
213 files = self._ignorefiles()
212 files = self._ignorefiles()
214 if not files:
213 if not files:
215 return matchmod.never()
214 return matchmod.never()
216
215
217 pats = [b'include:%s' % f for f in files]
216 pats = [b'include:%s' % f for f in files]
218 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
217 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
219
218
220 @propertycache
219 @propertycache
221 def _slash(self):
220 def _slash(self):
222 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
221 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
223
222
224 @propertycache
223 @propertycache
225 def _checklink(self):
224 def _checklink(self):
226 return util.checklink(self._root)
225 return util.checklink(self._root)
227
226
228 @propertycache
227 @propertycache
229 def _checkexec(self):
228 def _checkexec(self):
230 return bool(util.checkexec(self._root))
229 return bool(util.checkexec(self._root))
231
230
232 @propertycache
231 @propertycache
233 def _checkcase(self):
232 def _checkcase(self):
234 return not util.fscasesensitive(self._join(b'.hg'))
233 return not util.fscasesensitive(self._join(b'.hg'))
235
234
236 def _join(self, f):
235 def _join(self, f):
237 # much faster than os.path.join()
236 # much faster than os.path.join()
238 # it's safe because f is always a relative path
237 # it's safe because f is always a relative path
239 return self._rootdir + f
238 return self._rootdir + f
240
239
241 def flagfunc(self, buildfallback):
240 def flagfunc(self, buildfallback):
242 if self._checklink and self._checkexec:
241 if self._checklink and self._checkexec:
243
242
244 def f(x):
243 def f(x):
245 try:
244 try:
246 st = os.lstat(self._join(x))
245 st = os.lstat(self._join(x))
247 if util.statislink(st):
246 if util.statislink(st):
248 return b'l'
247 return b'l'
249 if util.statisexec(st):
248 if util.statisexec(st):
250 return b'x'
249 return b'x'
251 except OSError:
250 except OSError:
252 pass
251 pass
253 return b''
252 return b''
254
253
255 return f
254 return f
256
255
257 fallback = buildfallback()
256 fallback = buildfallback()
258 if self._checklink:
257 if self._checklink:
259
258
260 def f(x):
259 def f(x):
261 if os.path.islink(self._join(x)):
260 if os.path.islink(self._join(x)):
262 return b'l'
261 return b'l'
263 if b'x' in fallback(x):
262 if b'x' in fallback(x):
264 return b'x'
263 return b'x'
265 return b''
264 return b''
266
265
267 return f
266 return f
268 if self._checkexec:
267 if self._checkexec:
269
268
270 def f(x):
269 def f(x):
271 if b'l' in fallback(x):
270 if b'l' in fallback(x):
272 return b'l'
271 return b'l'
273 if util.isexec(self._join(x)):
272 if util.isexec(self._join(x)):
274 return b'x'
273 return b'x'
275 return b''
274 return b''
276
275
277 return f
276 return f
278 else:
277 else:
279 return fallback
278 return fallback
280
279
281 @propertycache
280 @propertycache
282 def _cwd(self):
281 def _cwd(self):
283 # internal config: ui.forcecwd
282 # internal config: ui.forcecwd
284 forcecwd = self._ui.config(b'ui', b'forcecwd')
283 forcecwd = self._ui.config(b'ui', b'forcecwd')
285 if forcecwd:
284 if forcecwd:
286 return forcecwd
285 return forcecwd
287 return encoding.getcwd()
286 return encoding.getcwd()
288
287
289 def getcwd(self):
288 def getcwd(self):
290 """Return the path from which a canonical path is calculated.
289 """Return the path from which a canonical path is calculated.
291
290
292 This path should be used to resolve file patterns or to convert
291 This path should be used to resolve file patterns or to convert
293 canonical paths back to file paths for display. It shouldn't be
292 canonical paths back to file paths for display. It shouldn't be
294 used to get real file paths. Use vfs functions instead.
293 used to get real file paths. Use vfs functions instead.
295 """
294 """
296 cwd = self._cwd
295 cwd = self._cwd
297 if cwd == self._root:
296 if cwd == self._root:
298 return b''
297 return b''
299 # self._root ends with a path separator if self._root is '/' or 'C:\'
298 # self._root ends with a path separator if self._root is '/' or 'C:\'
300 rootsep = self._root
299 rootsep = self._root
301 if not util.endswithsep(rootsep):
300 if not util.endswithsep(rootsep):
302 rootsep += pycompat.ossep
301 rootsep += pycompat.ossep
303 if cwd.startswith(rootsep):
302 if cwd.startswith(rootsep):
304 return cwd[len(rootsep) :]
303 return cwd[len(rootsep) :]
305 else:
304 else:
306 # we're outside the repo. return an absolute path.
305 # we're outside the repo. return an absolute path.
307 return cwd
306 return cwd
308
307
309 def pathto(self, f, cwd=None):
308 def pathto(self, f, cwd=None):
310 if cwd is None:
309 if cwd is None:
311 cwd = self.getcwd()
310 cwd = self.getcwd()
312 path = util.pathto(self._root, cwd, f)
311 path = util.pathto(self._root, cwd, f)
313 if self._slash:
312 if self._slash:
314 return util.pconvert(path)
313 return util.pconvert(path)
315 return path
314 return path
316
315
317 def __getitem__(self, key):
316 def __getitem__(self, key):
318 """Return the current state of key (a filename) in the dirstate.
317 """Return the current state of key (a filename) in the dirstate.
319
318
320 States are:
319 States are:
321 n normal
320 n normal
322 m needs merging
321 m needs merging
323 r marked for removal
322 r marked for removal
324 a marked for addition
323 a marked for addition
325 ? not tracked
324 ? not tracked
326
325
327 XXX The "state" is a bit obscure to be in the "public" API. we should
326 XXX The "state" is a bit obscure to be in the "public" API. we should
328 consider migrating all user of this to going through the dirstate entry
327 consider migrating all user of this to going through the dirstate entry
329 instead.
328 instead.
330 """
329 """
331 entry = self._map.get(key)
330 entry = self._map.get(key)
332 if entry is not None:
331 if entry is not None:
333 return entry.state
332 return entry.state
334 return b'?'
333 return b'?'
335
334
336 def __contains__(self, key):
335 def __contains__(self, key):
337 return key in self._map
336 return key in self._map
338
337
339 def __iter__(self):
338 def __iter__(self):
340 return iter(sorted(self._map))
339 return iter(sorted(self._map))
341
340
342 def items(self):
341 def items(self):
343 return pycompat.iteritems(self._map)
342 return pycompat.iteritems(self._map)
344
343
345 iteritems = items
344 iteritems = items
346
345
347 def parents(self):
346 def parents(self):
348 return [self._validate(p) for p in self._pl]
347 return [self._validate(p) for p in self._pl]
349
348
350 def p1(self):
349 def p1(self):
351 return self._validate(self._pl[0])
350 return self._validate(self._pl[0])
352
351
353 def p2(self):
352 def p2(self):
354 return self._validate(self._pl[1])
353 return self._validate(self._pl[1])
355
354
356 @property
355 @property
357 def in_merge(self):
356 def in_merge(self):
358 """True if a merge is in progress"""
357 """True if a merge is in progress"""
359 return self._pl[1] != self._nodeconstants.nullid
358 return self._pl[1] != self._nodeconstants.nullid
360
359
361 def branch(self):
360 def branch(self):
362 return encoding.tolocal(self._branch)
361 return encoding.tolocal(self._branch)
363
362
364 def setparents(self, p1, p2=None):
363 def setparents(self, p1, p2=None):
365 """Set dirstate parents to p1 and p2.
364 """Set dirstate parents to p1 and p2.
366
365
367 When moving from two parents to one, "merged" entries a
366 When moving from two parents to one, "merged" entries a
368 adjusted to normal and previous copy records discarded and
367 adjusted to normal and previous copy records discarded and
369 returned by the call.
368 returned by the call.
370
369
371 See localrepo.setparents()
370 See localrepo.setparents()
372 """
371 """
373 if p2 is None:
372 if p2 is None:
374 p2 = self._nodeconstants.nullid
373 p2 = self._nodeconstants.nullid
375 if self._parentwriters == 0:
374 if self._parentwriters == 0:
376 raise ValueError(
375 raise ValueError(
377 b"cannot set dirstate parent outside of "
376 b"cannot set dirstate parent outside of "
378 b"dirstate.parentchange context manager"
377 b"dirstate.parentchange context manager"
379 )
378 )
380
379
381 self._dirty = True
380 self._dirty = True
382 oldp2 = self._pl[1]
381 oldp2 = self._pl[1]
383 if self._origpl is None:
382 if self._origpl is None:
384 self._origpl = self._pl
383 self._origpl = self._pl
385 self._map.setparents(p1, p2)
384 self._map.setparents(p1, p2)
386 copies = {}
385 copies = {}
387 nullid = self._nodeconstants.nullid
386 nullid = self._nodeconstants.nullid
388 if oldp2 != nullid and p2 == nullid:
387 if oldp2 != nullid and p2 == nullid:
389 candidatefiles = self._map.non_normal_or_other_parent_paths()
388 candidatefiles = self._map.non_normal_or_other_parent_paths()
390
389
391 for f in candidatefiles:
390 for f in candidatefiles:
392 s = self._map.get(f)
391 s = self._map.get(f)
393 if s is None:
392 if s is None:
394 continue
393 continue
395
394
396 # Discard "merged" markers when moving away from a merge state
395 # Discard "merged" markers when moving away from a merge state
397 if s.merged:
396 if s.merged:
398 source = self._map.copymap.get(f)
397 source = self._map.copymap.get(f)
399 if source:
398 if source:
400 copies[f] = source
399 copies[f] = source
401 self._map.reset_state(
400 self._map.reset_state(
402 f,
401 f,
403 wc_tracked=True,
402 wc_tracked=True,
404 p1_tracked=True,
403 p1_tracked=True,
405 possibly_dirty=True,
404 possibly_dirty=True,
406 )
405 )
407 # Also fix up otherparent markers
406 # Also fix up otherparent markers
408 elif s.from_p2:
407 elif s.from_p2:
409 source = self._map.copymap.get(f)
408 source = self._map.copymap.get(f)
410 if source:
409 if source:
411 copies[f] = source
410 copies[f] = source
412 self._check_new_tracked_filename(f)
411 self._check_new_tracked_filename(f)
413 self._updatedfiles.add(f)
414 self._map.reset_state(
412 self._map.reset_state(
415 f,
413 f,
416 p1_tracked=False,
414 p1_tracked=False,
417 wc_tracked=True,
415 wc_tracked=True,
418 )
416 )
419 return copies
417 return copies
420
418
421 def setbranch(self, branch):
419 def setbranch(self, branch):
422 self.__class__._branch.set(self, encoding.fromlocal(branch))
420 self.__class__._branch.set(self, encoding.fromlocal(branch))
423 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
421 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
424 try:
422 try:
425 f.write(self._branch + b'\n')
423 f.write(self._branch + b'\n')
426 f.close()
424 f.close()
427
425
428 # make sure filecache has the correct stat info for _branch after
426 # make sure filecache has the correct stat info for _branch after
429 # replacing the underlying file
427 # replacing the underlying file
430 ce = self._filecache[b'_branch']
428 ce = self._filecache[b'_branch']
431 if ce:
429 if ce:
432 ce.refresh()
430 ce.refresh()
433 except: # re-raises
431 except: # re-raises
434 f.discard()
432 f.discard()
435 raise
433 raise
436
434
437 def invalidate(self):
435 def invalidate(self):
438 """Causes the next access to reread the dirstate.
436 """Causes the next access to reread the dirstate.
439
437
440 This is different from localrepo.invalidatedirstate() because it always
438 This is different from localrepo.invalidatedirstate() because it always
441 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
439 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
442 check whether the dirstate has changed before rereading it."""
440 check whether the dirstate has changed before rereading it."""
443
441
444 for a in ("_map", "_branch", "_ignore"):
442 for a in ("_map", "_branch", "_ignore"):
445 if a in self.__dict__:
443 if a in self.__dict__:
446 delattr(self, a)
444 delattr(self, a)
447 self._lastnormaltime = 0
445 self._lastnormaltime = 0
448 self._dirty = False
446 self._dirty = False
449 self._updatedfiles.clear()
450 self._parentwriters = 0
447 self._parentwriters = 0
451 self._origpl = None
448 self._origpl = None
452
449
453 def copy(self, source, dest):
450 def copy(self, source, dest):
454 """Mark dest as a copy of source. Unmark dest if source is None."""
451 """Mark dest as a copy of source. Unmark dest if source is None."""
455 if source == dest:
452 if source == dest:
456 return
453 return
457 self._dirty = True
454 self._dirty = True
458 if source is not None:
455 if source is not None:
459 self._map.copymap[dest] = source
456 self._map.copymap[dest] = source
460 self._updatedfiles.add(source)
457 else:
461 self._updatedfiles.add(dest)
458 self._map.copymap.pop(dest, None)
462 elif self._map.copymap.pop(dest, None):
463 self._updatedfiles.add(dest)
464
459
465 def copied(self, file):
460 def copied(self, file):
466 return self._map.copymap.get(file, None)
461 return self._map.copymap.get(file, None)
467
462
468 def copies(self):
463 def copies(self):
469 return self._map.copymap
464 return self._map.copymap
470
465
471 @requires_no_parents_change
466 @requires_no_parents_change
472 def set_tracked(self, filename):
467 def set_tracked(self, filename):
473 """a "public" method for generic code to mark a file as tracked
468 """a "public" method for generic code to mark a file as tracked
474
469
475 This function is to be called outside of "update/merge" case. For
470 This function is to be called outside of "update/merge" case. For
476 example by a command like `hg add X`.
471 example by a command like `hg add X`.
477
472
478 return True the file was previously untracked, False otherwise.
473 return True the file was previously untracked, False otherwise.
479 """
474 """
480 self._dirty = True
475 self._dirty = True
481 self._updatedfiles.add(filename)
482 entry = self._map.get(filename)
476 entry = self._map.get(filename)
483 if entry is None or not entry.tracked:
477 if entry is None or not entry.tracked:
484 self._check_new_tracked_filename(filename)
478 self._check_new_tracked_filename(filename)
485 return self._map.set_tracked(filename)
479 return self._map.set_tracked(filename)
486
480
487 @requires_no_parents_change
481 @requires_no_parents_change
488 def set_untracked(self, filename):
482 def set_untracked(self, filename):
489 """a "public" method for generic code to mark a file as untracked
483 """a "public" method for generic code to mark a file as untracked
490
484
491 This function is to be called outside of "update/merge" case. For
485 This function is to be called outside of "update/merge" case. For
492 example by a command like `hg remove X`.
486 example by a command like `hg remove X`.
493
487
494 return True the file was previously tracked, False otherwise.
488 return True the file was previously tracked, False otherwise.
495 """
489 """
496 ret = self._map.set_untracked(filename)
490 ret = self._map.set_untracked(filename)
497 if ret:
491 if ret:
498 self._dirty = True
492 self._dirty = True
499 self._updatedfiles.add(filename)
500 return ret
493 return ret
501
494
502 @requires_no_parents_change
495 @requires_no_parents_change
503 def set_clean(self, filename, parentfiledata=None):
496 def set_clean(self, filename, parentfiledata=None):
504 """record that the current state of the file on disk is known to be clean"""
497 """record that the current state of the file on disk is known to be clean"""
505 self._dirty = True
498 self._dirty = True
506 self._updatedfiles.add(filename)
507 if parentfiledata:
499 if parentfiledata:
508 (mode, size, mtime) = parentfiledata
500 (mode, size, mtime) = parentfiledata
509 else:
501 else:
510 (mode, size, mtime) = self._get_filedata(filename)
502 (mode, size, mtime) = self._get_filedata(filename)
511 if not self._map[filename].tracked:
503 if not self._map[filename].tracked:
512 self._check_new_tracked_filename(filename)
504 self._check_new_tracked_filename(filename)
513 self._map.set_clean(filename, mode, size, mtime)
505 self._map.set_clean(filename, mode, size, mtime)
514 if mtime > self._lastnormaltime:
506 if mtime > self._lastnormaltime:
515 # Remember the most recent modification timeslot for status(),
507 # Remember the most recent modification timeslot for status(),
516 # to make sure we won't miss future size-preserving file content
508 # to make sure we won't miss future size-preserving file content
517 # modifications that happen within the same timeslot.
509 # modifications that happen within the same timeslot.
518 self._lastnormaltime = mtime
510 self._lastnormaltime = mtime
519
511
520 @requires_no_parents_change
512 @requires_no_parents_change
521 def set_possibly_dirty(self, filename):
513 def set_possibly_dirty(self, filename):
522 """record that the current state of the file on disk is unknown"""
514 """record that the current state of the file on disk is unknown"""
523 self._dirty = True
515 self._dirty = True
524 self._updatedfiles.add(filename)
525 self._map.set_possibly_dirty(filename)
516 self._map.set_possibly_dirty(filename)
526
517
527 @requires_parents_change
518 @requires_parents_change
528 def update_file_p1(
519 def update_file_p1(
529 self,
520 self,
530 filename,
521 filename,
531 p1_tracked,
522 p1_tracked,
532 ):
523 ):
533 """Set a file as tracked in the parent (or not)
524 """Set a file as tracked in the parent (or not)
534
525
535 This is to be called when adjust the dirstate to a new parent after an history
526 This is to be called when adjust the dirstate to a new parent after an history
536 rewriting operation.
527 rewriting operation.
537
528
538 It should not be called during a merge (p2 != nullid) and only within
529 It should not be called during a merge (p2 != nullid) and only within
539 a `with dirstate.parentchange():` context.
530 a `with dirstate.parentchange():` context.
540 """
531 """
541 if self.in_merge:
532 if self.in_merge:
542 msg = b'update_file_reference should not be called when merging'
533 msg = b'update_file_reference should not be called when merging'
543 raise error.ProgrammingError(msg)
534 raise error.ProgrammingError(msg)
544 entry = self._map.get(filename)
535 entry = self._map.get(filename)
545 if entry is None:
536 if entry is None:
546 wc_tracked = False
537 wc_tracked = False
547 else:
538 else:
548 wc_tracked = entry.tracked
539 wc_tracked = entry.tracked
549 possibly_dirty = False
540 possibly_dirty = False
550 if p1_tracked and wc_tracked:
541 if p1_tracked and wc_tracked:
551 # the underlying reference might have changed, we will have to
542 # the underlying reference might have changed, we will have to
552 # check it.
543 # check it.
553 possibly_dirty = True
544 possibly_dirty = True
554 elif not (p1_tracked or wc_tracked):
545 elif not (p1_tracked or wc_tracked):
555 # the file is no longer relevant to anyone
546 # the file is no longer relevant to anyone
556 if self._map.get(filename) is not None:
547 if self._map.get(filename) is not None:
557 self._map.reset_state(filename)
548 self._map.reset_state(filename)
558 self._dirty = True
549 self._dirty = True
559 self._updatedfiles.add(filename)
560 elif (not p1_tracked) and wc_tracked:
550 elif (not p1_tracked) and wc_tracked:
561 if entry is not None and entry.added:
551 if entry is not None and entry.added:
562 return # avoid dropping copy information (maybe?)
552 return # avoid dropping copy information (maybe?)
563 elif p1_tracked and not wc_tracked:
553 elif p1_tracked and not wc_tracked:
564 pass
554 pass
565 else:
555 else:
566 assert False, 'unreachable'
556 assert False, 'unreachable'
567
557
568 # this mean we are doing call for file we do not really care about the
558 # this mean we are doing call for file we do not really care about the
569 # data (eg: added or removed), however this should be a minor overhead
559 # data (eg: added or removed), however this should be a minor overhead
570 # compared to the overall update process calling this.
560 # compared to the overall update process calling this.
571 parentfiledata = None
561 parentfiledata = None
572 if wc_tracked:
562 if wc_tracked:
573 parentfiledata = self._get_filedata(filename)
563 parentfiledata = self._get_filedata(filename)
574
564
575 self._updatedfiles.add(filename)
576 self._map.reset_state(
565 self._map.reset_state(
577 filename,
566 filename,
578 wc_tracked,
567 wc_tracked,
579 p1_tracked,
568 p1_tracked,
580 possibly_dirty=possibly_dirty,
569 possibly_dirty=possibly_dirty,
581 parentfiledata=parentfiledata,
570 parentfiledata=parentfiledata,
582 )
571 )
583 if (
572 if (
584 parentfiledata is not None
573 parentfiledata is not None
585 and parentfiledata[2] > self._lastnormaltime
574 and parentfiledata[2] > self._lastnormaltime
586 ):
575 ):
587 # Remember the most recent modification timeslot for status(),
576 # Remember the most recent modification timeslot for status(),
588 # to make sure we won't miss future size-preserving file content
577 # to make sure we won't miss future size-preserving file content
589 # modifications that happen within the same timeslot.
578 # modifications that happen within the same timeslot.
590 self._lastnormaltime = parentfiledata[2]
579 self._lastnormaltime = parentfiledata[2]
591
580
    @requires_parents_change
    def update_file(
        self,
        filename,
        wc_tracked,
        p1_tracked,
        p2_tracked=False,
        merged=False,
        clean_p1=False,
        clean_p2=False,
        possibly_dirty=False,
        parentfiledata=None,
    ):
        """update the information about a file in the dirstate

        This is to be called when the direstates parent changes to keep track
        of what is the file situation in regards to the working copy and its parent.

        This function must be called within a `dirstate.parentchange` context.

        note: the API is at an early stage and we might need to adjust it
        depending of what information ends up being relevant and useful to
        other processing.

        ``wc_tracked`` / ``p1_tracked`` / ``p2_tracked``: whether the file is
        tracked in the working copy / first parent / second parent.
        ``merged`` is mutually exclusive with ``clean_p1``/``clean_p2``
        (enforced below); all three are forwarded to the dirstate map.
        ``parentfiledata``: optional pre-computed ``(mode, size, mtime)``
        tuple; when omitted it is read from disk for clean p1-tracked files.

        Raises error.ProgrammingError on incompatible flag combinations.
        """
        if merged and (clean_p1 or clean_p2):
            msg = b'`merged` argument incompatible with `clean_p1`/`clean_p2`'
            raise error.ProgrammingError(msg)

        # note: I do not think we need to double check name clash here since we
        # are in a update/merge case that should already have taken care of
        # this. The test agrees

        self._dirty = True

        # only stat the file when it is clean relative to p1: dirty, merged
        # or p2-clean entries do not need fresh on-disk data
        need_parent_file_data = (
            not (possibly_dirty or clean_p2 or merged)
            and wc_tracked
            and p1_tracked
        )

        # this mean we are doing call for file we do not really care about the
        # data (eg: added or removed), however this should be a minor overhead
        # compared to the overall update process calling this.
        if need_parent_file_data:
            if parentfiledata is None:
                parentfiledata = self._get_filedata(filename)
            mtime = parentfiledata[2]

            if mtime > self._lastnormaltime:
                # Remember the most recent modification timeslot for
                # status(), to make sure we won't miss future
                # size-preserving file content modifications that happen
                # within the same timeslot.
                self._lastnormaltime = mtime

        self._map.reset_state(
            filename,
            wc_tracked,
            p1_tracked,
            p2_tracked=p2_tracked,
            merged=merged,
            clean_p1=clean_p1,
            clean_p2=clean_p2,
            possibly_dirty=possibly_dirty,
            parentfiledata=parentfiledata,
        )
        if (
            parentfiledata is not None
            and parentfiledata[2] > self._lastnormaltime
        ):
            # Remember the most recent modification timeslot for status(),
            # to make sure we won't miss future size-preserving file content
            # modifications that happen within the same timeslot.
            self._lastnormaltime = parentfiledata[2]
667
655
668 def _check_new_tracked_filename(self, filename):
656 def _check_new_tracked_filename(self, filename):
669 scmutil.checkfilename(filename)
657 scmutil.checkfilename(filename)
670 if self._map.hastrackeddir(filename):
658 if self._map.hastrackeddir(filename):
671 msg = _(b'directory %r already in dirstate')
659 msg = _(b'directory %r already in dirstate')
672 msg %= pycompat.bytestr(filename)
660 msg %= pycompat.bytestr(filename)
673 raise error.Abort(msg)
661 raise error.Abort(msg)
674 # shadows
662 # shadows
675 for d in pathutil.finddirs(filename):
663 for d in pathutil.finddirs(filename):
676 if self._map.hastrackeddir(d):
664 if self._map.hastrackeddir(d):
677 break
665 break
678 entry = self._map.get(d)
666 entry = self._map.get(d)
679 if entry is not None and not entry.removed:
667 if entry is not None and not entry.removed:
680 msg = _(b'file %r in dirstate clashes with %r')
668 msg = _(b'file %r in dirstate clashes with %r')
681 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
669 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
682 raise error.Abort(msg)
670 raise error.Abort(msg)
683
671
684 def _get_filedata(self, filename):
672 def _get_filedata(self, filename):
685 """returns"""
673 """returns"""
686 s = os.lstat(self._join(filename))
674 s = os.lstat(self._join(filename))
687 mode = s.st_mode
675 mode = s.st_mode
688 size = s.st_size
676 size = s.st_size
689 mtime = s[stat.ST_MTIME]
677 mtime = s[stat.ST_MTIME]
690 return (mode, size, mtime)
678 return (mode, size, mtime)
691
679
692 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
680 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
693 if exists is None:
681 if exists is None:
694 exists = os.path.lexists(os.path.join(self._root, path))
682 exists = os.path.lexists(os.path.join(self._root, path))
695 if not exists:
683 if not exists:
696 # Maybe a path component exists
684 # Maybe a path component exists
697 if not ignoremissing and b'/' in path:
685 if not ignoremissing and b'/' in path:
698 d, f = path.rsplit(b'/', 1)
686 d, f = path.rsplit(b'/', 1)
699 d = self._normalize(d, False, ignoremissing, None)
687 d = self._normalize(d, False, ignoremissing, None)
700 folded = d + b"/" + f
688 folded = d + b"/" + f
701 else:
689 else:
702 # No path components, preserve original case
690 # No path components, preserve original case
703 folded = path
691 folded = path
704 else:
692 else:
705 # recursively normalize leading directory components
693 # recursively normalize leading directory components
706 # against dirstate
694 # against dirstate
707 if b'/' in normed:
695 if b'/' in normed:
708 d, f = normed.rsplit(b'/', 1)
696 d, f = normed.rsplit(b'/', 1)
709 d = self._normalize(d, False, ignoremissing, True)
697 d = self._normalize(d, False, ignoremissing, True)
710 r = self._root + b"/" + d
698 r = self._root + b"/" + d
711 folded = d + b"/" + util.fspath(f, r)
699 folded = d + b"/" + util.fspath(f, r)
712 else:
700 else:
713 folded = util.fspath(normed, self._root)
701 folded = util.fspath(normed, self._root)
714 storemap[normed] = folded
702 storemap[normed] = folded
715
703
716 return folded
704 return folded
717
705
718 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
706 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
719 normed = util.normcase(path)
707 normed = util.normcase(path)
720 folded = self._map.filefoldmap.get(normed, None)
708 folded = self._map.filefoldmap.get(normed, None)
721 if folded is None:
709 if folded is None:
722 if isknown:
710 if isknown:
723 folded = path
711 folded = path
724 else:
712 else:
725 folded = self._discoverpath(
713 folded = self._discoverpath(
726 path, normed, ignoremissing, exists, self._map.filefoldmap
714 path, normed, ignoremissing, exists, self._map.filefoldmap
727 )
715 )
728 return folded
716 return folded
729
717
730 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
718 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
731 normed = util.normcase(path)
719 normed = util.normcase(path)
732 folded = self._map.filefoldmap.get(normed, None)
720 folded = self._map.filefoldmap.get(normed, None)
733 if folded is None:
721 if folded is None:
734 folded = self._map.dirfoldmap.get(normed, None)
722 folded = self._map.dirfoldmap.get(normed, None)
735 if folded is None:
723 if folded is None:
736 if isknown:
724 if isknown:
737 folded = path
725 folded = path
738 else:
726 else:
739 # store discovered result in dirfoldmap so that future
727 # store discovered result in dirfoldmap so that future
740 # normalizefile calls don't start matching directories
728 # normalizefile calls don't start matching directories
741 folded = self._discoverpath(
729 folded = self._discoverpath(
742 path, normed, ignoremissing, exists, self._map.dirfoldmap
730 path, normed, ignoremissing, exists, self._map.dirfoldmap
743 )
731 )
744 return folded
732 return folded
745
733
746 def normalize(self, path, isknown=False, ignoremissing=False):
734 def normalize(self, path, isknown=False, ignoremissing=False):
747 """
735 """
748 normalize the case of a pathname when on a casefolding filesystem
736 normalize the case of a pathname when on a casefolding filesystem
749
737
750 isknown specifies whether the filename came from walking the
738 isknown specifies whether the filename came from walking the
751 disk, to avoid extra filesystem access.
739 disk, to avoid extra filesystem access.
752
740
753 If ignoremissing is True, missing path are returned
741 If ignoremissing is True, missing path are returned
754 unchanged. Otherwise, we try harder to normalize possibly
742 unchanged. Otherwise, we try harder to normalize possibly
755 existing path components.
743 existing path components.
756
744
757 The normalized case is determined based on the following precedence:
745 The normalized case is determined based on the following precedence:
758
746
759 - version of name already stored in the dirstate
747 - version of name already stored in the dirstate
760 - version of name stored on disk
748 - version of name stored on disk
761 - version provided via command arguments
749 - version provided via command arguments
762 """
750 """
763
751
764 if self._checkcase:
752 if self._checkcase:
765 return self._normalize(path, isknown, ignoremissing)
753 return self._normalize(path, isknown, ignoremissing)
766 return path
754 return path
767
755
768 def clear(self):
756 def clear(self):
769 self._map.clear()
757 self._map.clear()
770 self._lastnormaltime = 0
758 self._lastnormaltime = 0
771 self._updatedfiles.clear()
772 self._dirty = True
759 self._dirty = True
773
760
    def rebuild(self, parent, allfiles, changedfiles=None):
        """Rebuild dirstate entries with `parent` as the (single) new parent.

        ``allfiles`` lists every file that should end up tracked.  When
        ``changedfiles`` is None the whole dirstate is rebuilt; otherwise
        only that subset is touched: entries also in ``allfiles`` are reset
        as possibly-dirty, the rest are dropped.
        """
        if changedfiles is None:
            # Rebuild entire dirstate
            to_lookup = allfiles
            to_drop = []
            # clear() resets _lastnormaltime; preserve it across the rebuild
            lastnormaltime = self._lastnormaltime
            self.clear()
            self._lastnormaltime = lastnormaltime
        elif len(changedfiles) < 10:
            # Avoid turning allfiles into a set, which can be expensive if it's
            # large.
            to_lookup = []
            to_drop = []
            for f in changedfiles:
                if f in allfiles:
                    to_lookup.append(f)
                else:
                    to_drop.append(f)
        else:
            changedfilesset = set(changedfiles)
            to_lookup = changedfilesset & set(allfiles)
            to_drop = changedfilesset - to_lookup

        if self._origpl is None:
            # remember the pre-rebuild parents for the parent-change callbacks
            self._origpl = self._pl
        self._map.setparents(parent, self._nodeconstants.nullid)

        for f in to_lookup:

            if self.in_merge:
                self.set_tracked(f)
            else:
                self._map.reset_state(
                    f,
                    wc_tracked=True,
                    p1_tracked=True,
                    possibly_dirty=True,
                )
        for f in to_drop:
            self._map.reset_state(f)

        self._dirty = True
818
803
819 def identity(self):
804 def identity(self):
820 """Return identity of dirstate itself to detect changing in storage
805 """Return identity of dirstate itself to detect changing in storage
821
806
822 If identity of previous dirstate is equal to this, writing
807 If identity of previous dirstate is equal to this, writing
823 changes based on the former dirstate out can keep consistency.
808 changes based on the former dirstate out can keep consistency.
824 """
809 """
825 return self._map.identity
810 return self._map.identity
826
811
    def write(self, tr):
        """Write in-memory dirstate changes to disk, if anything is dirty.

        With a transaction ``tr``, the write is registered as a file
        generator so it happens together with the transaction; without one,
        the dirstate file is written immediately via an atomic temp file.
        """
        if not self._dirty:
            return

        filename = self._filename
        if tr:
            # 'dirstate.write()' is not only for writing in-memory
            # changes out, but also for dropping ambiguous timestamp.
            # delayed writing re-raise "ambiguous timestamp issue".
            # See also the wiki page below for detail:
            # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan

            # record when mtime start to be ambiguous
            now = _getfsnow(self._opener)

            # delay writing in-memory changes out
            tr.addfilegenerator(
                b'dirstate',
                (self._filename,),
                lambda f: self._writedirstate(tr, f, now=now),
                location=b'plain',
            )
            return

        st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
        self._writedirstate(tr, st)
856
838
857 def addparentchangecallback(self, category, callback):
839 def addparentchangecallback(self, category, callback):
858 """add a callback to be called when the wd parents are changed
840 """add a callback to be called when the wd parents are changed
859
841
860 Callback will be called with the following arguments:
842 Callback will be called with the following arguments:
861 dirstate, (oldp1, oldp2), (newp1, newp2)
843 dirstate, (oldp1, oldp2), (newp1, newp2)
862
844
863 Category is a unique identifier to allow overwriting an old callback
845 Category is a unique identifier to allow overwriting an old callback
864 with a newer callback.
846 with a newer callback.
865 """
847 """
866 self._plchangecallbacks[category] = callback
848 self._plchangecallbacks[category] = callback
867
849
    def _writedirstate(self, tr, st, now=None):
        """Serialize the dirstate map into the already-open file *st*.

        ``now`` is the filesystem's current timestamp used to detect
        ambiguous mtimes; when None it is taken from the temp file's own
        mtime.  Parent-change callbacks registered through
        ``addparentchangecallback`` are fired first if the parents moved.
        """
        # notify callbacks about parents change
        if self._origpl is not None and self._origpl != self._pl:
            for c, callback in sorted(
                pycompat.iteritems(self._plchangecallbacks)
            ):
                callback(self, self._origpl, self._pl)
            self._origpl = None

        if now is None:
            # use the modification time of the newly created temporary file as the
            # filesystem's notion of 'now'
            now = util.fstat(st)[stat.ST_MTIME] & _rangemask

        # enough 'delaywrite' prevents 'pack_dirstate' from dropping
        # timestamp of each entries in dirstate, because of 'now > mtime'
        delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
        if delaywrite > 0:
            # do we have any files to delay for?
            for f, e in pycompat.iteritems(self._map):
                if e.need_delay(now):
                    import time  # to avoid useless import

                    # rather than sleep n seconds, sleep until the next
                    # multiple of n seconds
                    clock = time.time()
                    start = int(clock) - (int(clock) % delaywrite)
                    end = start + delaywrite
                    time.sleep(end - clock)
                    now = end  # trust our estimate that the end is near now
                    break

        self._map.write(tr, st, now)
        self._lastnormaltime = 0
        self._dirty = False
903
885
904 def _dirignore(self, f):
886 def _dirignore(self, f):
905 if self._ignore(f):
887 if self._ignore(f):
906 return True
888 return True
907 for p in pathutil.finddirs(f):
889 for p in pathutil.finddirs(f):
908 if self._ignore(p):
890 if self._ignore(p):
909 return True
891 return True
910 return False
892 return False
911
893
912 def _ignorefiles(self):
894 def _ignorefiles(self):
913 files = []
895 files = []
914 if os.path.exists(self._join(b'.hgignore')):
896 if os.path.exists(self._join(b'.hgignore')):
915 files.append(self._join(b'.hgignore'))
897 files.append(self._join(b'.hgignore'))
916 for name, path in self._ui.configitems(b"ui"):
898 for name, path in self._ui.configitems(b"ui"):
917 if name == b'ignore' or name.startswith(b'ignore.'):
899 if name == b'ignore' or name.startswith(b'ignore.'):
918 # we need to use os.path.join here rather than self._join
900 # we need to use os.path.join here rather than self._join
919 # because path is arbitrary and user-specified
901 # because path is arbitrary and user-specified
920 files.append(os.path.join(self._rootdir, util.expandpath(path)))
902 files.append(os.path.join(self._rootdir, util.expandpath(path)))
921 return files
903 return files
922
904
923 def _ignorefileandline(self, f):
905 def _ignorefileandline(self, f):
924 files = collections.deque(self._ignorefiles())
906 files = collections.deque(self._ignorefiles())
925 visited = set()
907 visited = set()
926 while files:
908 while files:
927 i = files.popleft()
909 i = files.popleft()
928 patterns = matchmod.readpatternfile(
910 patterns = matchmod.readpatternfile(
929 i, self._ui.warn, sourceinfo=True
911 i, self._ui.warn, sourceinfo=True
930 )
912 )
931 for pattern, lineno, line in patterns:
913 for pattern, lineno, line in patterns:
932 kind, p = matchmod._patsplit(pattern, b'glob')
914 kind, p = matchmod._patsplit(pattern, b'glob')
933 if kind == b"subinclude":
915 if kind == b"subinclude":
934 if p not in visited:
916 if p not in visited:
935 files.append(p)
917 files.append(p)
936 continue
918 continue
937 m = matchmod.match(
919 m = matchmod.match(
938 self._root, b'', [], [pattern], warn=self._ui.warn
920 self._root, b'', [], [pattern], warn=self._ui.warn
939 )
921 )
940 if m(f):
922 if m(f):
941 return (i, lineno, line)
923 return (i, lineno, line)
942 visited.add(i)
924 visited.add(i)
943 return (None, -1, b"")
925 return (None, -1, b"")
944
926
    def _walkexplicit(self, match, subrepos):
        """Get stat data about the files explicitly specified by match.

        Return a triple (results, dirsfound, dirsnotfound).
        - results is a mapping from filename to stat result. It also contains
          listings mapping subrepos and .hg to None.
        - dirsfound is a list of files found to be directories.
        - dirsnotfound is a list of files that the dirstate thinks are
          directories and that were not found."""

        def badtype(mode):
            # human-readable description for an unsupported file type
            kind = _(b'unknown')
            if stat.S_ISCHR(mode):
                kind = _(b'character device')
            elif stat.S_ISBLK(mode):
                kind = _(b'block device')
            elif stat.S_ISFIFO(mode):
                kind = _(b'fifo')
            elif stat.S_ISSOCK(mode):
                kind = _(b'socket')
            elif stat.S_ISDIR(mode):
                kind = _(b'directory')
            return _(b'unsupported file type (type is %s)') % kind

        badfn = match.bad
        dmap = self._map
        # bind hot lookups to locals for the loop below
        lstat = os.lstat
        getkind = stat.S_IFMT
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join
        dirsfound = []
        foundadd = dirsfound.append
        dirsnotfound = []
        notfoundadd = dirsnotfound.append

        if not match.isexact() and self._checkcase:
            normalize = self._normalize
        else:
            normalize = None

        # both lists sorted so the merge-style scan below can prune, in one
        # pass, explicit files that live under a listed subrepo
        files = sorted(match.files())
        subrepos.sort()
        i, j = 0, 0
        while i < len(files) and j < len(subrepos):
            subpath = subrepos[j] + b"/"
            if files[i] < subpath:
                i += 1
                continue
            while i < len(files) and files[i].startswith(subpath):
                del files[i]
            j += 1

        if not files or b'' in files:
            files = [b'']
            # constructing the foldmap is expensive, so don't do it for the
            # common case where files is ['']
            normalize = None
        results = dict.fromkeys(subrepos)
        results[b'.hg'] = None

        for ff in files:
            if normalize:
                nf = normalize(ff, False, True)
            else:
                nf = ff
            if nf in results:
                continue

            try:
                st = lstat(join(nf))
                kind = getkind(st.st_mode)
                if kind == dirkind:
                    if nf in dmap:
                        # file replaced by dir on disk but still in dirstate
                        results[nf] = None
                    foundadd((nf, ff))
                elif kind == regkind or kind == lnkkind:
                    results[nf] = st
                else:
                    badfn(ff, badtype(kind))
                    if nf in dmap:
                        results[nf] = None
            except OSError as inst:  # nf not found on disk - it is dirstate only
                if nf in dmap:  # does it exactly match a missing file?
                    results[nf] = None
                else:  # does it match a missing directory?
                    if self._map.hasdir(nf):
                        notfoundadd(nf)
                    else:
                        badfn(ff, encoding.strtolocal(inst.strerror))

        # match.files() may contain explicitly-specified paths that shouldn't
        # be taken; drop them from the list of files found. dirsfound/notfound
        # aren't filtered here because they will be tested later.
        if match.anypats():
            for f in list(results):
                if f == b'.hg' or f in subrepos:
                    # keep sentinel to disable further out-of-repo walks
                    continue
                if not match(f):
                    del results[f]

        # Case insensitive filesystems cannot rely on lstat() failing to detect
        # a case-only rename. Prune the stat object for any file that does not
        # match the case in the filesystem, if there are multiple files that
        # normalize to the same path.
        if match.isexact() and self._checkcase:
            normed = {}

            for f, st in pycompat.iteritems(results):
                if st is None:
                    continue

                nc = util.normcase(f)
                paths = normed.get(nc)

                if paths is None:
                    paths = set()
                    normed[nc] = paths

                paths.add(f)

            for norm, paths in pycompat.iteritems(normed):
                if len(paths) > 1:
                    for path in paths:
                        folded = self._discoverpath(
                            path, norm, True, None, self._map.dirfoldmap
                        )
                        if path != folded:
                            results[path] = None

        return results, dirsfound, dirsnotfound
1079
1061
    def walk(self, match, subrepos, unknown, ignored, full=True):
        """
        Walk recursively through the directory tree, finding all files
        matched by match.

        If full is False, maybe skip some known-clean files.

        Return a dict mapping filename to stat-like object (either
        mercurial.osutil.stat instance or return value of os.stat()).

        """
        # full is a flag that extensions that hook into walk can use -- this
        # implementation doesn't use it at all. This satisfies the contract
        # because we only guarantee a "maybe".

        # Pick ignore predicates according to which file classes the caller
        # wants listed; util.always/never short-circuit the checks entirely.
        if ignored:
            ignore = util.never
            dirignore = util.never
        elif unknown:
            ignore = self._ignore
            dirignore = self._dirignore
        else:
            # if not unknown and not ignored, drop dir recursion and step 2
            ignore = util.always
            dirignore = util.always

        # Bind frequently-used attributes and functions to locals: these are
        # looked up on every entry of the (potentially huge) walk below.
        matchfn = match.matchfn
        matchalways = match.always()
        matchtdir = match.traversedir
        dmap = self._map
        listdir = util.listdir
        lstat = os.lstat
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join

        exact = skipstep3 = False
        if match.isexact():  # match.exact
            exact = True
            dirignore = util.always  # skip step 2
        elif match.prefix():  # match.match, no patterns
            skipstep3 = True

        if not exact and self._checkcase:
            normalize = self._normalize
            normalizefile = self._normalizefile
            skipstep3 = False
        else:
            normalize = self._normalize
            normalizefile = None

        # step 1: find all explicit files
        results, work, dirsnotfound = self._walkexplicit(match, subrepos)
        if matchtdir:
            for d in work:
                matchtdir(d[0])
            for d in dirsnotfound:
                matchtdir(d)

        skipstep3 = skipstep3 and not (work or dirsnotfound)
        # directories that are entirely ignored need no recursion
        work = [d for d in work if not dirignore(d[0])]

        # step 2: visit subdirectories
        def traverse(work, alreadynormed):
            # Iterative depth-first traversal; `work` is the stack of
            # directories still to list.
            wadd = work.append
            while work:
                tracing.counter('dirstate.walk work', len(work))
                nd = work.pop()
                visitentries = match.visitchildrenset(nd)
                if not visitentries:
                    continue
                if visitentries == b'this' or visitentries == b'all':
                    visitentries = None
                skip = None
                if nd != b'':
                    skip = b'.hg'
                try:
                    with tracing.log('dirstate.walk.traverse listdir %s', nd):
                        entries = listdir(join(nd), stat=True, skip=skip)
                except OSError as inst:
                    # unreadable or vanished directory: report via match.bad
                    # and keep walking instead of aborting the whole status
                    if inst.errno in (errno.EACCES, errno.ENOENT):
                        match.bad(
                            self.pathto(nd), encoding.strtolocal(inst.strerror)
                        )
                        continue
                    raise
                for f, kind, st in entries:
                    # Some matchers may return files in the visitentries set,
                    # instead of 'this', if the matcher explicitly mentions them
                    # and is not an exactmatcher. This is acceptable; we do not
                    # make any hard assumptions about file-or-directory below
                    # based on the presence of `f` in visitentries. If
                    # visitchildrenset returned a set, we can always skip the
                    # entries *not* in the set it provided regardless of whether
                    # they're actually a file or a directory.
                    if visitentries and f not in visitentries:
                        continue
                    if normalizefile:
                        # even though f might be a directory, we're only
                        # interested in comparing it to files currently in the
                        # dmap -- therefore normalizefile is enough
                        nf = normalizefile(
                            nd and (nd + b"/" + f) or f, True, True
                        )
                    else:
                        nf = nd and (nd + b"/" + f) or f
                    if nf not in results:
                        if kind == dirkind:
                            if not ignore(nf):
                                if matchtdir:
                                    matchtdir(nf)
                                wadd(nf)
                            if nf in dmap and (matchalways or matchfn(nf)):
                                results[nf] = None
                        elif kind == regkind or kind == lnkkind:
                            if nf in dmap:
                                if matchalways or matchfn(nf):
                                    results[nf] = st
                            elif (matchalways or matchfn(nf)) and not ignore(
                                nf
                            ):
                                # unknown file -- normalize if necessary
                                if not alreadynormed:
                                    nf = normalize(nf, False, True)
                                results[nf] = st
                        elif nf in dmap and (matchalways or matchfn(nf)):
                            # entry of another kind (fifo, device, ...) that
                            # the dirstate tracks: report with no stat info
                            results[nf] = None

        for nd, d in work:
            # alreadynormed means that processwork doesn't have to do any
            # expensive directory normalization
            alreadynormed = not normalize or nd == d
            traverse([d], alreadynormed)

        for s in subrepos:
            del results[s]
        del results[b'.hg']

        # step 3: visit remaining files from dmap
        if not skipstep3 and not exact:
            # If a dmap file is not in results yet, it was either
            # a) not matching matchfn b) ignored, c) missing, or d) under a
            # symlink directory.
            if not results and matchalways:
                visit = [f for f in dmap]
            else:
                visit = [f for f in dmap if f not in results and matchfn(f)]
            visit.sort()

            if unknown:
                # unknown == True means we walked all dirs under the roots
                # that wasn't ignored, and everything that matched was stat'ed
                # and is already in results.
                # The rest must thus be ignored or under a symlink.
                audit_path = pathutil.pathauditor(self._root, cached=True)

                for nf in iter(visit):
                    # If a stat for the same file was already added with a
                    # different case, don't add one for this, since that would
                    # make it appear as if the file exists under both names
                    # on disk.
                    if (
                        normalizefile
                        and normalizefile(nf, True, True) in results
                    ):
                        results[nf] = None
                    # Report ignored items in the dmap as long as they are not
                    # under a symlink directory.
                    elif audit_path.check(nf):
                        try:
                            results[nf] = lstat(join(nf))
                            # file was just ignored, no links, and exists
                        except OSError:
                            # file doesn't exist
                            results[nf] = None
                    else:
                        # It's either missing or under a symlink directory
                        # which we in this case report as missing
                        results[nf] = None
            else:
                # We may not have walked the full directory tree above,
                # so stat and check everything we missed.
                iv = iter(visit)
                for st in util.statfiles([join(i) for i in visit]):
                    results[next(iv)] = st
        return results
1267
1249
    def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
        """Compute working-copy status through the Rust implementation.

        Returns the same ``(lookup, status)`` pair as :meth:`status`.  The
        underlying ``rustmod.status`` call may raise
        ``rustmod.FallbackError`` (handled by the caller in ``status()``),
        in which case the pure-Python walk is used instead.
        """
        # Force Rayon (Rust parallelism library) to respect the number of
        # workers. This is a temporary workaround until Rust code knows
        # how to read the config file.
        numcpus = self._ui.configint(b"worker", b"numcpus")
        if numcpus is not None:
            encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)

        workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
        if not workers_enabled:
            # single-threaded Rayon when workers are disabled in config
            encoding.environ[b"RAYON_NUM_THREADS"] = b"1"

        (
            lookup,
            modified,
            added,
            removed,
            deleted,
            clean,
            ignored,
            unknown,
            warnings,
            bad,
            traversed,
            dirty,
        ) = rustmod.status(
            self._map._rustmap,
            matcher,
            self._rootdir,
            self._ignorefiles(),
            self._checkexec,
            self._lastnormaltime,
            bool(list_clean),
            bool(list_ignored),
            bool(list_unknown),
            bool(matcher.traversedir),
        )

        # the Rust side may have fixed up dirstate entries on the fly;
        # remember that the dirstate needs writing out in that case
        self._dirty |= dirty

        if matcher.traversedir:
            for dir in traversed:
                matcher.traversedir(dir)

        if self._ui.warn:
            # Relay ignore-file warnings collected by the Rust walker.
            for item in warnings:
                if isinstance(item, tuple):
                    # (file_path, syntax): invalid syntax directive
                    file_path, syntax = item
                    msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
                        file_path,
                        syntax,
                    )
                    self._ui.warn(msg)
                else:
                    # bare path: the pattern file could not be read
                    msg = _(b"skipping unreadable pattern file '%s': %s\n")
                    self._ui.warn(
                        msg
                        % (
                            pathutil.canonpath(
                                self._rootdir, self._rootdir, item
                            ),
                            b"No such file or directory",
                        )
                    )

        for (fn, message) in bad:
            matcher.bad(fn, encoding.strtolocal(message))

        status = scmutil.status(
            modified=modified,
            added=added,
            removed=removed,
            deleted=deleted,
            unknown=unknown,
            ignored=ignored,
            clean=clean,
        )
        return (lookup, status)
1346
1328
    def status(self, match, subrepos, ignored, clean, unknown):
        """Determine the status of the working copy relative to the
        dirstate and return a pair of (unsure, status), where status is of type
        scmutil.status and:

          unsure:
            files that might have been modified since the dirstate was
            written, but need to be read to be sure (size is the same
            but mtime differs)
          status.modified:
            files that have definitely been modified since the dirstate
            was written (different size or mode)
          status.clean:
            files that have definitely not been modified since the
            dirstate was written
        """
        # NOTE: the boolean parameters ignored/clean/unknown are saved under
        # list* names and then deliberately rebound as the result lists.
        listignored, listclean, listunknown = ignored, clean, unknown
        lookup, modified, added, unknown, ignored = [], [], [], [], []
        removed, deleted, clean = [], [], []

        dmap = self._map
        dmap.preload()

        # Decide whether the Rust fast path can serve this request.
        use_rust = True

        allowed_matchers = (
            matchmod.alwaysmatcher,
            matchmod.exactmatcher,
            matchmod.includematcher,
        )

        if rustmod is None:
            use_rust = False
        elif self._checkcase:
            # Case-insensitive filesystems are not handled yet
            use_rust = False
        elif subrepos:
            use_rust = False
        elif sparse.enabled:
            use_rust = False
        elif not isinstance(match, allowed_matchers):
            # Some matchers have yet to be implemented
            use_rust = False

        if use_rust:
            try:
                return self._rust_status(
                    match, listclean, listignored, listunknown
                )
            except rustmod.FallbackError:
                # Rust bailed out; fall through to the Python implementation.
                pass

        def noop(f):
            # placeholder "append" used when a category isn't requested
            pass

        # Bind hot-path attribute lookups to locals for the big loop below.
        dcontains = dmap.__contains__
        dget = dmap.__getitem__
        ladd = lookup.append  # aka "unsure"
        madd = modified.append
        aadd = added.append
        uadd = unknown.append if listunknown else noop
        iadd = ignored.append if listignored else noop
        radd = removed.append
        dadd = deleted.append
        cadd = clean.append if listclean else noop
        mexact = match.exact
        dirignore = self._dirignore
        checkexec = self._checkexec
        copymap = self._map.copymap
        lastnormaltime = self._lastnormaltime

        # We need to do full walks when either
        # - we're listing all clean files, or
        # - match.traversedir does something, because match.traversedir should
        #   be called for every dir in the working dir
        full = listclean or match.traversedir is not None
        for fn, st in pycompat.iteritems(
            self.walk(match, subrepos, listunknown, listignored, full=full)
        ):
            if not dcontains(fn):
                # not tracked by the dirstate: either ignored or unknown
                if (listignored or mexact(fn)) and dirignore(fn):
                    if listignored:
                        iadd(fn)
                else:
                    uadd(fn)
                continue

            # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
            # written like that for performance reasons. dmap[fn] is not a
            # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
            # opcode has fast paths when the value to be unpacked is a tuple or
            # a list, but falls back to creating a full-fledged iterator in
            # general. That is much slower than simply accessing and storing the
            # tuple members one by one.
            t = dget(fn)
            mode = t.mode
            size = t.size
            time = t.mtime

            if not st and t.tracked:
                # tracked but absent on disk -> deleted
                dadd(fn)
            elif t.merged:
                madd(fn)
            elif t.added:
                aadd(fn)
            elif t.removed:
                radd(fn)
            elif t.tracked:
                # tracked and present: compare stored metadata with the
                # on-disk stat to classify modified/lookup/clean
                if (
                    size >= 0
                    and (
                        (size != st.st_size and size != st.st_size & _rangemask)
                        or ((mode ^ st.st_mode) & 0o100 and checkexec)
                    )
                    or t.from_p2
                    or fn in copymap
                ):
                    if stat.S_ISLNK(st.st_mode) and size != st.st_size:
                        # issue6456: Size returned may be longer due to
                        # encryption on EXT-4 fscrypt, undecided.
                        ladd(fn)
                    else:
                        madd(fn)
                elif (
                    time != st[stat.ST_MTIME]
                    and time != st[stat.ST_MTIME] & _rangemask
                ):
                    # same size but mtime differs: needs content comparison
                    ladd(fn)
                elif st[stat.ST_MTIME] == lastnormaltime:
                    # fn may have just been marked as normal and it may have
                    # changed in the same second without changing its size.
                    # This can happen if we quickly do multiple commits.
                    # Force lookup, so we don't miss such a racy file change.
                    ladd(fn)
                elif listclean:
                    cadd(fn)
        status = scmutil.status(
            modified, added, removed, deleted, unknown, ignored, clean
        )
        return (lookup, status)
1487
1469
1488 def matches(self, match):
1470 def matches(self, match):
1489 """
1471 """
1490 return files in the dirstate (in whatever state) filtered by match
1472 return files in the dirstate (in whatever state) filtered by match
1491 """
1473 """
1492 dmap = self._map
1474 dmap = self._map
1493 if rustmod is not None:
1475 if rustmod is not None:
1494 dmap = self._map._rustmap
1476 dmap = self._map._rustmap
1495
1477
1496 if match.always():
1478 if match.always():
1497 return dmap.keys()
1479 return dmap.keys()
1498 files = match.files()
1480 files = match.files()
1499 if match.isexact():
1481 if match.isexact():
1500 # fast path -- filter the other way around, since typically files is
1482 # fast path -- filter the other way around, since typically files is
1501 # much smaller than dmap
1483 # much smaller than dmap
1502 return [f for f in files if f in dmap]
1484 return [f for f in files if f in dmap]
1503 if match.prefix() and all(fn in dmap for fn in files):
1485 if match.prefix() and all(fn in dmap for fn in files):
1504 # fast path -- all the values are known to be files, so just return
1486 # fast path -- all the values are known to be files, so just return
1505 # that
1487 # that
1506 return list(files)
1488 return list(files)
1507 return [f for f in dmap if match(f)]
1489 return [f for f in dmap if match(f)]
1508
1490
1509 def _actualfilename(self, tr):
1491 def _actualfilename(self, tr):
1510 if tr:
1492 if tr:
1511 return self._pendingfilename
1493 return self._pendingfilename
1512 else:
1494 else:
1513 return self._filename
1495 return self._filename
1514
1496
    def savebackup(self, tr, backupname):
        '''Save current dirstate into backup file

        ``tr`` is the active transaction (or None); it controls both the
        file actually written (pending vs. regular dirstate) and the
        registration of cleanup hooks on the transaction.
        '''
        filename = self._actualfilename(tr)
        # the backup must be a distinct file, otherwise the copy below
        # would clobber the very data it is supposed to preserve
        assert backupname != filename

        # use '_writedirstate' instead of 'write' to write changes certainly,
        # because the latter omits writing out if transaction is running.
        # output file will be used to create backup of dirstate at this point.
        if self._dirty or not self._opener.exists(filename):
            self._writedirstate(
                tr,
                self._opener(filename, b"w", atomictemp=True, checkambig=True),
            )

        if tr:
            # ensure that subsequent tr.writepending returns True for
            # changes written out above, even if dirstate is never
            # changed after this
            tr.addfilegenerator(
                b'dirstate',
                (self._filename,),
                lambda f: self._writedirstate(tr, f),
                location=b'plain',
            )

            # ensure that pending file written above is unlinked at
            # failure, even if tr.writepending isn't invoked until the
            # end of this transaction
            tr.registertmp(filename, location=b'plain')

        self._opener.tryunlink(backupname)
        # hardlink backup is okay because _writedirstate is always called
        # with an "atomictemp=True" file.
        util.copyfile(
            self._opener.join(filename),
            self._opener.join(backupname),
            hardlink=True,
        )
1553
1535
    def restorebackup(self, tr, backupname):
        '''Restore dirstate by backup file

        Moves (or, when backup and target are already the same file,
        simply removes) ``backupname`` back into place as the dirstate
        file appropriate for transaction ``tr``.
        '''
        # this "invalidate()" prevents "wlock.release()" from writing
        # changes of dirstate out after restoring from backup file
        self.invalidate()
        filename = self._actualfilename(tr)
        o = self._opener
        if util.samefile(o.join(backupname), o.join(filename)):
            # backup is a hardlink to (or the same inode as) the target:
            # nothing to restore, just drop the extra name
            o.unlink(backupname)
        else:
            # checkambig=True guards against mtime/inode ambiguity races
            o.rename(backupname, filename, checkambig=True)
1565
1547
1566 def clearbackup(self, tr, backupname):
1548 def clearbackup(self, tr, backupname):
1567 '''Clear backup file'''
1549 '''Clear backup file'''
1568 self._opener.unlink(backupname)
1550 self._opener.unlink(backupname)
General Comments 0
You need to be logged in to leave comments. Login now