##// END OF EJS Templates
dirstate: properly update `_lastnormaltime` in `update_file`...
marmoute -
r48495:f98145ce default
parent child Browse files
Show More
@@ -1,1656 +1,1664 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .pycompat import delattr
17 from .pycompat import delattr
18
18
19 from hgdemandimport import tracing
19 from hgdemandimport import tracing
20
20
21 from . import (
21 from . import (
22 dirstatemap,
22 dirstatemap,
23 encoding,
23 encoding,
24 error,
24 error,
25 match as matchmod,
25 match as matchmod,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 sparse,
30 sparse,
31 util,
31 util,
32 )
32 )
33
33
34 from .interfaces import (
34 from .interfaces import (
35 dirstate as intdirstate,
35 dirstate as intdirstate,
36 util as interfaceutil,
36 util as interfaceutil,
37 )
37 )
38
38
# Load the (possibly C- or Rust-accelerated) implementations via the policy
# module; `parsers` falls back to pure Python when no extension is built.
parsers = policy.importmod('parsers')
rustmod = policy.importrust('dirstate')

# dirstate-v2 is only available when the Rust extension could be loaded
SUPPORTS_DIRSTATE_V2 = rustmod is not None

# short module-local aliases used throughout this file
propertycache = util.propertycache
filecache = scmutil.filecache
_rangemask = dirstatemap.rangemask

DirstateItem = parsers.DirstateItem
49
49
50
50
class repocache(filecache):
    """A filecache specialization for files that live under ``.hg/``.

    Paths are resolved through the repository opener instead of the
    working-directory root.
    """

    def join(self, obj, fname):
        """Resolve *fname* relative to the ``.hg/`` directory of *obj*."""
        opener = obj._opener
        return opener.join(fname)
56
56
57
57
class rootcache(filecache):
    """A filecache specialization for files in the repository root."""

    def join(self, obj, fname):
        """Resolve *fname* relative to the working-directory root of *obj*."""
        joiner = obj._join
        return joiner(fname)
63
63
64
64
65 def _getfsnow(vfs):
65 def _getfsnow(vfs):
66 '''Get "now" timestamp on filesystem'''
66 '''Get "now" timestamp on filesystem'''
67 tmpfd, tmpname = vfs.mkstemp()
67 tmpfd, tmpname = vfs.mkstemp()
68 try:
68 try:
69 return os.fstat(tmpfd)[stat.ST_MTIME]
69 return os.fstat(tmpfd)[stat.ST_MTIME]
70 finally:
70 finally:
71 os.close(tmpfd)
71 os.close(tmpfd)
72 vfs.unlink(tmpname)
72 vfs.unlink(tmpname)
73
73
74
74
def requires_parents_change(func):
    """Decorator: *func* may only run inside a ``parentchange`` context.

    Invoking the wrapped method while no parent change is pending is a
    programming error and raises ``error.ProgrammingError``.
    """

    def wrap(self, *args, **kwargs):
        if not self.pendingparentchange():
            raise error.ProgrammingError(
                'calling `%s` outside of a parentchange context' % func.__name__
            )
        return func(self, *args, **kwargs)

    return wrap
84
84
85
85
def requires_no_parents_change(func):
    """Decorator: *func* must NOT run inside a ``parentchange`` context.

    Invoking the wrapped method while a parent change is pending is a
    programming error and raises ``error.ProgrammingError``.
    """

    def wrap(self, *args, **kwargs):
        if self.pendingparentchange():
            raise error.ProgrammingError(
                'calling `%s` inside of a parentchange context' % func.__name__
            )
        return func(self, *args, **kwargs)

    return wrap
95
95
96
96
97 @interfaceutil.implementer(intdirstate.idirstate)
97 @interfaceutil.implementer(intdirstate.idirstate)
98 class dirstate(object):
98 class dirstate(object):
    def __init__(
        self,
        opener,
        ui,
        root,
        validate,
        sparsematchfn,
        nodeconstants,
        use_dirstate_v2,
    ):
        """Create a new dirstate object.

        opener is an open()-like callable that can be used to open the
        dirstate file; root is the root of the directory tracked by
        the dirstate.
        """
        self._use_dirstate_v2 = use_dirstate_v2
        self._nodeconstants = nodeconstants
        self._opener = opener
        self._validate = validate
        self._root = root
        self._sparsematchfn = sparsematchfn
        # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
        # UNC path pointing to root share (issue4557)
        self._rootdir = pathutil.normasprefix(root)
        # True while the in-memory state differs from what is on disk
        self._dirty = False
        # mtime of the most recently normal-marked file; reset by invalidate()
        self._lastnormaltime = 0
        self._ui = ui
        self._filecache = {}
        # depth of currently open parentchange() contexts (see parentchange)
        self._parentwriters = 0
        self._filename = b'dirstate'
        self._pendingfilename = b'%s.pending' % self._filename
        self._plchangecallbacks = {}
        # parents as of the last write; None until setparents() is called
        self._origpl = None
        self._updatedfiles = set()
        self._mapcls = dirstatemap.dirstatemap
        # Access and cache cwd early, so we don't access it for the first time
        # after a working-copy update caused it to not exist (accessing it then
        # raises an exception).
        self._cwd
139
139
140 def prefetch_parents(self):
140 def prefetch_parents(self):
141 """make sure the parents are loaded
141 """make sure the parents are loaded
142
142
143 Used to avoid a race condition.
143 Used to avoid a race condition.
144 """
144 """
145 self._pl
145 self._pl
146
146
    @contextlib.contextmanager
    def parentchange(self):
        """Context manager for handling dirstate parents.

        If an exception occurs in the scope of the context manager,
        the incoherent dirstate won't be written when wlock is
        released.

        Contexts may nest; ``_parentwriters`` counts the open ones.
        """
        self._parentwriters += 1
        yield
        # Typically we want the "undo" step of a context manager in a
        # finally block so it happens even when an exception
        # occurs. In this case, however, we only want to decrement
        # parentwriters if the code in the with statement exits
        # normally, so we don't have a try/finally here on purpose.
        self._parentwriters -= 1
163
163
164 def pendingparentchange(self):
164 def pendingparentchange(self):
165 """Returns true if the dirstate is in the middle of a set of changes
165 """Returns true if the dirstate is in the middle of a set of changes
166 that modify the dirstate parent.
166 that modify the dirstate parent.
167 """
167 """
168 return self._parentwriters > 0
168 return self._parentwriters > 0
169
169
    @propertycache
    def _map(self):
        """Return the dirstate contents (see documentation for dirstatemap).

        The assignment below stores the constructed map directly on the
        instance, so construction happens at most once per dirstate object.
        """
        self._map = self._mapcls(
            self._ui,
            self._opener,
            self._root,
            self._nodeconstants,
            self._use_dirstate_v2,
        )
        return self._map
181
181
182 @property
182 @property
183 def _sparsematcher(self):
183 def _sparsematcher(self):
184 """The matcher for the sparse checkout.
184 """The matcher for the sparse checkout.
185
185
186 The working directory may not include every file from a manifest. The
186 The working directory may not include every file from a manifest. The
187 matcher obtained by this property will match a path if it is to be
187 matcher obtained by this property will match a path if it is to be
188 included in the working directory.
188 included in the working directory.
189 """
189 """
190 # TODO there is potential to cache this property. For now, the matcher
190 # TODO there is potential to cache this property. For now, the matcher
191 # is resolved on every access. (But the called function does use a
191 # is resolved on every access. (But the called function does use a
192 # cache to keep the lookup fast.)
192 # cache to keep the lookup fast.)
193 return self._sparsematchfn()
193 return self._sparsematchfn()
194
194
    @repocache(b'branch')
    def _branch(self):
        """Return the current branch name read from ``.hg/branch``.

        Falls back to ``b"default"`` when the file is missing or empty.
        """
        try:
            return self._opener.read(b"branch").strip() or b"default"
        except IOError as inst:
            # only a missing file means "default"; propagate real I/O errors
            if inst.errno != errno.ENOENT:
                raise
            return b"default"
203
203
204 @property
204 @property
205 def _pl(self):
205 def _pl(self):
206 return self._map.parents()
206 return self._map.parents()
207
207
208 def hasdir(self, d):
208 def hasdir(self, d):
209 return self._map.hastrackeddir(d)
209 return self._map.hastrackeddir(d)
210
210
211 @rootcache(b'.hgignore')
211 @rootcache(b'.hgignore')
212 def _ignore(self):
212 def _ignore(self):
213 files = self._ignorefiles()
213 files = self._ignorefiles()
214 if not files:
214 if not files:
215 return matchmod.never()
215 return matchmod.never()
216
216
217 pats = [b'include:%s' % f for f in files]
217 pats = [b'include:%s' % f for f in files]
218 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
218 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
219
219
220 @propertycache
220 @propertycache
221 def _slash(self):
221 def _slash(self):
222 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
222 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
223
223
224 @propertycache
224 @propertycache
225 def _checklink(self):
225 def _checklink(self):
226 return util.checklink(self._root)
226 return util.checklink(self._root)
227
227
228 @propertycache
228 @propertycache
229 def _checkexec(self):
229 def _checkexec(self):
230 return bool(util.checkexec(self._root))
230 return bool(util.checkexec(self._root))
231
231
232 @propertycache
232 @propertycache
233 def _checkcase(self):
233 def _checkcase(self):
234 return not util.fscasesensitive(self._join(b'.hg'))
234 return not util.fscasesensitive(self._join(b'.hg'))
235
235
236 def _join(self, f):
236 def _join(self, f):
237 # much faster than os.path.join()
237 # much faster than os.path.join()
238 # it's safe because f is always a relative path
238 # it's safe because f is always a relative path
239 return self._rootdir + f
239 return self._rootdir + f
240
240
    def flagfunc(self, buildfallback):
        """Return a callable mapping a tracked path to its flag string.

        The returned function yields b'l' for symlinks, b'x' for executable
        files and b'' otherwise.  When the filesystem cannot represent
        symlinks and/or exec bits, ``buildfallback()`` supplies flags for
        the missing capability instead.
        """
        if self._checklink and self._checkexec:
            # best case: the filesystem records both symlinks and exec bits,
            # so a single lstat answers everything

            def f(x):
                try:
                    st = os.lstat(self._join(x))
                    if util.statislink(st):
                        return b'l'
                    if util.statisexec(st):
                        return b'x'
                except OSError:
                    # missing/unreadable file: treat as flagless
                    pass
                return b''

            return f

        fallback = buildfallback()
        if self._checklink:
            # symlinks are real on disk; exec bit comes from the fallback

            def f(x):
                if os.path.islink(self._join(x)):
                    return b'l'
                if b'x' in fallback(x):
                    return b'x'
                return b''

            return f
        if self._checkexec:
            # exec bit is real on disk; symlink flag comes from the fallback

            def f(x):
                if b'l' in fallback(x):
                    return b'l'
                if util.isexec(self._join(x)):
                    return b'x'
                return b''

            return f
        else:
            # neither capability is available: rely entirely on the fallback
            return fallback
280
280
281 @propertycache
281 @propertycache
282 def _cwd(self):
282 def _cwd(self):
283 # internal config: ui.forcecwd
283 # internal config: ui.forcecwd
284 forcecwd = self._ui.config(b'ui', b'forcecwd')
284 forcecwd = self._ui.config(b'ui', b'forcecwd')
285 if forcecwd:
285 if forcecwd:
286 return forcecwd
286 return forcecwd
287 return encoding.getcwd()
287 return encoding.getcwd()
288
288
289 def getcwd(self):
289 def getcwd(self):
290 """Return the path from which a canonical path is calculated.
290 """Return the path from which a canonical path is calculated.
291
291
292 This path should be used to resolve file patterns or to convert
292 This path should be used to resolve file patterns or to convert
293 canonical paths back to file paths for display. It shouldn't be
293 canonical paths back to file paths for display. It shouldn't be
294 used to get real file paths. Use vfs functions instead.
294 used to get real file paths. Use vfs functions instead.
295 """
295 """
296 cwd = self._cwd
296 cwd = self._cwd
297 if cwd == self._root:
297 if cwd == self._root:
298 return b''
298 return b''
299 # self._root ends with a path separator if self._root is '/' or 'C:\'
299 # self._root ends with a path separator if self._root is '/' or 'C:\'
300 rootsep = self._root
300 rootsep = self._root
301 if not util.endswithsep(rootsep):
301 if not util.endswithsep(rootsep):
302 rootsep += pycompat.ossep
302 rootsep += pycompat.ossep
303 if cwd.startswith(rootsep):
303 if cwd.startswith(rootsep):
304 return cwd[len(rootsep) :]
304 return cwd[len(rootsep) :]
305 else:
305 else:
306 # we're outside the repo. return an absolute path.
306 # we're outside the repo. return an absolute path.
307 return cwd
307 return cwd
308
308
309 def pathto(self, f, cwd=None):
309 def pathto(self, f, cwd=None):
310 if cwd is None:
310 if cwd is None:
311 cwd = self.getcwd()
311 cwd = self.getcwd()
312 path = util.pathto(self._root, cwd, f)
312 path = util.pathto(self._root, cwd, f)
313 if self._slash:
313 if self._slash:
314 return util.pconvert(path)
314 return util.pconvert(path)
315 return path
315 return path
316
316
317 def __getitem__(self, key):
317 def __getitem__(self, key):
318 """Return the current state of key (a filename) in the dirstate.
318 """Return the current state of key (a filename) in the dirstate.
319
319
320 States are:
320 States are:
321 n normal
321 n normal
322 m needs merging
322 m needs merging
323 r marked for removal
323 r marked for removal
324 a marked for addition
324 a marked for addition
325 ? not tracked
325 ? not tracked
326
326
327 XXX The "state" is a bit obscure to be in the "public" API. we should
327 XXX The "state" is a bit obscure to be in the "public" API. we should
328 consider migrating all user of this to going through the dirstate entry
328 consider migrating all user of this to going through the dirstate entry
329 instead.
329 instead.
330 """
330 """
331 entry = self._map.get(key)
331 entry = self._map.get(key)
332 if entry is not None:
332 if entry is not None:
333 return entry.state
333 return entry.state
334 return b'?'
334 return b'?'
335
335
336 def __contains__(self, key):
336 def __contains__(self, key):
337 return key in self._map
337 return key in self._map
338
338
339 def __iter__(self):
339 def __iter__(self):
340 return iter(sorted(self._map))
340 return iter(sorted(self._map))
341
341
342 def items(self):
342 def items(self):
343 return pycompat.iteritems(self._map)
343 return pycompat.iteritems(self._map)
344
344
345 iteritems = items
345 iteritems = items
346
346
347 def directories(self):
347 def directories(self):
348 return self._map.directories()
348 return self._map.directories()
349
349
350 def parents(self):
350 def parents(self):
351 return [self._validate(p) for p in self._pl]
351 return [self._validate(p) for p in self._pl]
352
352
353 def p1(self):
353 def p1(self):
354 return self._validate(self._pl[0])
354 return self._validate(self._pl[0])
355
355
356 def p2(self):
356 def p2(self):
357 return self._validate(self._pl[1])
357 return self._validate(self._pl[1])
358
358
359 @property
359 @property
360 def in_merge(self):
360 def in_merge(self):
361 """True if a merge is in progress"""
361 """True if a merge is in progress"""
362 return self._pl[1] != self._nodeconstants.nullid
362 return self._pl[1] != self._nodeconstants.nullid
363
363
364 def branch(self):
364 def branch(self):
365 return encoding.tolocal(self._branch)
365 return encoding.tolocal(self._branch)
366
366
    def setparents(self, p1, p2=None):
        """Set dirstate parents to p1 and p2.

        When moving from two parents to one, "merged" entries are
        adjusted to normal and previous copy records are discarded and
        returned by the call.

        See localrepo.setparents()
        """
        if p2 is None:
            p2 = self._nodeconstants.nullid
        if self._parentwriters == 0:
            raise ValueError(
                b"cannot set dirstate parent outside of "
                b"dirstate.parentchange context manager"
            )

        self._dirty = True
        oldp2 = self._pl[1]
        if self._origpl is None:
            # remember the pre-change parents so a later write can tell
            # whether the parents actually moved
            self._origpl = self._pl
        self._map.setparents(p1, p2)
        copies = {}
        if (
            oldp2 != self._nodeconstants.nullid
            and p2 == self._nodeconstants.nullid
        ):
            # leaving a merge: only non-normal / other-parent entries can
            # carry merge or p2 markers that need downgrading
            candidatefiles = self._map.non_normal_or_other_parent_paths()

            for f in candidatefiles:
                s = self._map.get(f)
                if s is None:
                    continue

                # Discard "merged" markers when moving away from a merge state
                if s.merged:
                    source = self._map.copymap.get(f)
                    if source:
                        copies[f] = source
                    self.normallookup(f)
                # Also fix up otherparent markers
                elif s.from_p2:
                    source = self._map.copymap.get(f)
                    if source:
                        copies[f] = source
                    self._add(f)
        return copies
414
414
    def setbranch(self, branch):
        """Persist *branch* (local encoding) as the current branch name.

        The name is written atomically to ``.hg/branch``; on success the
        filecache entry for ``_branch`` is refreshed so later reads do not
        see stale stat data for the replaced file.
        """
        self.__class__._branch.set(self, encoding.fromlocal(branch))
        f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
        try:
            f.write(self._branch + b'\n')
            f.close()

            # make sure filecache has the correct stat info for _branch after
            # replacing the underlying file
            ce = self._filecache[b'_branch']
            if ce:
                ce.refresh()
        except: # re-raises
            f.discard()
            raise
430
430
431 def invalidate(self):
431 def invalidate(self):
432 """Causes the next access to reread the dirstate.
432 """Causes the next access to reread the dirstate.
433
433
434 This is different from localrepo.invalidatedirstate() because it always
434 This is different from localrepo.invalidatedirstate() because it always
435 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
435 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
436 check whether the dirstate has changed before rereading it."""
436 check whether the dirstate has changed before rereading it."""
437
437
438 for a in ("_map", "_branch", "_ignore"):
438 for a in ("_map", "_branch", "_ignore"):
439 if a in self.__dict__:
439 if a in self.__dict__:
440 delattr(self, a)
440 delattr(self, a)
441 self._lastnormaltime = 0
441 self._lastnormaltime = 0
442 self._dirty = False
442 self._dirty = False
443 self._updatedfiles.clear()
443 self._updatedfiles.clear()
444 self._parentwriters = 0
444 self._parentwriters = 0
445 self._origpl = None
445 self._origpl = None
446
446
447 def copy(self, source, dest):
447 def copy(self, source, dest):
448 """Mark dest as a copy of source. Unmark dest if source is None."""
448 """Mark dest as a copy of source. Unmark dest if source is None."""
449 if source == dest:
449 if source == dest:
450 return
450 return
451 self._dirty = True
451 self._dirty = True
452 if source is not None:
452 if source is not None:
453 self._map.copymap[dest] = source
453 self._map.copymap[dest] = source
454 self._updatedfiles.add(source)
454 self._updatedfiles.add(source)
455 self._updatedfiles.add(dest)
455 self._updatedfiles.add(dest)
456 elif self._map.copymap.pop(dest, None):
456 elif self._map.copymap.pop(dest, None):
457 self._updatedfiles.add(dest)
457 self._updatedfiles.add(dest)
458
458
459 def copied(self, file):
459 def copied(self, file):
460 return self._map.copymap.get(file, None)
460 return self._map.copymap.get(file, None)
461
461
462 def copies(self):
462 def copies(self):
463 return self._map.copymap
463 return self._map.copymap
464
464
465 @requires_no_parents_change
465 @requires_no_parents_change
466 def set_tracked(self, filename):
466 def set_tracked(self, filename):
467 """a "public" method for generic code to mark a file as tracked
467 """a "public" method for generic code to mark a file as tracked
468
468
469 This function is to be called outside of "update/merge" case. For
469 This function is to be called outside of "update/merge" case. For
470 example by a command like `hg add X`.
470 example by a command like `hg add X`.
471
471
472 return True the file was previously untracked, False otherwise.
472 return True the file was previously untracked, False otherwise.
473 """
473 """
474 entry = self._map.get(filename)
474 entry = self._map.get(filename)
475 if entry is None:
475 if entry is None:
476 self._add(filename)
476 self._add(filename)
477 return True
477 return True
478 elif not entry.tracked:
478 elif not entry.tracked:
479 self.normallookup(filename)
479 self.normallookup(filename)
480 return True
480 return True
481 return False
481 return False
482
482
483 @requires_no_parents_change
483 @requires_no_parents_change
484 def set_untracked(self, filename):
484 def set_untracked(self, filename):
485 """a "public" method for generic code to mark a file as untracked
485 """a "public" method for generic code to mark a file as untracked
486
486
487 This function is to be called outside of "update/merge" case. For
487 This function is to be called outside of "update/merge" case. For
488 example by a command like `hg remove X`.
488 example by a command like `hg remove X`.
489
489
490 return True the file was previously tracked, False otherwise.
490 return True the file was previously tracked, False otherwise.
491 """
491 """
492 entry = self._map.get(filename)
492 entry = self._map.get(filename)
493 if entry is None:
493 if entry is None:
494 return False
494 return False
495 elif entry.added:
495 elif entry.added:
496 self._drop(filename)
496 self._drop(filename)
497 return True
497 return True
498 else:
498 else:
499 self._remove(filename)
499 self._remove(filename)
500 return True
500 return True
501
501
    @requires_parents_change
    def update_file_p1(
        self,
        filename,
        p1_tracked,
    ):
        """Set a file as tracked in the parent (or not)

        This is to be called when adjusting the dirstate to a new parent
        after a history rewriting operation.

        It should not be called during a merge (p2 != nullid) and only within
        a `with dirstate.parentchange():` context.
        """
        if self.in_merge:
            msg = b'update_file_reference should not be called when merging'
            raise error.ProgrammingError(msg)
        entry = self._map.get(filename)
        if entry is None:
            wc_tracked = False
        else:
            wc_tracked = entry.tracked
        possibly_dirty = False
        if p1_tracked and wc_tracked:
            # the underlying reference might have changed, we will have to
            # check it.
            possibly_dirty = True
        elif not (p1_tracked or wc_tracked):
            # the file is no longer relevant to anyone
            self._drop(filename)
        elif (not p1_tracked) and wc_tracked:
            if entry is not None and entry.added:
                return  # avoid dropping copy information (maybe?)
        elif p1_tracked and not wc_tracked:
            # tracked in p1 but absent from the working copy: nothing extra
            # to record here; reset_state below captures the situation
            pass
        else:
            assert False, 'unreachable'

        # this mean we are doing call for file we do not really care about the
        # data (eg: added or removed), however this should be a minor overhead
        # compared to the overall update process calling this.
        parentfiledata = None
        if wc_tracked:
            parentfiledata = self._get_filedata(filename)

        self._updatedfiles.add(filename)
        self._map.reset_state(
            filename,
            wc_tracked,
            p1_tracked,
            possibly_dirty=possibly_dirty,
            parentfiledata=parentfiledata,
        )
555
555
556 @requires_parents_change
556 @requires_parents_change
557 def update_file(
557 def update_file(
558 self,
558 self,
559 filename,
559 filename,
560 wc_tracked,
560 wc_tracked,
561 p1_tracked,
561 p1_tracked,
562 p2_tracked=False,
562 p2_tracked=False,
563 merged=False,
563 merged=False,
564 clean_p1=False,
564 clean_p1=False,
565 clean_p2=False,
565 clean_p2=False,
566 possibly_dirty=False,
566 possibly_dirty=False,
567 parentfiledata=None,
567 parentfiledata=None,
568 ):
568 ):
569 """update the information about a file in the dirstate
569 """update the information about a file in the dirstate
570
570
571 This is to be called when the direstates parent changes to keep track
571 This is to be called when the direstates parent changes to keep track
572 of what is the file situation in regards to the working copy and its parent.
572 of what is the file situation in regards to the working copy and its parent.
573
573
574 This function must be called within a `dirstate.parentchange` context.
574 This function must be called within a `dirstate.parentchange` context.
575
575
576 note: the API is at an early stage and we might need to ajust it
576 note: the API is at an early stage and we might need to ajust it
577 depending of what information ends up being relevant and useful to
577 depending of what information ends up being relevant and useful to
578 other processing.
578 other processing.
579 """
579 """
580 if merged and (clean_p1 or clean_p2):
580 if merged and (clean_p1 or clean_p2):
581 msg = b'`merged` argument incompatible with `clean_p1`/`clean_p2`'
581 msg = b'`merged` argument incompatible with `clean_p1`/`clean_p2`'
582 raise error.ProgrammingError(msg)
582 raise error.ProgrammingError(msg)
583
583
584 # note: I do not think we need to double check name clash here since we
584 # note: I do not think we need to double check name clash here since we
585 # are in a update/merge case that should already have taken care of
585 # are in a update/merge case that should already have taken care of
586 # this. The test agrees
586 # this. The test agrees
587
587
588 self._dirty = True
588 self._dirty = True
589 self._updatedfiles.add(filename)
589 self._updatedfiles.add(filename)
590
590
591 need_parent_file_data = (
591 need_parent_file_data = (
592 not (possibly_dirty or clean_p2 or merged)
592 not (possibly_dirty or clean_p2 or merged)
593 and wc_tracked
593 and wc_tracked
594 and p1_tracked
594 and p1_tracked
595 )
595 )
596
596
597 # this mean we are doing call for file we do not really care about the
597 # this mean we are doing call for file we do not really care about the
598 # data (eg: added or removed), however this should be a minor overhead
598 # data (eg: added or removed), however this should be a minor overhead
599 # compared to the overall update process calling this.
599 # compared to the overall update process calling this.
600 if need_parent_file_data:
600 if need_parent_file_data:
601 if parentfiledata is None:
601 if parentfiledata is None:
602 parentfiledata = self._get_filedata(filename)
602 parentfiledata = self._get_filedata(filename)
603 mtime = parentfiledata[2]
603 mtime = parentfiledata[2]
604
604
605 if mtime > self._lastnormaltime:
605 if mtime > self._lastnormaltime:
606 # Remember the most recent modification timeslot for
606 # Remember the most recent modification timeslot for
607 # status(), to make sure we won't miss future
607 # status(), to make sure we won't miss future
608 # size-preserving file content modifications that happen
608 # size-preserving file content modifications that happen
609 # within the same timeslot.
609 # within the same timeslot.
610 self._lastnormaltime = mtime
610 self._lastnormaltime = mtime
611
611
612 self._map.reset_state(
612 self._map.reset_state(
613 filename,
613 filename,
614 wc_tracked,
614 wc_tracked,
615 p1_tracked,
615 p1_tracked,
616 p2_tracked=p2_tracked,
616 p2_tracked=p2_tracked,
617 merged=merged,
617 merged=merged,
618 clean_p1=clean_p1,
618 clean_p1=clean_p1,
619 clean_p2=clean_p2,
619 clean_p2=clean_p2,
620 possibly_dirty=possibly_dirty,
620 possibly_dirty=possibly_dirty,
621 parentfiledata=parentfiledata,
621 parentfiledata=parentfiledata,
622 )
622 )
623 if (
624 parentfiledata is not None
625 and parentfiledata[2] > self._lastnormaltime
626 ):
627 # Remember the most recent modification timeslot for status(),
628 # to make sure we won't miss future size-preserving file content
629 # modifications that happen within the same timeslot.
630 self._lastnormaltime = parentfiledata[2]
623
631
624 def _addpath(
632 def _addpath(
625 self,
633 self,
626 f,
634 f,
627 mode=0,
635 mode=0,
628 size=None,
636 size=None,
629 mtime=None,
637 mtime=None,
630 added=False,
638 added=False,
631 merged=False,
639 merged=False,
632 from_p2=False,
640 from_p2=False,
633 possibly_dirty=False,
641 possibly_dirty=False,
634 ):
642 ):
635 entry = self._map.get(f)
643 entry = self._map.get(f)
636 if added or entry is not None and entry.removed:
644 if added or entry is not None and entry.removed:
637 scmutil.checkfilename(f)
645 scmutil.checkfilename(f)
638 if self._map.hastrackeddir(f):
646 if self._map.hastrackeddir(f):
639 msg = _(b'directory %r already in dirstate')
647 msg = _(b'directory %r already in dirstate')
640 msg %= pycompat.bytestr(f)
648 msg %= pycompat.bytestr(f)
641 raise error.Abort(msg)
649 raise error.Abort(msg)
642 # shadows
650 # shadows
643 for d in pathutil.finddirs(f):
651 for d in pathutil.finddirs(f):
644 if self._map.hastrackeddir(d):
652 if self._map.hastrackeddir(d):
645 break
653 break
646 entry = self._map.get(d)
654 entry = self._map.get(d)
647 if entry is not None and not entry.removed:
655 if entry is not None and not entry.removed:
648 msg = _(b'file %r in dirstate clashes with %r')
656 msg = _(b'file %r in dirstate clashes with %r')
649 msg %= (pycompat.bytestr(d), pycompat.bytestr(f))
657 msg %= (pycompat.bytestr(d), pycompat.bytestr(f))
650 raise error.Abort(msg)
658 raise error.Abort(msg)
651 self._dirty = True
659 self._dirty = True
652 self._updatedfiles.add(f)
660 self._updatedfiles.add(f)
653 self._map.addfile(
661 self._map.addfile(
654 f,
662 f,
655 mode=mode,
663 mode=mode,
656 size=size,
664 size=size,
657 mtime=mtime,
665 mtime=mtime,
658 added=added,
666 added=added,
659 merged=merged,
667 merged=merged,
660 from_p2=from_p2,
668 from_p2=from_p2,
661 possibly_dirty=possibly_dirty,
669 possibly_dirty=possibly_dirty,
662 )
670 )
663
671
664 def _get_filedata(self, filename):
672 def _get_filedata(self, filename):
665 """returns"""
673 """returns"""
666 s = os.lstat(self._join(filename))
674 s = os.lstat(self._join(filename))
667 mode = s.st_mode
675 mode = s.st_mode
668 size = s.st_size
676 size = s.st_size
669 mtime = s[stat.ST_MTIME]
677 mtime = s[stat.ST_MTIME]
670 return (mode, size, mtime)
678 return (mode, size, mtime)
671
679
672 def normal(self, f, parentfiledata=None):
680 def normal(self, f, parentfiledata=None):
673 """Mark a file normal and clean.
681 """Mark a file normal and clean.
674
682
675 parentfiledata: (mode, size, mtime) of the clean file
683 parentfiledata: (mode, size, mtime) of the clean file
676
684
677 parentfiledata should be computed from memory (for mode,
685 parentfiledata should be computed from memory (for mode,
678 size), as or close as possible from the point where we
686 size), as or close as possible from the point where we
679 determined the file was clean, to limit the risk of the
687 determined the file was clean, to limit the risk of the
680 file having been changed by an external process between the
688 file having been changed by an external process between the
681 moment where the file was determined to be clean and now."""
689 moment where the file was determined to be clean and now."""
682 if parentfiledata:
690 if parentfiledata:
683 (mode, size, mtime) = parentfiledata
691 (mode, size, mtime) = parentfiledata
684 else:
692 else:
685 (mode, size, mtime) = self._get_filedata(f)
693 (mode, size, mtime) = self._get_filedata(f)
686 self._addpath(f, mode=mode, size=size, mtime=mtime)
694 self._addpath(f, mode=mode, size=size, mtime=mtime)
687 self._map.copymap.pop(f, None)
695 self._map.copymap.pop(f, None)
688 if f in self._map.nonnormalset:
696 if f in self._map.nonnormalset:
689 self._map.nonnormalset.remove(f)
697 self._map.nonnormalset.remove(f)
690 if mtime > self._lastnormaltime:
698 if mtime > self._lastnormaltime:
691 # Remember the most recent modification timeslot for status(),
699 # Remember the most recent modification timeslot for status(),
692 # to make sure we won't miss future size-preserving file content
700 # to make sure we won't miss future size-preserving file content
693 # modifications that happen within the same timeslot.
701 # modifications that happen within the same timeslot.
694 self._lastnormaltime = mtime
702 self._lastnormaltime = mtime
695
703
696 def normallookup(self, f):
704 def normallookup(self, f):
697 '''Mark a file normal, but possibly dirty.'''
705 '''Mark a file normal, but possibly dirty.'''
698 if self.in_merge:
706 if self.in_merge:
699 # if there is a merge going on and the file was either
707 # if there is a merge going on and the file was either
700 # "merged" or coming from other parent (-2) before
708 # "merged" or coming from other parent (-2) before
701 # being removed, restore that state.
709 # being removed, restore that state.
702 entry = self._map.get(f)
710 entry = self._map.get(f)
703 if entry is not None:
711 if entry is not None:
704 # XXX this should probably be dealt with a a lower level
712 # XXX this should probably be dealt with a a lower level
705 # (see `merged_removed` and `from_p2_removed`)
713 # (see `merged_removed` and `from_p2_removed`)
706 if entry.merged_removed or entry.from_p2_removed:
714 if entry.merged_removed or entry.from_p2_removed:
707 source = self._map.copymap.get(f)
715 source = self._map.copymap.get(f)
708 if entry.merged_removed:
716 if entry.merged_removed:
709 self.merge(f)
717 self.merge(f)
710 elif entry.from_p2_removed:
718 elif entry.from_p2_removed:
711 self.otherparent(f)
719 self.otherparent(f)
712 if source is not None:
720 if source is not None:
713 self.copy(source, f)
721 self.copy(source, f)
714 return
722 return
715 elif entry.merged or entry.from_p2:
723 elif entry.merged or entry.from_p2:
716 return
724 return
717 self._addpath(f, possibly_dirty=True)
725 self._addpath(f, possibly_dirty=True)
718 self._map.copymap.pop(f, None)
726 self._map.copymap.pop(f, None)
719
727
720 def otherparent(self, f):
728 def otherparent(self, f):
721 '''Mark as coming from the other parent, always dirty.'''
729 '''Mark as coming from the other parent, always dirty.'''
722 if not self.in_merge:
730 if not self.in_merge:
723 msg = _(b"setting %r to other parent only allowed in merges") % f
731 msg = _(b"setting %r to other parent only allowed in merges") % f
724 raise error.Abort(msg)
732 raise error.Abort(msg)
725 entry = self._map.get(f)
733 entry = self._map.get(f)
726 if entry is not None and entry.tracked:
734 if entry is not None and entry.tracked:
727 # merge-like
735 # merge-like
728 self._addpath(f, merged=True)
736 self._addpath(f, merged=True)
729 else:
737 else:
730 # add-like
738 # add-like
731 self._addpath(f, from_p2=True)
739 self._addpath(f, from_p2=True)
732 self._map.copymap.pop(f, None)
740 self._map.copymap.pop(f, None)
733
741
734 def add(self, f):
742 def add(self, f):
735 '''Mark a file added.'''
743 '''Mark a file added.'''
736 if not self.pendingparentchange():
744 if not self.pendingparentchange():
737 util.nouideprecwarn(
745 util.nouideprecwarn(
738 b"do not use `add` outside of update/merge context."
746 b"do not use `add` outside of update/merge context."
739 b" Use `set_tracked`",
747 b" Use `set_tracked`",
740 b'6.0',
748 b'6.0',
741 stacklevel=2,
749 stacklevel=2,
742 )
750 )
743 self._add(f)
751 self._add(f)
744
752
745 def _add(self, filename):
753 def _add(self, filename):
746 """internal function to mark a file as added"""
754 """internal function to mark a file as added"""
747 self._addpath(filename, added=True)
755 self._addpath(filename, added=True)
748 self._map.copymap.pop(filename, None)
756 self._map.copymap.pop(filename, None)
749
757
750 def remove(self, f):
758 def remove(self, f):
751 '''Mark a file removed'''
759 '''Mark a file removed'''
752 if not self.pendingparentchange():
760 if not self.pendingparentchange():
753 util.nouideprecwarn(
761 util.nouideprecwarn(
754 b"do not use `remove` outside of update/merge context."
762 b"do not use `remove` outside of update/merge context."
755 b" Use `set_untracked`",
763 b" Use `set_untracked`",
756 b'6.0',
764 b'6.0',
757 stacklevel=2,
765 stacklevel=2,
758 )
766 )
759 self._remove(f)
767 self._remove(f)
760
768
761 def _remove(self, filename):
769 def _remove(self, filename):
762 """internal function to mark a file removed"""
770 """internal function to mark a file removed"""
763 self._dirty = True
771 self._dirty = True
764 self._updatedfiles.add(filename)
772 self._updatedfiles.add(filename)
765 self._map.removefile(filename, in_merge=self.in_merge)
773 self._map.removefile(filename, in_merge=self.in_merge)
766
774
767 def merge(self, f):
775 def merge(self, f):
768 '''Mark a file merged.'''
776 '''Mark a file merged.'''
769 if not self.in_merge:
777 if not self.in_merge:
770 return self.normallookup(f)
778 return self.normallookup(f)
771 return self.otherparent(f)
779 return self.otherparent(f)
772
780
773 def drop(self, f):
781 def drop(self, f):
774 '''Drop a file from the dirstate'''
782 '''Drop a file from the dirstate'''
775 if not self.pendingparentchange():
783 if not self.pendingparentchange():
776 util.nouideprecwarn(
784 util.nouideprecwarn(
777 b"do not use `drop` outside of update/merge context."
785 b"do not use `drop` outside of update/merge context."
778 b" Use `set_untracked`",
786 b" Use `set_untracked`",
779 b'6.0',
787 b'6.0',
780 stacklevel=2,
788 stacklevel=2,
781 )
789 )
782 self._drop(f)
790 self._drop(f)
783
791
784 def _drop(self, filename):
792 def _drop(self, filename):
785 """internal function to drop a file from the dirstate"""
793 """internal function to drop a file from the dirstate"""
786 if self._map.dropfile(filename):
794 if self._map.dropfile(filename):
787 self._dirty = True
795 self._dirty = True
788 self._updatedfiles.add(filename)
796 self._updatedfiles.add(filename)
789 self._map.copymap.pop(filename, None)
797 self._map.copymap.pop(filename, None)
790
798
791 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
799 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
792 if exists is None:
800 if exists is None:
793 exists = os.path.lexists(os.path.join(self._root, path))
801 exists = os.path.lexists(os.path.join(self._root, path))
794 if not exists:
802 if not exists:
795 # Maybe a path component exists
803 # Maybe a path component exists
796 if not ignoremissing and b'/' in path:
804 if not ignoremissing and b'/' in path:
797 d, f = path.rsplit(b'/', 1)
805 d, f = path.rsplit(b'/', 1)
798 d = self._normalize(d, False, ignoremissing, None)
806 d = self._normalize(d, False, ignoremissing, None)
799 folded = d + b"/" + f
807 folded = d + b"/" + f
800 else:
808 else:
801 # No path components, preserve original case
809 # No path components, preserve original case
802 folded = path
810 folded = path
803 else:
811 else:
804 # recursively normalize leading directory components
812 # recursively normalize leading directory components
805 # against dirstate
813 # against dirstate
806 if b'/' in normed:
814 if b'/' in normed:
807 d, f = normed.rsplit(b'/', 1)
815 d, f = normed.rsplit(b'/', 1)
808 d = self._normalize(d, False, ignoremissing, True)
816 d = self._normalize(d, False, ignoremissing, True)
809 r = self._root + b"/" + d
817 r = self._root + b"/" + d
810 folded = d + b"/" + util.fspath(f, r)
818 folded = d + b"/" + util.fspath(f, r)
811 else:
819 else:
812 folded = util.fspath(normed, self._root)
820 folded = util.fspath(normed, self._root)
813 storemap[normed] = folded
821 storemap[normed] = folded
814
822
815 return folded
823 return folded
816
824
817 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
825 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
818 normed = util.normcase(path)
826 normed = util.normcase(path)
819 folded = self._map.filefoldmap.get(normed, None)
827 folded = self._map.filefoldmap.get(normed, None)
820 if folded is None:
828 if folded is None:
821 if isknown:
829 if isknown:
822 folded = path
830 folded = path
823 else:
831 else:
824 folded = self._discoverpath(
832 folded = self._discoverpath(
825 path, normed, ignoremissing, exists, self._map.filefoldmap
833 path, normed, ignoremissing, exists, self._map.filefoldmap
826 )
834 )
827 return folded
835 return folded
828
836
829 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
837 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
830 normed = util.normcase(path)
838 normed = util.normcase(path)
831 folded = self._map.filefoldmap.get(normed, None)
839 folded = self._map.filefoldmap.get(normed, None)
832 if folded is None:
840 if folded is None:
833 folded = self._map.dirfoldmap.get(normed, None)
841 folded = self._map.dirfoldmap.get(normed, None)
834 if folded is None:
842 if folded is None:
835 if isknown:
843 if isknown:
836 folded = path
844 folded = path
837 else:
845 else:
838 # store discovered result in dirfoldmap so that future
846 # store discovered result in dirfoldmap so that future
839 # normalizefile calls don't start matching directories
847 # normalizefile calls don't start matching directories
840 folded = self._discoverpath(
848 folded = self._discoverpath(
841 path, normed, ignoremissing, exists, self._map.dirfoldmap
849 path, normed, ignoremissing, exists, self._map.dirfoldmap
842 )
850 )
843 return folded
851 return folded
844
852
845 def normalize(self, path, isknown=False, ignoremissing=False):
853 def normalize(self, path, isknown=False, ignoremissing=False):
846 """
854 """
847 normalize the case of a pathname when on a casefolding filesystem
855 normalize the case of a pathname when on a casefolding filesystem
848
856
849 isknown specifies whether the filename came from walking the
857 isknown specifies whether the filename came from walking the
850 disk, to avoid extra filesystem access.
858 disk, to avoid extra filesystem access.
851
859
852 If ignoremissing is True, missing path are returned
860 If ignoremissing is True, missing path are returned
853 unchanged. Otherwise, we try harder to normalize possibly
861 unchanged. Otherwise, we try harder to normalize possibly
854 existing path components.
862 existing path components.
855
863
856 The normalized case is determined based on the following precedence:
864 The normalized case is determined based on the following precedence:
857
865
858 - version of name already stored in the dirstate
866 - version of name already stored in the dirstate
859 - version of name stored on disk
867 - version of name stored on disk
860 - version provided via command arguments
868 - version provided via command arguments
861 """
869 """
862
870
863 if self._checkcase:
871 if self._checkcase:
864 return self._normalize(path, isknown, ignoremissing)
872 return self._normalize(path, isknown, ignoremissing)
865 return path
873 return path
866
874
867 def clear(self):
875 def clear(self):
868 self._map.clear()
876 self._map.clear()
869 self._lastnormaltime = 0
877 self._lastnormaltime = 0
870 self._updatedfiles.clear()
878 self._updatedfiles.clear()
871 self._dirty = True
879 self._dirty = True
872
880
873 def rebuild(self, parent, allfiles, changedfiles=None):
881 def rebuild(self, parent, allfiles, changedfiles=None):
874 if changedfiles is None:
882 if changedfiles is None:
875 # Rebuild entire dirstate
883 # Rebuild entire dirstate
876 to_lookup = allfiles
884 to_lookup = allfiles
877 to_drop = []
885 to_drop = []
878 lastnormaltime = self._lastnormaltime
886 lastnormaltime = self._lastnormaltime
879 self.clear()
887 self.clear()
880 self._lastnormaltime = lastnormaltime
888 self._lastnormaltime = lastnormaltime
881 elif len(changedfiles) < 10:
889 elif len(changedfiles) < 10:
882 # Avoid turning allfiles into a set, which can be expensive if it's
890 # Avoid turning allfiles into a set, which can be expensive if it's
883 # large.
891 # large.
884 to_lookup = []
892 to_lookup = []
885 to_drop = []
893 to_drop = []
886 for f in changedfiles:
894 for f in changedfiles:
887 if f in allfiles:
895 if f in allfiles:
888 to_lookup.append(f)
896 to_lookup.append(f)
889 else:
897 else:
890 to_drop.append(f)
898 to_drop.append(f)
891 else:
899 else:
892 changedfilesset = set(changedfiles)
900 changedfilesset = set(changedfiles)
893 to_lookup = changedfilesset & set(allfiles)
901 to_lookup = changedfilesset & set(allfiles)
894 to_drop = changedfilesset - to_lookup
902 to_drop = changedfilesset - to_lookup
895
903
896 if self._origpl is None:
904 if self._origpl is None:
897 self._origpl = self._pl
905 self._origpl = self._pl
898 self._map.setparents(parent, self._nodeconstants.nullid)
906 self._map.setparents(parent, self._nodeconstants.nullid)
899
907
900 for f in to_lookup:
908 for f in to_lookup:
901 self.normallookup(f)
909 self.normallookup(f)
902 for f in to_drop:
910 for f in to_drop:
903 self._drop(f)
911 self._drop(f)
904
912
905 self._dirty = True
913 self._dirty = True
906
914
907 def identity(self):
915 def identity(self):
908 """Return identity of dirstate itself to detect changing in storage
916 """Return identity of dirstate itself to detect changing in storage
909
917
910 If identity of previous dirstate is equal to this, writing
918 If identity of previous dirstate is equal to this, writing
911 changes based on the former dirstate out can keep consistency.
919 changes based on the former dirstate out can keep consistency.
912 """
920 """
913 return self._map.identity
921 return self._map.identity
914
922
915 def write(self, tr):
923 def write(self, tr):
916 if not self._dirty:
924 if not self._dirty:
917 return
925 return
918
926
919 filename = self._filename
927 filename = self._filename
920 if tr:
928 if tr:
921 # 'dirstate.write()' is not only for writing in-memory
929 # 'dirstate.write()' is not only for writing in-memory
922 # changes out, but also for dropping ambiguous timestamp.
930 # changes out, but also for dropping ambiguous timestamp.
923 # delayed writing re-raise "ambiguous timestamp issue".
931 # delayed writing re-raise "ambiguous timestamp issue".
924 # See also the wiki page below for detail:
932 # See also the wiki page below for detail:
925 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
933 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
926
934
927 # emulate dropping timestamp in 'parsers.pack_dirstate'
935 # emulate dropping timestamp in 'parsers.pack_dirstate'
928 now = _getfsnow(self._opener)
936 now = _getfsnow(self._opener)
929 self._map.clearambiguoustimes(self._updatedfiles, now)
937 self._map.clearambiguoustimes(self._updatedfiles, now)
930
938
931 # emulate that all 'dirstate.normal' results are written out
939 # emulate that all 'dirstate.normal' results are written out
932 self._lastnormaltime = 0
940 self._lastnormaltime = 0
933 self._updatedfiles.clear()
941 self._updatedfiles.clear()
934
942
935 # delay writing in-memory changes out
943 # delay writing in-memory changes out
936 tr.addfilegenerator(
944 tr.addfilegenerator(
937 b'dirstate',
945 b'dirstate',
938 (self._filename,),
946 (self._filename,),
939 lambda f: self._writedirstate(tr, f),
947 lambda f: self._writedirstate(tr, f),
940 location=b'plain',
948 location=b'plain',
941 )
949 )
942 return
950 return
943
951
944 st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
952 st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
945 self._writedirstate(tr, st)
953 self._writedirstate(tr, st)
946
954
947 def addparentchangecallback(self, category, callback):
955 def addparentchangecallback(self, category, callback):
948 """add a callback to be called when the wd parents are changed
956 """add a callback to be called when the wd parents are changed
949
957
950 Callback will be called with the following arguments:
958 Callback will be called with the following arguments:
951 dirstate, (oldp1, oldp2), (newp1, newp2)
959 dirstate, (oldp1, oldp2), (newp1, newp2)
952
960
953 Category is a unique identifier to allow overwriting an old callback
961 Category is a unique identifier to allow overwriting an old callback
954 with a newer callback.
962 with a newer callback.
955 """
963 """
956 self._plchangecallbacks[category] = callback
964 self._plchangecallbacks[category] = callback
957
965
958 def _writedirstate(self, tr, st):
966 def _writedirstate(self, tr, st):
959 # notify callbacks about parents change
967 # notify callbacks about parents change
960 if self._origpl is not None and self._origpl != self._pl:
968 if self._origpl is not None and self._origpl != self._pl:
961 for c, callback in sorted(
969 for c, callback in sorted(
962 pycompat.iteritems(self._plchangecallbacks)
970 pycompat.iteritems(self._plchangecallbacks)
963 ):
971 ):
964 callback(self, self._origpl, self._pl)
972 callback(self, self._origpl, self._pl)
965 self._origpl = None
973 self._origpl = None
966 # use the modification time of the newly created temporary file as the
974 # use the modification time of the newly created temporary file as the
967 # filesystem's notion of 'now'
975 # filesystem's notion of 'now'
968 now = util.fstat(st)[stat.ST_MTIME] & _rangemask
976 now = util.fstat(st)[stat.ST_MTIME] & _rangemask
969
977
970 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
978 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
971 # timestamp of each entries in dirstate, because of 'now > mtime'
979 # timestamp of each entries in dirstate, because of 'now > mtime'
972 delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
980 delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
973 if delaywrite > 0:
981 if delaywrite > 0:
974 # do we have any files to delay for?
982 # do we have any files to delay for?
975 for f, e in pycompat.iteritems(self._map):
983 for f, e in pycompat.iteritems(self._map):
976 if e.need_delay(now):
984 if e.need_delay(now):
977 import time # to avoid useless import
985 import time # to avoid useless import
978
986
979 # rather than sleep n seconds, sleep until the next
987 # rather than sleep n seconds, sleep until the next
980 # multiple of n seconds
988 # multiple of n seconds
981 clock = time.time()
989 clock = time.time()
982 start = int(clock) - (int(clock) % delaywrite)
990 start = int(clock) - (int(clock) % delaywrite)
983 end = start + delaywrite
991 end = start + delaywrite
984 time.sleep(end - clock)
992 time.sleep(end - clock)
985 now = end # trust our estimate that the end is near now
993 now = end # trust our estimate that the end is near now
986 break
994 break
987
995
988 self._map.write(tr, st, now)
996 self._map.write(tr, st, now)
989 self._lastnormaltime = 0
997 self._lastnormaltime = 0
990 self._dirty = False
998 self._dirty = False
991
999
992 def _dirignore(self, f):
1000 def _dirignore(self, f):
993 if self._ignore(f):
1001 if self._ignore(f):
994 return True
1002 return True
995 for p in pathutil.finddirs(f):
1003 for p in pathutil.finddirs(f):
996 if self._ignore(p):
1004 if self._ignore(p):
997 return True
1005 return True
998 return False
1006 return False
999
1007
1000 def _ignorefiles(self):
1008 def _ignorefiles(self):
1001 files = []
1009 files = []
1002 if os.path.exists(self._join(b'.hgignore')):
1010 if os.path.exists(self._join(b'.hgignore')):
1003 files.append(self._join(b'.hgignore'))
1011 files.append(self._join(b'.hgignore'))
1004 for name, path in self._ui.configitems(b"ui"):
1012 for name, path in self._ui.configitems(b"ui"):
1005 if name == b'ignore' or name.startswith(b'ignore.'):
1013 if name == b'ignore' or name.startswith(b'ignore.'):
1006 # we need to use os.path.join here rather than self._join
1014 # we need to use os.path.join here rather than self._join
1007 # because path is arbitrary and user-specified
1015 # because path is arbitrary and user-specified
1008 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1016 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1009 return files
1017 return files
1010
1018
1011 def _ignorefileandline(self, f):
1019 def _ignorefileandline(self, f):
1012 files = collections.deque(self._ignorefiles())
1020 files = collections.deque(self._ignorefiles())
1013 visited = set()
1021 visited = set()
1014 while files:
1022 while files:
1015 i = files.popleft()
1023 i = files.popleft()
1016 patterns = matchmod.readpatternfile(
1024 patterns = matchmod.readpatternfile(
1017 i, self._ui.warn, sourceinfo=True
1025 i, self._ui.warn, sourceinfo=True
1018 )
1026 )
1019 for pattern, lineno, line in patterns:
1027 for pattern, lineno, line in patterns:
1020 kind, p = matchmod._patsplit(pattern, b'glob')
1028 kind, p = matchmod._patsplit(pattern, b'glob')
1021 if kind == b"subinclude":
1029 if kind == b"subinclude":
1022 if p not in visited:
1030 if p not in visited:
1023 files.append(p)
1031 files.append(p)
1024 continue
1032 continue
1025 m = matchmod.match(
1033 m = matchmod.match(
1026 self._root, b'', [], [pattern], warn=self._ui.warn
1034 self._root, b'', [], [pattern], warn=self._ui.warn
1027 )
1035 )
1028 if m(f):
1036 if m(f):
1029 return (i, lineno, line)
1037 return (i, lineno, line)
1030 visited.add(i)
1038 visited.add(i)
1031 return (None, -1, b"")
1039 return (None, -1, b"")
1032
1040
1033 def _walkexplicit(self, match, subrepos):
1041 def _walkexplicit(self, match, subrepos):
1034 """Get stat data about the files explicitly specified by match.
1042 """Get stat data about the files explicitly specified by match.
1035
1043
1036 Return a triple (results, dirsfound, dirsnotfound).
1044 Return a triple (results, dirsfound, dirsnotfound).
1037 - results is a mapping from filename to stat result. It also contains
1045 - results is a mapping from filename to stat result. It also contains
1038 listings mapping subrepos and .hg to None.
1046 listings mapping subrepos and .hg to None.
1039 - dirsfound is a list of files found to be directories.
1047 - dirsfound is a list of files found to be directories.
1040 - dirsnotfound is a list of files that the dirstate thinks are
1048 - dirsnotfound is a list of files that the dirstate thinks are
1041 directories and that were not found."""
1049 directories and that were not found."""
1042
1050
1043 def badtype(mode):
1051 def badtype(mode):
1044 kind = _(b'unknown')
1052 kind = _(b'unknown')
1045 if stat.S_ISCHR(mode):
1053 if stat.S_ISCHR(mode):
1046 kind = _(b'character device')
1054 kind = _(b'character device')
1047 elif stat.S_ISBLK(mode):
1055 elif stat.S_ISBLK(mode):
1048 kind = _(b'block device')
1056 kind = _(b'block device')
1049 elif stat.S_ISFIFO(mode):
1057 elif stat.S_ISFIFO(mode):
1050 kind = _(b'fifo')
1058 kind = _(b'fifo')
1051 elif stat.S_ISSOCK(mode):
1059 elif stat.S_ISSOCK(mode):
1052 kind = _(b'socket')
1060 kind = _(b'socket')
1053 elif stat.S_ISDIR(mode):
1061 elif stat.S_ISDIR(mode):
1054 kind = _(b'directory')
1062 kind = _(b'directory')
1055 return _(b'unsupported file type (type is %s)') % kind
1063 return _(b'unsupported file type (type is %s)') % kind
1056
1064
1057 badfn = match.bad
1065 badfn = match.bad
1058 dmap = self._map
1066 dmap = self._map
1059 lstat = os.lstat
1067 lstat = os.lstat
1060 getkind = stat.S_IFMT
1068 getkind = stat.S_IFMT
1061 dirkind = stat.S_IFDIR
1069 dirkind = stat.S_IFDIR
1062 regkind = stat.S_IFREG
1070 regkind = stat.S_IFREG
1063 lnkkind = stat.S_IFLNK
1071 lnkkind = stat.S_IFLNK
1064 join = self._join
1072 join = self._join
1065 dirsfound = []
1073 dirsfound = []
1066 foundadd = dirsfound.append
1074 foundadd = dirsfound.append
1067 dirsnotfound = []
1075 dirsnotfound = []
1068 notfoundadd = dirsnotfound.append
1076 notfoundadd = dirsnotfound.append
1069
1077
1070 if not match.isexact() and self._checkcase:
1078 if not match.isexact() and self._checkcase:
1071 normalize = self._normalize
1079 normalize = self._normalize
1072 else:
1080 else:
1073 normalize = None
1081 normalize = None
1074
1082
1075 files = sorted(match.files())
1083 files = sorted(match.files())
1076 subrepos.sort()
1084 subrepos.sort()
1077 i, j = 0, 0
1085 i, j = 0, 0
1078 while i < len(files) and j < len(subrepos):
1086 while i < len(files) and j < len(subrepos):
1079 subpath = subrepos[j] + b"/"
1087 subpath = subrepos[j] + b"/"
1080 if files[i] < subpath:
1088 if files[i] < subpath:
1081 i += 1
1089 i += 1
1082 continue
1090 continue
1083 while i < len(files) and files[i].startswith(subpath):
1091 while i < len(files) and files[i].startswith(subpath):
1084 del files[i]
1092 del files[i]
1085 j += 1
1093 j += 1
1086
1094
1087 if not files or b'' in files:
1095 if not files or b'' in files:
1088 files = [b'']
1096 files = [b'']
1089 # constructing the foldmap is expensive, so don't do it for the
1097 # constructing the foldmap is expensive, so don't do it for the
1090 # common case where files is ['']
1098 # common case where files is ['']
1091 normalize = None
1099 normalize = None
1092 results = dict.fromkeys(subrepos)
1100 results = dict.fromkeys(subrepos)
1093 results[b'.hg'] = None
1101 results[b'.hg'] = None
1094
1102
1095 for ff in files:
1103 for ff in files:
1096 if normalize:
1104 if normalize:
1097 nf = normalize(ff, False, True)
1105 nf = normalize(ff, False, True)
1098 else:
1106 else:
1099 nf = ff
1107 nf = ff
1100 if nf in results:
1108 if nf in results:
1101 continue
1109 continue
1102
1110
1103 try:
1111 try:
1104 st = lstat(join(nf))
1112 st = lstat(join(nf))
1105 kind = getkind(st.st_mode)
1113 kind = getkind(st.st_mode)
1106 if kind == dirkind:
1114 if kind == dirkind:
1107 if nf in dmap:
1115 if nf in dmap:
1108 # file replaced by dir on disk but still in dirstate
1116 # file replaced by dir on disk but still in dirstate
1109 results[nf] = None
1117 results[nf] = None
1110 foundadd((nf, ff))
1118 foundadd((nf, ff))
1111 elif kind == regkind or kind == lnkkind:
1119 elif kind == regkind or kind == lnkkind:
1112 results[nf] = st
1120 results[nf] = st
1113 else:
1121 else:
1114 badfn(ff, badtype(kind))
1122 badfn(ff, badtype(kind))
1115 if nf in dmap:
1123 if nf in dmap:
1116 results[nf] = None
1124 results[nf] = None
1117 except OSError as inst: # nf not found on disk - it is dirstate only
1125 except OSError as inst: # nf not found on disk - it is dirstate only
1118 if nf in dmap: # does it exactly match a missing file?
1126 if nf in dmap: # does it exactly match a missing file?
1119 results[nf] = None
1127 results[nf] = None
1120 else: # does it match a missing directory?
1128 else: # does it match a missing directory?
1121 if self._map.hasdir(nf):
1129 if self._map.hasdir(nf):
1122 notfoundadd(nf)
1130 notfoundadd(nf)
1123 else:
1131 else:
1124 badfn(ff, encoding.strtolocal(inst.strerror))
1132 badfn(ff, encoding.strtolocal(inst.strerror))
1125
1133
1126 # match.files() may contain explicitly-specified paths that shouldn't
1134 # match.files() may contain explicitly-specified paths that shouldn't
1127 # be taken; drop them from the list of files found. dirsfound/notfound
1135 # be taken; drop them from the list of files found. dirsfound/notfound
1128 # aren't filtered here because they will be tested later.
1136 # aren't filtered here because they will be tested later.
1129 if match.anypats():
1137 if match.anypats():
1130 for f in list(results):
1138 for f in list(results):
1131 if f == b'.hg' or f in subrepos:
1139 if f == b'.hg' or f in subrepos:
1132 # keep sentinel to disable further out-of-repo walks
1140 # keep sentinel to disable further out-of-repo walks
1133 continue
1141 continue
1134 if not match(f):
1142 if not match(f):
1135 del results[f]
1143 del results[f]
1136
1144
1137 # Case insensitive filesystems cannot rely on lstat() failing to detect
1145 # Case insensitive filesystems cannot rely on lstat() failing to detect
1138 # a case-only rename. Prune the stat object for any file that does not
1146 # a case-only rename. Prune the stat object for any file that does not
1139 # match the case in the filesystem, if there are multiple files that
1147 # match the case in the filesystem, if there are multiple files that
1140 # normalize to the same path.
1148 # normalize to the same path.
1141 if match.isexact() and self._checkcase:
1149 if match.isexact() and self._checkcase:
1142 normed = {}
1150 normed = {}
1143
1151
1144 for f, st in pycompat.iteritems(results):
1152 for f, st in pycompat.iteritems(results):
1145 if st is None:
1153 if st is None:
1146 continue
1154 continue
1147
1155
1148 nc = util.normcase(f)
1156 nc = util.normcase(f)
1149 paths = normed.get(nc)
1157 paths = normed.get(nc)
1150
1158
1151 if paths is None:
1159 if paths is None:
1152 paths = set()
1160 paths = set()
1153 normed[nc] = paths
1161 normed[nc] = paths
1154
1162
1155 paths.add(f)
1163 paths.add(f)
1156
1164
1157 for norm, paths in pycompat.iteritems(normed):
1165 for norm, paths in pycompat.iteritems(normed):
1158 if len(paths) > 1:
1166 if len(paths) > 1:
1159 for path in paths:
1167 for path in paths:
1160 folded = self._discoverpath(
1168 folded = self._discoverpath(
1161 path, norm, True, None, self._map.dirfoldmap
1169 path, norm, True, None, self._map.dirfoldmap
1162 )
1170 )
1163 if path != folded:
1171 if path != folded:
1164 results[path] = None
1172 results[path] = None
1165
1173
1166 return results, dirsfound, dirsnotfound
1174 return results, dirsfound, dirsnotfound
1167
1175
    def walk(self, match, subrepos, unknown, ignored, full=True):
        """
        Walk recursively through the directory tree, finding all files
        matched by match.

        If full is False, maybe skip some known-clean files.

        Return a dict mapping filename to stat-like object (either
        mercurial.osutil.stat instance or return value of os.stat()).

        A value of None means the file exists only in the dirstate (or must
        be reported without a fresh stat); subrepo paths and b'.hg' are used
        as sentinels during the walk and removed before returning.
        """
        # full is a flag that extensions that hook into walk can use -- this
        # implementation doesn't use it at all. This satisfies the contract
        # because we only guarantee a "maybe".

        # Select ignore predicates: listing ignored files disables ignoring
        # entirely; listing neither unknown nor ignored lets us ignore
        # everything (we only need files already in the dirstate).
        if ignored:
            ignore = util.never
            dirignore = util.never
        elif unknown:
            ignore = self._ignore
            dirignore = self._dirignore
        else:
            # if not unknown and not ignored, drop dir recursion and step 2
            ignore = util.always
            dirignore = util.always

        # Bind frequently-used attributes to locals for speed in the hot loop.
        matchfn = match.matchfn
        matchalways = match.always()
        matchtdir = match.traversedir
        dmap = self._map
        listdir = util.listdir
        lstat = os.lstat
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join

        exact = skipstep3 = False
        if match.isexact():  # match.exact
            exact = True
            dirignore = util.always  # skip step 2
        elif match.prefix():  # match.match, no patterns
            skipstep3 = True

        if not exact and self._checkcase:
            normalize = self._normalize
            normalizefile = self._normalizefile
            skipstep3 = False
        else:
            normalize = self._normalize
            normalizefile = None

        # step 1: find all explicit files
        results, work, dirsnotfound = self._walkexplicit(match, subrepos)
        if matchtdir:
            for d in work:
                matchtdir(d[0])
            for d in dirsnotfound:
                matchtdir(d)

        skipstep3 = skipstep3 and not (work or dirsnotfound)
        work = [d for d in work if not dirignore(d[0])]

        # step 2: visit subdirectories
        def traverse(work, alreadynormed):
            wadd = work.append
            while work:
                tracing.counter('dirstate.walk work', len(work))
                nd = work.pop()
                visitentries = match.visitchildrenset(nd)
                if not visitentries:
                    continue
                if visitentries == b'this' or visitentries == b'all':
                    visitentries = None
                skip = None
                if nd != b'':
                    skip = b'.hg'
                try:
                    with tracing.log('dirstate.walk.traverse listdir %s', nd):
                        entries = listdir(join(nd), stat=True, skip=skip)
                except OSError as inst:
                    # Unreadable or vanished directory: report it via the
                    # matcher's bad-file callback instead of aborting.
                    if inst.errno in (errno.EACCES, errno.ENOENT):
                        match.bad(
                            self.pathto(nd), encoding.strtolocal(inst.strerror)
                        )
                        continue
                    raise
                for f, kind, st in entries:
                    # Some matchers may return files in the visitentries set,
                    # instead of 'this', if the matcher explicitly mentions them
                    # and is not an exactmatcher. This is acceptable; we do not
                    # make any hard assumptions about file-or-directory below
                    # based on the presence of `f` in visitentries. If
                    # visitchildrenset returned a set, we can always skip the
                    # entries *not* in the set it provided regardless of whether
                    # they're actually a file or a directory.
                    if visitentries and f not in visitentries:
                        continue
                    if normalizefile:
                        # even though f might be a directory, we're only
                        # interested in comparing it to files currently in the
                        # dmap -- therefore normalizefile is enough
                        nf = normalizefile(
                            nd and (nd + b"/" + f) or f, True, True
                        )
                    else:
                        nf = nd and (nd + b"/" + f) or f
                    if nf not in results:
                        if kind == dirkind:
                            if not ignore(nf):
                                if matchtdir:
                                    matchtdir(nf)
                                wadd(nf)
                            if nf in dmap and (matchalways or matchfn(nf)):
                                results[nf] = None
                        elif kind == regkind or kind == lnkkind:
                            if nf in dmap:
                                if matchalways or matchfn(nf):
                                    results[nf] = st
                            elif (matchalways or matchfn(nf)) and not ignore(
                                nf
                            ):
                                # unknown file -- normalize if necessary
                                if not alreadynormed:
                                    nf = normalize(nf, False, True)
                                results[nf] = st
                        elif nf in dmap and (matchalways or matchfn(nf)):
                            results[nf] = None

        for nd, d in work:
            # alreadynormed means that processwork doesn't have to do any
            # expensive directory normalization
            alreadynormed = not normalize or nd == d
            traverse([d], alreadynormed)

        # Remove the sentinel entries seeded by _walkexplicit before
        # returning; they only existed to stop out-of-repo recursion.
        for s in subrepos:
            del results[s]
        del results[b'.hg']

        # step 3: visit remaining files from dmap
        if not skipstep3 and not exact:
            # If a dmap file is not in results yet, it was either
            # a) not matching matchfn b) ignored, c) missing, or d) under a
            # symlink directory.
            if not results and matchalways:
                visit = [f for f in dmap]
            else:
                visit = [f for f in dmap if f not in results and matchfn(f)]
            visit.sort()

            if unknown:
                # unknown == True means we walked all dirs under the roots
                # that wasn't ignored, and everything that matched was stat'ed
                # and is already in results.
                # The rest must thus be ignored or under a symlink.
                audit_path = pathutil.pathauditor(self._root, cached=True)

                for nf in iter(visit):
                    # If a stat for the same file was already added with a
                    # different case, don't add one for this, since that would
                    # make it appear as if the file exists under both names
                    # on disk.
                    if (
                        normalizefile
                        and normalizefile(nf, True, True) in results
                    ):
                        results[nf] = None
                    # Report ignored items in the dmap as long as they are not
                    # under a symlink directory.
                    elif audit_path.check(nf):
                        try:
                            results[nf] = lstat(join(nf))
                            # file was just ignored, no links, and exists
                        except OSError:
                            # file doesn't exist
                            results[nf] = None
                    else:
                        # It's either missing or under a symlink directory
                        # which we in this case report as missing
                        results[nf] = None
            else:
                # We may not have walked the full directory tree above,
                # so stat and check everything we missed.
                iv = iter(visit)
                for st in util.statfiles([join(i) for i in visit]):
                    results[next(iv)] = st
        return results
1355
1363
    def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
        """Run status through the Rust fast path.

        Delegates the walk/compare work to ``rustmod.status`` and converts
        its result into the same ``(lookup, scmutil.status)`` pair that the
        pure-Python implementation in ``status()`` produces.  May raise
        ``rustmod.FallbackError`` (caught by the caller) when the Rust side
        cannot handle the query.
        """
        # Force Rayon (Rust parallelism library) to respect the number of
        # workers. This is a temporary workaround until Rust code knows
        # how to read the config file.
        numcpus = self._ui.configint(b"worker", b"numcpus")
        if numcpus is not None:
            encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)

        workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
        if not workers_enabled:
            encoding.environ[b"RAYON_NUM_THREADS"] = b"1"

        # NOTE: the argument order of rustmod.status is positional and must
        # stay in sync with the Rust binding.
        (
            lookup,
            modified,
            added,
            removed,
            deleted,
            clean,
            ignored,
            unknown,
            warnings,
            bad,
            traversed,
            dirty,
        ) = rustmod.status(
            self._map._rustmap,
            matcher,
            self._rootdir,
            self._ignorefiles(),
            self._checkexec,
            self._lastnormaltime,
            bool(list_clean),
            bool(list_ignored),
            bool(list_unknown),
            bool(matcher.traversedir),
        )

        # The Rust walk may itself have updated dirstate entries.
        self._dirty |= dirty

        if matcher.traversedir:
            for dir in traversed:
                matcher.traversedir(dir)

        if self._ui.warn:
            # Warnings come back either as (file_path, syntax) tuples for
            # invalid ignore-pattern syntax, or as a bare path for an
            # unreadable pattern file.
            for item in warnings:
                if isinstance(item, tuple):
                    file_path, syntax = item
                    msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
                        file_path,
                        syntax,
                    )
                    self._ui.warn(msg)
                else:
                    msg = _(b"skipping unreadable pattern file '%s': %s\n")
                    self._ui.warn(
                        msg
                        % (
                            pathutil.canonpath(
                                self._rootdir, self._rootdir, item
                            ),
                            b"No such file or directory",
                        )
                    )

        for (fn, message) in bad:
            matcher.bad(fn, encoding.strtolocal(message))

        status = scmutil.status(
            modified=modified,
            added=added,
            removed=removed,
            deleted=deleted,
            unknown=unknown,
            ignored=ignored,
            clean=clean,
        )
        return (lookup, status)
1434
1442
    def status(self, match, subrepos, ignored, clean, unknown):
        """Determine the status of the working copy relative to the
        dirstate and return a pair of (unsure, status), where status is of type
        scmutil.status and:

          unsure:
            files that might have been modified since the dirstate was
            written, but need to be read to be sure (size is the same
            but mtime differs)
          status.modified:
            files that have definitely been modified since the dirstate
            was written (different size or mode)
          status.clean:
            files that have definitely not been modified since the
            dirstate was written
        """
        # The ignored/clean/unknown parameters are booleans selecting what to
        # list; rebind them before the names are reused as result lists below.
        listignored, listclean, listunknown = ignored, clean, unknown
        lookup, modified, added, unknown, ignored = [], [], [], [], []
        removed, deleted, clean = [], [], []

        dmap = self._map
        dmap.preload()

        # Decide whether the Rust fast path can handle this query; any
        # unsupported feature falls back to the Python implementation.
        use_rust = True

        allowed_matchers = (
            matchmod.alwaysmatcher,
            matchmod.exactmatcher,
            matchmod.includematcher,
        )

        if rustmod is None:
            use_rust = False
        elif self._checkcase:
            # Case-insensitive filesystems are not handled yet
            use_rust = False
        elif subrepos:
            use_rust = False
        elif sparse.enabled:
            use_rust = False
        elif not isinstance(match, allowed_matchers):
            # Some matchers have yet to be implemented
            use_rust = False

        if use_rust:
            try:
                return self._rust_status(
                    match, listclean, listignored, listunknown
                )
            except rustmod.FallbackError:
                pass

        def noop(f):
            pass

        # Bound methods cached in locals: this loop is hot, and attribute
        # lookups per file add up.
        dcontains = dmap.__contains__
        dget = dmap.__getitem__
        ladd = lookup.append  # aka "unsure"
        madd = modified.append
        aadd = added.append
        uadd = unknown.append if listunknown else noop
        iadd = ignored.append if listignored else noop
        radd = removed.append
        dadd = deleted.append
        cadd = clean.append if listclean else noop
        mexact = match.exact
        dirignore = self._dirignore
        checkexec = self._checkexec
        copymap = self._map.copymap
        lastnormaltime = self._lastnormaltime

        # We need to do full walks when either
        # - we're listing all clean files, or
        # - match.traversedir does something, because match.traversedir should
        #   be called for every dir in the working dir
        full = listclean or match.traversedir is not None
        for fn, st in pycompat.iteritems(
            self.walk(match, subrepos, listunknown, listignored, full=full)
        ):
            if not dcontains(fn):
                # Not tracked: classify as ignored or unknown.
                if (listignored or mexact(fn)) and dirignore(fn):
                    if listignored:
                        iadd(fn)
                else:
                    uadd(fn)
                continue

            # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
            # written like that for performance reasons. dmap[fn] is not a
            # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
            # opcode has fast paths when the value to be unpacked is a tuple or
            # a list, but falls back to creating a full-fledged iterator in
            # general. That is much slower than simply accessing and storing the
            # tuple members one by one.
            t = dget(fn)
            mode = t.mode
            size = t.size
            time = t.mtime

            if not st and t.tracked:
                # Tracked but missing on disk -> deleted.
                dadd(fn)
            elif t.merged:
                madd(fn)
            elif t.added:
                aadd(fn)
            elif t.removed:
                radd(fn)
            elif t.tracked:
                if (
                    size >= 0
                    and (
                        (size != st.st_size and size != st.st_size & _rangemask)
                        or ((mode ^ st.st_mode) & 0o100 and checkexec)
                    )
                    or t.from_p2
                    or fn in copymap
                ):
                    if stat.S_ISLNK(st.st_mode) and size != st.st_size:
                        # issue6456: Size returned may be longer due to
                        # encryption on EXT-4 fscrypt, undecided.
                        ladd(fn)
                    else:
                        madd(fn)
                elif (
                    time != st[stat.ST_MTIME]
                    and time != st[stat.ST_MTIME] & _rangemask
                ):
                    ladd(fn)
                elif st[stat.ST_MTIME] == lastnormaltime:
                    # fn may have just been marked as normal and it may have
                    # changed in the same second without changing its size.
                    # This can happen if we quickly do multiple commits.
                    # Force lookup, so we don't miss such a racy file change.
                    ladd(fn)
                elif listclean:
                    cadd(fn)
        status = scmutil.status(
            modified, added, removed, deleted, unknown, ignored, clean
        )
        return (lookup, status)
1575
1583
1576 def matches(self, match):
1584 def matches(self, match):
1577 """
1585 """
1578 return files in the dirstate (in whatever state) filtered by match
1586 return files in the dirstate (in whatever state) filtered by match
1579 """
1587 """
1580 dmap = self._map
1588 dmap = self._map
1581 if rustmod is not None:
1589 if rustmod is not None:
1582 dmap = self._map._rustmap
1590 dmap = self._map._rustmap
1583
1591
1584 if match.always():
1592 if match.always():
1585 return dmap.keys()
1593 return dmap.keys()
1586 files = match.files()
1594 files = match.files()
1587 if match.isexact():
1595 if match.isexact():
1588 # fast path -- filter the other way around, since typically files is
1596 # fast path -- filter the other way around, since typically files is
1589 # much smaller than dmap
1597 # much smaller than dmap
1590 return [f for f in files if f in dmap]
1598 return [f for f in files if f in dmap]
1591 if match.prefix() and all(fn in dmap for fn in files):
1599 if match.prefix() and all(fn in dmap for fn in files):
1592 # fast path -- all the values are known to be files, so just return
1600 # fast path -- all the values are known to be files, so just return
1593 # that
1601 # that
1594 return list(files)
1602 return list(files)
1595 return [f for f in dmap if match(f)]
1603 return [f for f in dmap if match(f)]
1596
1604
1597 def _actualfilename(self, tr):
1605 def _actualfilename(self, tr):
1598 if tr:
1606 if tr:
1599 return self._pendingfilename
1607 return self._pendingfilename
1600 else:
1608 else:
1601 return self._filename
1609 return self._filename
1602
1610
    def savebackup(self, tr, backupname):
        '''Save current dirstate into backup file

        ``tr`` is the current transaction (or None); it selects whether the
        pending or the main dirstate file is backed up, and receives the
        file generator / tmp-file registrations below.
        '''
        filename = self._actualfilename(tr)
        # Backing a file up onto itself would lose data via tryunlink below.
        assert backupname != filename

        # use '_writedirstate' instead of 'write' to write changes certainly,
        # because the latter omits writing out if transaction is running.
        # output file will be used to create backup of dirstate at this point.
        if self._dirty or not self._opener.exists(filename):
            self._writedirstate(
                tr,
                self._opener(filename, b"w", atomictemp=True, checkambig=True),
            )

        if tr:
            # ensure that subsequent tr.writepending returns True for
            # changes written out above, even if dirstate is never
            # changed after this
            tr.addfilegenerator(
                b'dirstate',
                (self._filename,),
                lambda f: self._writedirstate(tr, f),
                location=b'plain',
            )

            # ensure that pending file written above is unlinked at
            # failure, even if tr.writepending isn't invoked until the
            # end of this transaction
            tr.registertmp(filename, location=b'plain')

        self._opener.tryunlink(backupname)
        # hardlink backup is okay because _writedirstate is always called
        # with an "atomictemp=True" file.
        util.copyfile(
            self._opener.join(filename),
            self._opener.join(backupname),
            hardlink=True,
        )
1641
1649
def restorebackup(self, tr, backupname):
    '''Restore dirstate by backup file'''
    # Drop all in-memory state first: otherwise a later
    # "wlock.release()" could write stale dirstate changes on top of
    # the file we are about to restore.
    self.invalidate()
    opener = self._opener
    target = self._actualfilename(tr)
    if not util.samefile(opener.join(backupname), opener.join(target)):
        opener.rename(backupname, target, checkambig=True)
    else:
        # the backup is a hardlink to the live file already; just
        # remove the extra name
        opener.unlink(backupname)
1653
1661
def clearbackup(self, tr, backupname):
    '''Clear backup file

    ``tr`` is unused; the parameter exists for interface symmetry with
    savebackup()/restorebackup().
    '''
    opener = self._opener
    opener.unlink(backupname)
General Comments 0
You need to be logged in to leave comments. Login now