##// END OF EJS Templates
dirstate: replace a dead conditional branch with an assert in `update_file`...
marmoute -
r48473:37825a85 default
parent child Browse files
Show More
@@ -1,1626 +1,1625 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .pycompat import delattr
17 from .pycompat import delattr
18
18
19 from hgdemandimport import tracing
19 from hgdemandimport import tracing
20
20
21 from . import (
21 from . import (
22 dirstatemap,
22 dirstatemap,
23 encoding,
23 encoding,
24 error,
24 error,
25 match as matchmod,
25 match as matchmod,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 sparse,
30 sparse,
31 util,
31 util,
32 )
32 )
33
33
34 from .interfaces import (
34 from .interfaces import (
35 dirstate as intdirstate,
35 dirstate as intdirstate,
36 util as interfaceutil,
36 util as interfaceutil,
37 )
37 )
38
38
39 parsers = policy.importmod('parsers')
39 parsers = policy.importmod('parsers')
40 rustmod = policy.importrust('dirstate')
40 rustmod = policy.importrust('dirstate')
41
41
42 SUPPORTS_DIRSTATE_V2 = rustmod is not None
42 SUPPORTS_DIRSTATE_V2 = rustmod is not None
43
43
44 propertycache = util.propertycache
44 propertycache = util.propertycache
45 filecache = scmutil.filecache
45 filecache = scmutil.filecache
46 _rangemask = dirstatemap.rangemask
46 _rangemask = dirstatemap.rangemask
47
47
48 DirstateItem = parsers.DirstateItem
48 DirstateItem = parsers.DirstateItem
49
49
50
50
class repocache(filecache):
    """filecache for files in .hg/"""

    def join(self, obj, fname):
        # Resolve fname through the repo's .hg/ opener rather than the
        # working-directory root (contrast with rootcache below).
        return obj._opener.join(fname)
56
56
57
57
class rootcache(filecache):
    """filecache for files in the repository root"""

    def join(self, obj, fname):
        # Resolve fname relative to the working-directory root.
        return obj._join(fname)
63
63
64
64
def _getfsnow(vfs):
    '''Get "now" timestamp on filesystem'''
    # Create a temporary file and read its mtime: this reflects the clock of
    # the filesystem holding the repository, which can differ from the system
    # clock (e.g. network filesystems).
    tmpfd, tmpname = vfs.mkstemp()
    try:
        return os.fstat(tmpfd)[stat.ST_MTIME]
    finally:
        # Always clean up the probe file, even if fstat raises.
        os.close(tmpfd)
        vfs.unlink(tmpname)
73
73
74
74
def requires_parents_change(func):
    """Decorator: *func* may only run inside a ``parentchange`` context.

    The wrapped method raises ``error.ProgrammingError`` when the dirstate
    is not currently in a pending-parent-change state.
    """

    def wrap(self, *args, **kwargs):
        if self.pendingparentchange():
            return func(self, *args, **kwargs)
        raise error.ProgrammingError(
            'calling `%s` outside of a parentchange context' % func.__name__
        )

    return wrap
84
84
85
85
def requires_no_parents_change(func):
    """Decorator: *func* may NOT run inside a ``parentchange`` context.

    The wrapped method raises ``error.ProgrammingError`` when the dirstate
    is currently in a pending-parent-change state.
    """

    def wrap(self, *args, **kwargs):
        if not self.pendingparentchange():
            return func(self, *args, **kwargs)
        raise error.ProgrammingError(
            'calling `%s` inside of a parentchange context' % func.__name__
        )

    return wrap
95
95
96
96
97 @interfaceutil.implementer(intdirstate.idirstate)
97 @interfaceutil.implementer(intdirstate.idirstate)
98 class dirstate(object):
98 class dirstate(object):
    def __init__(
        self,
        opener,
        ui,
        root,
        validate,
        sparsematchfn,
        nodeconstants,
        use_dirstate_v2,
    ):
        """Create a new dirstate object.

        opener is an open()-like callable that can be used to open the
        dirstate file; root is the root of the directory tracked by
        the dirstate.
        """
        self._use_dirstate_v2 = use_dirstate_v2
        self._nodeconstants = nodeconstants
        self._opener = opener
        self._validate = validate
        self._root = root
        self._sparsematchfn = sparsematchfn
        # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
        # UNC path pointing to root share (issue4557)
        self._rootdir = pathutil.normasprefix(root)
        # True when in-memory state has diverged from what is on disk
        self._dirty = False
        self._lastnormaltime = 0
        self._ui = ui
        self._filecache = {}
        # count of active parentchange() contexts (see pendingparentchange)
        self._parentwriters = 0
        self._filename = b'dirstate'
        self._pendingfilename = b'%s.pending' % self._filename
        self._plchangecallbacks = {}
        self._origpl = None
        self._updatedfiles = set()
        self._mapcls = dirstatemap.dirstatemap
        # Access and cache cwd early, so we don't access it for the first time
        # after a working-copy update caused it to not exist (accessing it then
        # raises an exception).
        self._cwd
139
139
    def prefetch_parents(self):
        """make sure the parents are loaded

        Used to avoid a race condition.
        """
        # Touching self._pl forces the dirstate map (and its parents) to be
        # read now rather than lazily later.
        self._pl
146
146
    @contextlib.contextmanager
    def parentchange(self):
        """Context manager for handling dirstate parents.

        If an exception occurs in the scope of the context manager,
        the incoherent dirstate won't be written when wlock is
        released.
        """
        self._parentwriters += 1
        yield
        # Typically we want the "undo" step of a context manager in a
        # finally block so it happens even when an exception
        # occurs. In this case, however, we only want to decrement
        # parentwriters if the code in the with statement exits
        # normally, so we don't have a try/finally here on purpose.
        self._parentwriters -= 1
163
163
    def pendingparentchange(self):
        """Returns true if the dirstate is in the middle of a set of changes
        that modify the dirstate parent.
        """
        # Incremented/decremented by the parentchange() context manager.
        return self._parentwriters > 0
169
169
    @propertycache
    def _map(self):
        """Return the dirstate contents (see documentation for dirstatemap)."""
        # Assigning to self._map here replaces the propertycache descriptor
        # so later accesses hit the instance attribute directly.
        self._map = self._mapcls(
            self._ui,
            self._opener,
            self._root,
            self._nodeconstants,
            self._use_dirstate_v2,
        )
        return self._map
181
181
    @property
    def _sparsematcher(self):
        """The matcher for the sparse checkout.

        The working directory may not include every file from a manifest. The
        matcher obtained by this property will match a path if it is to be
        included in the working directory.
        """
        # TODO there is potential to cache this property. For now, the matcher
        # is resolved on every access. (But the called function does use a
        # cache to keep the lookup fast.)
        return self._sparsematchfn()
194
194
    @repocache(b'branch')
    def _branch(self):
        """Content of .hg/branch, falling back to b"default".

        A missing file (ENOENT) means the default branch; any other IOError
        is propagated.
        """
        try:
            return self._opener.read(b"branch").strip() or b"default"
        except IOError as inst:
            if inst.errno != errno.ENOENT:
                raise
            return b"default"
203
203
    @property
    def _pl(self):
        # Parent pair as stored in the dirstate map (unvalidated nodes).
        return self._map.parents()
207
207
    def hasdir(self, d):
        """True if *d* is a directory containing tracked files."""
        return self._map.hastrackeddir(d)
210
210
211 @rootcache(b'.hgignore')
211 @rootcache(b'.hgignore')
212 def _ignore(self):
212 def _ignore(self):
213 files = self._ignorefiles()
213 files = self._ignorefiles()
214 if not files:
214 if not files:
215 return matchmod.never()
215 return matchmod.never()
216
216
217 pats = [b'include:%s' % f for f in files]
217 pats = [b'include:%s' % f for f in files]
218 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
218 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
219
219
    @propertycache
    def _slash(self):
        # True when paths should be displayed with '/' even though the native
        # separator differs (ui.slash on e.g. Windows).
        return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
223
223
    @propertycache
    def _checklink(self):
        # Whether the filesystem at the repo root supports symlinks.
        return util.checklink(self._root)
227
227
    @propertycache
    def _checkexec(self):
        # Whether the filesystem at the repo root honors the exec bit.
        return bool(util.checkexec(self._root))
231
231
    @propertycache
    def _checkcase(self):
        # True on case-insensitive filesystems (probed via the .hg directory).
        return not util.fscasesensitive(self._join(b'.hg'))
235
235
    def _join(self, f):
        """Return the absolute path of repo-relative path *f*."""
        # much faster than os.path.join()
        # it's safe because f is always a relative path
        return self._rootdir + f
240
240
    def flagfunc(self, buildfallback):
        """Return a callable mapping a path to its flags: b'l', b'x' or b''.

        buildfallback() must return a function used for whichever of
        symlink/exec the filesystem cannot express itself.
        """
        if self._checklink and self._checkexec:
            # Filesystem supports both symlinks and the exec bit: read the
            # flags straight from an lstat of the file.
            def f(x):
                try:
                    st = os.lstat(self._join(x))
                    if util.statislink(st):
                        return b'l'
                    if util.statisexec(st):
                        return b'x'
                except OSError:
                    pass
                return b''

            return f

        fallback = buildfallback()
        if self._checklink:
            # Symlinks are real; the exec bit comes from the fallback.
            def f(x):
                if os.path.islink(self._join(x)):
                    return b'l'
                if b'x' in fallback(x):
                    return b'x'
                return b''

            return f
        if self._checkexec:
            # Exec bit is real; symlinks come from the fallback.
            def f(x):
                if b'l' in fallback(x):
                    return b'l'
                if util.isexec(self._join(x)):
                    return b'x'
                return b''

            return f
        else:
            # Neither is supported: the fallback provides everything.
            return fallback
280
280
281 @propertycache
281 @propertycache
282 def _cwd(self):
282 def _cwd(self):
283 # internal config: ui.forcecwd
283 # internal config: ui.forcecwd
284 forcecwd = self._ui.config(b'ui', b'forcecwd')
284 forcecwd = self._ui.config(b'ui', b'forcecwd')
285 if forcecwd:
285 if forcecwd:
286 return forcecwd
286 return forcecwd
287 return encoding.getcwd()
287 return encoding.getcwd()
288
288
    def getcwd(self):
        """Return the path from which a canonical path is calculated.

        This path should be used to resolve file patterns or to convert
        canonical paths back to file paths for display. It shouldn't be
        used to get real file paths. Use vfs functions instead.
        """
        cwd = self._cwd
        if cwd == self._root:
            return b''
        # self._root ends with a path separator if self._root is '/' or 'C:\'
        rootsep = self._root
        if not util.endswithsep(rootsep):
            rootsep += pycompat.ossep
        if cwd.startswith(rootsep):
            # inside the repo: return the path relative to the root
            return cwd[len(rootsep) :]
        else:
            # we're outside the repo. return an absolute path.
            return cwd
308
308
    def pathto(self, f, cwd=None):
        """Return repo-relative path *f* expressed relative to *cwd*.

        Defaults cwd to self.getcwd(); converts to '/' separators when the
        ui.slash setting asks for it.
        """
        if cwd is None:
            cwd = self.getcwd()
        path = util.pathto(self._root, cwd, f)
        if self._slash:
            return util.pconvert(path)
        return path
316
316
    def __getitem__(self, key):
        """Return the current state of key (a filename) in the dirstate.

        States are:
          n  normal
          m  needs merging
          r  marked for removal
          a  marked for addition
          ?  not tracked

        XXX The "state" is a bit obscure to be in the "public" API. we should
        consider migrating all user of this to going through the dirstate entry
        instead.
        """
        entry = self._map.get(key)
        if entry is not None:
            return entry.state
        # Unknown files report b'?' rather than raising KeyError.
        return b'?'
335
335
    def __contains__(self, key):
        """True if *key* (a filename) has an entry in the dirstate."""
        return key in self._map
338
338
339 def __iter__(self):
339 def __iter__(self):
340 return iter(sorted(self._map))
340 return iter(sorted(self._map))
341
341
    def items(self):
        """Iterate over (filename, entry) pairs of the dirstate map."""
        return pycompat.iteritems(self._map)

    # Python 2 compatibility alias.
    iteritems = items
346
346
    def directories(self):
        # Delegate to the dirstate map's directory listing.
        return self._map.directories()
349
349
    def parents(self):
        """Return both parent nodes, passed through self._validate."""
        return [self._validate(p) for p in self._pl]
352
352
    def p1(self):
        """Return the validated first parent node."""
        return self._validate(self._pl[0])
355
355
    def p2(self):
        """Return the validated second parent node."""
        return self._validate(self._pl[1])
358
358
    @property
    def in_merge(self):
        """True if a merge is in progress"""
        # A merge is signalled by a non-null second parent.
        return self._pl[1] != self._nodeconstants.nullid
363
363
    def branch(self):
        """Return the current branch name in local encoding."""
        return encoding.tolocal(self._branch)
366
366
    def setparents(self, p1, p2=None):
        """Set dirstate parents to p1 and p2.

        When moving from two parents to one, "merged" entries a
        adjusted to normal and previous copy records discarded and
        returned by the call.

        See localrepo.setparents()
        """
        if p2 is None:
            p2 = self._nodeconstants.nullid
        if self._parentwriters == 0:
            raise ValueError(
                b"cannot set dirstate parent outside of "
                b"dirstate.parentchange context manager"
            )

        self._dirty = True
        oldp2 = self._pl[1]
        if self._origpl is None:
            # remember the pre-change parents so invalidate()/write can tell
            # whether the parents actually moved
            self._origpl = self._pl
        self._map.setparents(p1, p2)
        copies = {}
        # Only when leaving a merge (old p2 set, new p2 null) do entries need
        # fixing up.
        if (
            oldp2 != self._nodeconstants.nullid
            and p2 == self._nodeconstants.nullid
        ):
            candidatefiles = self._map.non_normal_or_other_parent_paths()

            for f in candidatefiles:
                s = self._map.get(f)
                if s is None:
                    continue

                # Discard "merged" markers when moving away from a merge state
                if s.merged:
                    source = self._map.copymap.get(f)
                    if source:
                        copies[f] = source
                    self.normallookup(f)
                # Also fix up otherparent markers
                elif s.from_p2:
                    source = self._map.copymap.get(f)
                    if source:
                        copies[f] = source
                    self._add(f)
        return copies
414
414
    def setbranch(self, branch):
        """Set the current branch, persisting it to .hg/branch."""
        self.__class__._branch.set(self, encoding.fromlocal(branch))
        f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
        try:
            f.write(self._branch + b'\n')
            f.close()

            # make sure filecache has the correct stat info for _branch after
            # replacing the underlying file
            ce = self._filecache[b'_branch']
            if ce:
                ce.refresh()
        except: # re-raises
            # discard the partially-written temp file before re-raising
            f.discard()
            raise
430
430
    def invalidate(self):
        """Causes the next access to reread the dirstate.

        This is different from localrepo.invalidatedirstate() because it always
        rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
        check whether the dirstate has changed before rereading it."""

        # Drop the cached properties so they are recomputed on next access.
        for a in ("_map", "_branch", "_ignore"):
            if a in self.__dict__:
                delattr(self, a)
        self._lastnormaltime = 0
        self._dirty = False
        self._updatedfiles.clear()
        self._parentwriters = 0
        self._origpl = None
446
446
447 def copy(self, source, dest):
447 def copy(self, source, dest):
448 """Mark dest as a copy of source. Unmark dest if source is None."""
448 """Mark dest as a copy of source. Unmark dest if source is None."""
449 if source == dest:
449 if source == dest:
450 return
450 return
451 self._dirty = True
451 self._dirty = True
452 if source is not None:
452 if source is not None:
453 self._map.copymap[dest] = source
453 self._map.copymap[dest] = source
454 self._updatedfiles.add(source)
454 self._updatedfiles.add(source)
455 self._updatedfiles.add(dest)
455 self._updatedfiles.add(dest)
456 elif self._map.copymap.pop(dest, None):
456 elif self._map.copymap.pop(dest, None):
457 self._updatedfiles.add(dest)
457 self._updatedfiles.add(dest)
458
458
    def copied(self, file):
        """Return the copy source of *file*, or None if not a copy."""
        return self._map.copymap.get(file, None)
461
461
    def copies(self):
        """Return the mapping of copy destination -> copy source."""
        return self._map.copymap
464
464
    @requires_no_parents_change
    def set_tracked(self, filename):
        """a "public" method for generic code to mark a file as tracked

        This function is to be called outside of "update/merge" case. For
        example by a command like `hg add X`.

        return True the file was previously untracked, False otherwise.
        """
        entry = self._map.get(filename)
        if entry is None:
            # completely unknown: add it
            self._add(filename)
            return True
        elif not entry.tracked:
            # known but untracked (e.g. marked removed): re-track it
            self.normallookup(filename)
            return True
        # already tracked: nothing to do
        return False
482
482
    @requires_no_parents_change
    def set_untracked(self, filename):
        """a "public" method for generic code to mark a file as untracked

        This function is to be called outside of "update/merge" case. For
        example by a command like `hg remove X`.

        return True the file was previously tracked, False otherwise.
        """
        entry = self._map.get(filename)
        if entry is None:
            # not in the dirstate at all: nothing to untrack
            return False
        elif entry.added:
            # never committed: just drop the entry
            self._drop(filename)
            return True
        else:
            # present in a parent: record the removal
            self._remove(filename)
            return True
501
501
    @requires_parents_change
    def update_file_reference(
        self,
        filename,
        p1_tracked,
    ):
        """Set a file as tracked in the parent (or not)

        This is to be called when adjusting the dirstate to a new parent
        after a history rewriting operation.

        It should not be called during a merge (p2 != nullid) and only within
        a `with dirstate.parentchange():` context.
        """
        if self.in_merge:
            msg = b'update_file_reference should not be called when merging'
            raise error.ProgrammingError(msg)
        entry = self._map.get(filename)
        if entry is None:
            wc_tracked = False
        else:
            wc_tracked = entry.tracked
        if p1_tracked and wc_tracked:
            # the underlying reference might have changed, we will have to
            # check it.
            self.normallookup(filename)
        elif not (p1_tracked or wc_tracked):
            # the file is no longer relevant to anyone
            self._drop(filename)
        elif (not p1_tracked) and wc_tracked:
            # tracked only in the working copy: mark added (unless already)
            if not entry.added:
                self._add(filename)
        elif p1_tracked and not wc_tracked:
            # tracked only in the parent: mark removed (unless already)
            if entry is None or not entry.removed:
                self._remove(filename)
        else:
            assert False, 'unreachable'
539
539
    @requires_parents_change
    def update_file(
        self,
        filename,
        wc_tracked,
        p1_tracked,
        p2_tracked=False,
        merged=False,
        clean_p1=False,
        clean_p2=False,
        possibly_dirty=False,
    ):
        """update the information about a file in the dirstate

        This is to be called when the dirstate's parent changes to keep track
        of what is the file situation in regards to the working copy and its
        parent.

        This function must be called within a `dirstate.parentchange` context.

        note: the API is at an early stage and we might need to adjust it
        depending of what information ends up being relevant and useful to
        other processing.
        """
        if merged and (clean_p1 or clean_p2):
            msg = b'`merged` argument incompatible with `clean_p1`/`clean_p2`'
            raise error.ProgrammingError(msg)
        if not (p1_tracked or p2_tracked or wc_tracked):
            # not relevant anywhere: drop the entry entirely
            self._drop(filename)
        elif merged:
            assert wc_tracked
            assert self.in_merge  # we are never in the "normallookup" case
            self.otherparent(filename)
        elif not (p1_tracked or p2_tracked) and wc_tracked:
            # only in the working copy: a freshly added file
            self._addpath(filename, added=True, possibly_dirty=possibly_dirty)
            self._map.copymap.pop(filename, None)
        elif (p1_tracked or p2_tracked) and not wc_tracked:
            # in a parent but gone from the working copy: removed
            self._remove(filename)
        elif clean_p2 and wc_tracked:
            assert p2_tracked
            self.otherparent(filename)
        elif not p1_tracked and p2_tracked and wc_tracked:
            self._addpath(filename, from_p2=True, possibly_dirty=possibly_dirty)
            self._map.copymap.pop(filename, None)
        elif possibly_dirty:
            self._addpath(filename, possibly_dirty=possibly_dirty)
        elif wc_tracked:
            self.normal(filename)
        # XXX We need something for file that are dirty after an update
        else:
            assert False, 'unreachable'
591
590
592 @requires_parents_change
591 @requires_parents_change
593 def update_parent_file_data(self, f, filedata):
592 def update_parent_file_data(self, f, filedata):
594 """update the information about the content of a file
593 """update the information about the content of a file
595
594
596 This function should be called within a `dirstate.parentchange` context.
595 This function should be called within a `dirstate.parentchange` context.
597 """
596 """
598 self.normal(f, parentfiledata=filedata)
597 self.normal(f, parentfiledata=filedata)
599
598
600 def _addpath(
599 def _addpath(
601 self,
600 self,
602 f,
601 f,
603 mode=0,
602 mode=0,
604 size=None,
603 size=None,
605 mtime=None,
604 mtime=None,
606 added=False,
605 added=False,
607 merged=False,
606 merged=False,
608 from_p2=False,
607 from_p2=False,
609 possibly_dirty=False,
608 possibly_dirty=False,
610 ):
609 ):
611 entry = self._map.get(f)
610 entry = self._map.get(f)
612 if added or entry is not None and entry.removed:
611 if added or entry is not None and entry.removed:
613 scmutil.checkfilename(f)
612 scmutil.checkfilename(f)
614 if self._map.hastrackeddir(f):
613 if self._map.hastrackeddir(f):
615 msg = _(b'directory %r already in dirstate')
614 msg = _(b'directory %r already in dirstate')
616 msg %= pycompat.bytestr(f)
615 msg %= pycompat.bytestr(f)
617 raise error.Abort(msg)
616 raise error.Abort(msg)
618 # shadows
617 # shadows
619 for d in pathutil.finddirs(f):
618 for d in pathutil.finddirs(f):
620 if self._map.hastrackeddir(d):
619 if self._map.hastrackeddir(d):
621 break
620 break
622 entry = self._map.get(d)
621 entry = self._map.get(d)
623 if entry is not None and not entry.removed:
622 if entry is not None and not entry.removed:
624 msg = _(b'file %r in dirstate clashes with %r')
623 msg = _(b'file %r in dirstate clashes with %r')
625 msg %= (pycompat.bytestr(d), pycompat.bytestr(f))
624 msg %= (pycompat.bytestr(d), pycompat.bytestr(f))
626 raise error.Abort(msg)
625 raise error.Abort(msg)
627 self._dirty = True
626 self._dirty = True
628 self._updatedfiles.add(f)
627 self._updatedfiles.add(f)
629 self._map.addfile(
628 self._map.addfile(
630 f,
629 f,
631 mode=mode,
630 mode=mode,
632 size=size,
631 size=size,
633 mtime=mtime,
632 mtime=mtime,
634 added=added,
633 added=added,
635 merged=merged,
634 merged=merged,
636 from_p2=from_p2,
635 from_p2=from_p2,
637 possibly_dirty=possibly_dirty,
636 possibly_dirty=possibly_dirty,
638 )
637 )
639
638
640 def normal(self, f, parentfiledata=None):
639 def normal(self, f, parentfiledata=None):
641 """Mark a file normal and clean.
640 """Mark a file normal and clean.
642
641
643 parentfiledata: (mode, size, mtime) of the clean file
642 parentfiledata: (mode, size, mtime) of the clean file
644
643
645 parentfiledata should be computed from memory (for mode,
644 parentfiledata should be computed from memory (for mode,
646 size), as or close as possible from the point where we
645 size), as or close as possible from the point where we
647 determined the file was clean, to limit the risk of the
646 determined the file was clean, to limit the risk of the
648 file having been changed by an external process between the
647 file having been changed by an external process between the
649 moment where the file was determined to be clean and now."""
648 moment where the file was determined to be clean and now."""
650 if parentfiledata:
649 if parentfiledata:
651 (mode, size, mtime) = parentfiledata
650 (mode, size, mtime) = parentfiledata
652 else:
651 else:
653 s = os.lstat(self._join(f))
652 s = os.lstat(self._join(f))
654 mode = s.st_mode
653 mode = s.st_mode
655 size = s.st_size
654 size = s.st_size
656 mtime = s[stat.ST_MTIME]
655 mtime = s[stat.ST_MTIME]
657 self._addpath(f, mode=mode, size=size, mtime=mtime)
656 self._addpath(f, mode=mode, size=size, mtime=mtime)
658 self._map.copymap.pop(f, None)
657 self._map.copymap.pop(f, None)
659 if f in self._map.nonnormalset:
658 if f in self._map.nonnormalset:
660 self._map.nonnormalset.remove(f)
659 self._map.nonnormalset.remove(f)
661 if mtime > self._lastnormaltime:
660 if mtime > self._lastnormaltime:
662 # Remember the most recent modification timeslot for status(),
661 # Remember the most recent modification timeslot for status(),
663 # to make sure we won't miss future size-preserving file content
662 # to make sure we won't miss future size-preserving file content
664 # modifications that happen within the same timeslot.
663 # modifications that happen within the same timeslot.
665 self._lastnormaltime = mtime
664 self._lastnormaltime = mtime
666
665
667 def normallookup(self, f):
666 def normallookup(self, f):
668 '''Mark a file normal, but possibly dirty.'''
667 '''Mark a file normal, but possibly dirty.'''
669 if self.in_merge:
668 if self.in_merge:
670 # if there is a merge going on and the file was either
669 # if there is a merge going on and the file was either
671 # "merged" or coming from other parent (-2) before
670 # "merged" or coming from other parent (-2) before
672 # being removed, restore that state.
671 # being removed, restore that state.
673 entry = self._map.get(f)
672 entry = self._map.get(f)
674 if entry is not None:
673 if entry is not None:
675 # XXX this should probably be dealt with a a lower level
674 # XXX this should probably be dealt with a a lower level
676 # (see `merged_removed` and `from_p2_removed`)
675 # (see `merged_removed` and `from_p2_removed`)
677 if entry.merged_removed or entry.from_p2_removed:
676 if entry.merged_removed or entry.from_p2_removed:
678 source = self._map.copymap.get(f)
677 source = self._map.copymap.get(f)
679 if entry.merged_removed:
678 if entry.merged_removed:
680 self.merge(f)
679 self.merge(f)
681 elif entry.from_p2_removed:
680 elif entry.from_p2_removed:
682 self.otherparent(f)
681 self.otherparent(f)
683 if source is not None:
682 if source is not None:
684 self.copy(source, f)
683 self.copy(source, f)
685 return
684 return
686 elif entry.merged or entry.from_p2:
685 elif entry.merged or entry.from_p2:
687 return
686 return
688 self._addpath(f, possibly_dirty=True)
687 self._addpath(f, possibly_dirty=True)
689 self._map.copymap.pop(f, None)
688 self._map.copymap.pop(f, None)
690
689
691 def otherparent(self, f):
690 def otherparent(self, f):
692 '''Mark as coming from the other parent, always dirty.'''
691 '''Mark as coming from the other parent, always dirty.'''
693 if not self.in_merge:
692 if not self.in_merge:
694 msg = _(b"setting %r to other parent only allowed in merges") % f
693 msg = _(b"setting %r to other parent only allowed in merges") % f
695 raise error.Abort(msg)
694 raise error.Abort(msg)
696 entry = self._map.get(f)
695 entry = self._map.get(f)
697 if entry is not None and entry.tracked:
696 if entry is not None and entry.tracked:
698 # merge-like
697 # merge-like
699 self._addpath(f, merged=True)
698 self._addpath(f, merged=True)
700 else:
699 else:
701 # add-like
700 # add-like
702 self._addpath(f, from_p2=True)
701 self._addpath(f, from_p2=True)
703 self._map.copymap.pop(f, None)
702 self._map.copymap.pop(f, None)
704
703
705 def add(self, f):
704 def add(self, f):
706 '''Mark a file added.'''
705 '''Mark a file added.'''
707 if not self.pendingparentchange():
706 if not self.pendingparentchange():
708 util.nouideprecwarn(
707 util.nouideprecwarn(
709 b"do not use `add` outside of update/merge context."
708 b"do not use `add` outside of update/merge context."
710 b" Use `set_tracked`",
709 b" Use `set_tracked`",
711 b'6.0',
710 b'6.0',
712 stacklevel=2,
711 stacklevel=2,
713 )
712 )
714 self._add(f)
713 self._add(f)
715
714
716 def _add(self, filename):
715 def _add(self, filename):
717 """internal function to mark a file as added"""
716 """internal function to mark a file as added"""
718 self._addpath(filename, added=True)
717 self._addpath(filename, added=True)
719 self._map.copymap.pop(filename, None)
718 self._map.copymap.pop(filename, None)
720
719
721 def remove(self, f):
720 def remove(self, f):
722 '''Mark a file removed'''
721 '''Mark a file removed'''
723 if not self.pendingparentchange():
722 if not self.pendingparentchange():
724 util.nouideprecwarn(
723 util.nouideprecwarn(
725 b"do not use `remove` outside of update/merge context."
724 b"do not use `remove` outside of update/merge context."
726 b" Use `set_untracked`",
725 b" Use `set_untracked`",
727 b'6.0',
726 b'6.0',
728 stacklevel=2,
727 stacklevel=2,
729 )
728 )
730 self._remove(f)
729 self._remove(f)
731
730
732 def _remove(self, filename):
731 def _remove(self, filename):
733 """internal function to mark a file removed"""
732 """internal function to mark a file removed"""
734 self._dirty = True
733 self._dirty = True
735 self._updatedfiles.add(filename)
734 self._updatedfiles.add(filename)
736 self._map.removefile(filename, in_merge=self.in_merge)
735 self._map.removefile(filename, in_merge=self.in_merge)
737
736
738 def merge(self, f):
737 def merge(self, f):
739 '''Mark a file merged.'''
738 '''Mark a file merged.'''
740 if not self.in_merge:
739 if not self.in_merge:
741 return self.normallookup(f)
740 return self.normallookup(f)
742 return self.otherparent(f)
741 return self.otherparent(f)
743
742
744 def drop(self, f):
743 def drop(self, f):
745 '''Drop a file from the dirstate'''
744 '''Drop a file from the dirstate'''
746 if not self.pendingparentchange():
745 if not self.pendingparentchange():
747 util.nouideprecwarn(
746 util.nouideprecwarn(
748 b"do not use `drop` outside of update/merge context."
747 b"do not use `drop` outside of update/merge context."
749 b" Use `set_untracked`",
748 b" Use `set_untracked`",
750 b'6.0',
749 b'6.0',
751 stacklevel=2,
750 stacklevel=2,
752 )
751 )
753 self._drop(f)
752 self._drop(f)
754
753
755 def _drop(self, filename):
754 def _drop(self, filename):
756 """internal function to drop a file from the dirstate"""
755 """internal function to drop a file from the dirstate"""
757 if self._map.dropfile(filename):
756 if self._map.dropfile(filename):
758 self._dirty = True
757 self._dirty = True
759 self._updatedfiles.add(filename)
758 self._updatedfiles.add(filename)
760 self._map.copymap.pop(filename, None)
759 self._map.copymap.pop(filename, None)
761
760
762 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
761 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
763 if exists is None:
762 if exists is None:
764 exists = os.path.lexists(os.path.join(self._root, path))
763 exists = os.path.lexists(os.path.join(self._root, path))
765 if not exists:
764 if not exists:
766 # Maybe a path component exists
765 # Maybe a path component exists
767 if not ignoremissing and b'/' in path:
766 if not ignoremissing and b'/' in path:
768 d, f = path.rsplit(b'/', 1)
767 d, f = path.rsplit(b'/', 1)
769 d = self._normalize(d, False, ignoremissing, None)
768 d = self._normalize(d, False, ignoremissing, None)
770 folded = d + b"/" + f
769 folded = d + b"/" + f
771 else:
770 else:
772 # No path components, preserve original case
771 # No path components, preserve original case
773 folded = path
772 folded = path
774 else:
773 else:
775 # recursively normalize leading directory components
774 # recursively normalize leading directory components
776 # against dirstate
775 # against dirstate
777 if b'/' in normed:
776 if b'/' in normed:
778 d, f = normed.rsplit(b'/', 1)
777 d, f = normed.rsplit(b'/', 1)
779 d = self._normalize(d, False, ignoremissing, True)
778 d = self._normalize(d, False, ignoremissing, True)
780 r = self._root + b"/" + d
779 r = self._root + b"/" + d
781 folded = d + b"/" + util.fspath(f, r)
780 folded = d + b"/" + util.fspath(f, r)
782 else:
781 else:
783 folded = util.fspath(normed, self._root)
782 folded = util.fspath(normed, self._root)
784 storemap[normed] = folded
783 storemap[normed] = folded
785
784
786 return folded
785 return folded
787
786
788 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
787 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
789 normed = util.normcase(path)
788 normed = util.normcase(path)
790 folded = self._map.filefoldmap.get(normed, None)
789 folded = self._map.filefoldmap.get(normed, None)
791 if folded is None:
790 if folded is None:
792 if isknown:
791 if isknown:
793 folded = path
792 folded = path
794 else:
793 else:
795 folded = self._discoverpath(
794 folded = self._discoverpath(
796 path, normed, ignoremissing, exists, self._map.filefoldmap
795 path, normed, ignoremissing, exists, self._map.filefoldmap
797 )
796 )
798 return folded
797 return folded
799
798
800 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
799 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
801 normed = util.normcase(path)
800 normed = util.normcase(path)
802 folded = self._map.filefoldmap.get(normed, None)
801 folded = self._map.filefoldmap.get(normed, None)
803 if folded is None:
802 if folded is None:
804 folded = self._map.dirfoldmap.get(normed, None)
803 folded = self._map.dirfoldmap.get(normed, None)
805 if folded is None:
804 if folded is None:
806 if isknown:
805 if isknown:
807 folded = path
806 folded = path
808 else:
807 else:
809 # store discovered result in dirfoldmap so that future
808 # store discovered result in dirfoldmap so that future
810 # normalizefile calls don't start matching directories
809 # normalizefile calls don't start matching directories
811 folded = self._discoverpath(
810 folded = self._discoverpath(
812 path, normed, ignoremissing, exists, self._map.dirfoldmap
811 path, normed, ignoremissing, exists, self._map.dirfoldmap
813 )
812 )
814 return folded
813 return folded
815
814
816 def normalize(self, path, isknown=False, ignoremissing=False):
815 def normalize(self, path, isknown=False, ignoremissing=False):
817 """
816 """
818 normalize the case of a pathname when on a casefolding filesystem
817 normalize the case of a pathname when on a casefolding filesystem
819
818
820 isknown specifies whether the filename came from walking the
819 isknown specifies whether the filename came from walking the
821 disk, to avoid extra filesystem access.
820 disk, to avoid extra filesystem access.
822
821
823 If ignoremissing is True, missing path are returned
822 If ignoremissing is True, missing path are returned
824 unchanged. Otherwise, we try harder to normalize possibly
823 unchanged. Otherwise, we try harder to normalize possibly
825 existing path components.
824 existing path components.
826
825
827 The normalized case is determined based on the following precedence:
826 The normalized case is determined based on the following precedence:
828
827
829 - version of name already stored in the dirstate
828 - version of name already stored in the dirstate
830 - version of name stored on disk
829 - version of name stored on disk
831 - version provided via command arguments
830 - version provided via command arguments
832 """
831 """
833
832
834 if self._checkcase:
833 if self._checkcase:
835 return self._normalize(path, isknown, ignoremissing)
834 return self._normalize(path, isknown, ignoremissing)
836 return path
835 return path
837
836
838 def clear(self):
837 def clear(self):
839 self._map.clear()
838 self._map.clear()
840 self._lastnormaltime = 0
839 self._lastnormaltime = 0
841 self._updatedfiles.clear()
840 self._updatedfiles.clear()
842 self._dirty = True
841 self._dirty = True
843
842
844 def rebuild(self, parent, allfiles, changedfiles=None):
843 def rebuild(self, parent, allfiles, changedfiles=None):
845 if changedfiles is None:
844 if changedfiles is None:
846 # Rebuild entire dirstate
845 # Rebuild entire dirstate
847 to_lookup = allfiles
846 to_lookup = allfiles
848 to_drop = []
847 to_drop = []
849 lastnormaltime = self._lastnormaltime
848 lastnormaltime = self._lastnormaltime
850 self.clear()
849 self.clear()
851 self._lastnormaltime = lastnormaltime
850 self._lastnormaltime = lastnormaltime
852 elif len(changedfiles) < 10:
851 elif len(changedfiles) < 10:
853 # Avoid turning allfiles into a set, which can be expensive if it's
852 # Avoid turning allfiles into a set, which can be expensive if it's
854 # large.
853 # large.
855 to_lookup = []
854 to_lookup = []
856 to_drop = []
855 to_drop = []
857 for f in changedfiles:
856 for f in changedfiles:
858 if f in allfiles:
857 if f in allfiles:
859 to_lookup.append(f)
858 to_lookup.append(f)
860 else:
859 else:
861 to_drop.append(f)
860 to_drop.append(f)
862 else:
861 else:
863 changedfilesset = set(changedfiles)
862 changedfilesset = set(changedfiles)
864 to_lookup = changedfilesset & set(allfiles)
863 to_lookup = changedfilesset & set(allfiles)
865 to_drop = changedfilesset - to_lookup
864 to_drop = changedfilesset - to_lookup
866
865
867 if self._origpl is None:
866 if self._origpl is None:
868 self._origpl = self._pl
867 self._origpl = self._pl
869 self._map.setparents(parent, self._nodeconstants.nullid)
868 self._map.setparents(parent, self._nodeconstants.nullid)
870
869
871 for f in to_lookup:
870 for f in to_lookup:
872 self.normallookup(f)
871 self.normallookup(f)
873 for f in to_drop:
872 for f in to_drop:
874 self._drop(f)
873 self._drop(f)
875
874
876 self._dirty = True
875 self._dirty = True
877
876
878 def identity(self):
877 def identity(self):
879 """Return identity of dirstate itself to detect changing in storage
878 """Return identity of dirstate itself to detect changing in storage
880
879
881 If identity of previous dirstate is equal to this, writing
880 If identity of previous dirstate is equal to this, writing
882 changes based on the former dirstate out can keep consistency.
881 changes based on the former dirstate out can keep consistency.
883 """
882 """
884 return self._map.identity
883 return self._map.identity
885
884
886 def write(self, tr):
885 def write(self, tr):
887 if not self._dirty:
886 if not self._dirty:
888 return
887 return
889
888
890 filename = self._filename
889 filename = self._filename
891 if tr:
890 if tr:
892 # 'dirstate.write()' is not only for writing in-memory
891 # 'dirstate.write()' is not only for writing in-memory
893 # changes out, but also for dropping ambiguous timestamp.
892 # changes out, but also for dropping ambiguous timestamp.
894 # delayed writing re-raise "ambiguous timestamp issue".
893 # delayed writing re-raise "ambiguous timestamp issue".
895 # See also the wiki page below for detail:
894 # See also the wiki page below for detail:
896 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
895 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
897
896
898 # emulate dropping timestamp in 'parsers.pack_dirstate'
897 # emulate dropping timestamp in 'parsers.pack_dirstate'
899 now = _getfsnow(self._opener)
898 now = _getfsnow(self._opener)
900 self._map.clearambiguoustimes(self._updatedfiles, now)
899 self._map.clearambiguoustimes(self._updatedfiles, now)
901
900
902 # emulate that all 'dirstate.normal' results are written out
901 # emulate that all 'dirstate.normal' results are written out
903 self._lastnormaltime = 0
902 self._lastnormaltime = 0
904 self._updatedfiles.clear()
903 self._updatedfiles.clear()
905
904
906 # delay writing in-memory changes out
905 # delay writing in-memory changes out
907 tr.addfilegenerator(
906 tr.addfilegenerator(
908 b'dirstate',
907 b'dirstate',
909 (self._filename,),
908 (self._filename,),
910 self._writedirstate,
909 self._writedirstate,
911 location=b'plain',
910 location=b'plain',
912 )
911 )
913 return
912 return
914
913
915 st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
914 st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
916 self._writedirstate(st)
915 self._writedirstate(st)
917
916
918 def addparentchangecallback(self, category, callback):
917 def addparentchangecallback(self, category, callback):
919 """add a callback to be called when the wd parents are changed
918 """add a callback to be called when the wd parents are changed
920
919
921 Callback will be called with the following arguments:
920 Callback will be called with the following arguments:
922 dirstate, (oldp1, oldp2), (newp1, newp2)
921 dirstate, (oldp1, oldp2), (newp1, newp2)
923
922
924 Category is a unique identifier to allow overwriting an old callback
923 Category is a unique identifier to allow overwriting an old callback
925 with a newer callback.
924 with a newer callback.
926 """
925 """
927 self._plchangecallbacks[category] = callback
926 self._plchangecallbacks[category] = callback
928
927
929 def _writedirstate(self, st):
928 def _writedirstate(self, st):
930 # notify callbacks about parents change
929 # notify callbacks about parents change
931 if self._origpl is not None and self._origpl != self._pl:
930 if self._origpl is not None and self._origpl != self._pl:
932 for c, callback in sorted(
931 for c, callback in sorted(
933 pycompat.iteritems(self._plchangecallbacks)
932 pycompat.iteritems(self._plchangecallbacks)
934 ):
933 ):
935 callback(self, self._origpl, self._pl)
934 callback(self, self._origpl, self._pl)
936 self._origpl = None
935 self._origpl = None
937 # use the modification time of the newly created temporary file as the
936 # use the modification time of the newly created temporary file as the
938 # filesystem's notion of 'now'
937 # filesystem's notion of 'now'
939 now = util.fstat(st)[stat.ST_MTIME] & _rangemask
938 now = util.fstat(st)[stat.ST_MTIME] & _rangemask
940
939
941 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
940 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
942 # timestamp of each entries in dirstate, because of 'now > mtime'
941 # timestamp of each entries in dirstate, because of 'now > mtime'
943 delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
942 delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
944 if delaywrite > 0:
943 if delaywrite > 0:
945 # do we have any files to delay for?
944 # do we have any files to delay for?
946 for f, e in pycompat.iteritems(self._map):
945 for f, e in pycompat.iteritems(self._map):
947 if e.need_delay(now):
946 if e.need_delay(now):
948 import time # to avoid useless import
947 import time # to avoid useless import
949
948
950 # rather than sleep n seconds, sleep until the next
949 # rather than sleep n seconds, sleep until the next
951 # multiple of n seconds
950 # multiple of n seconds
952 clock = time.time()
951 clock = time.time()
953 start = int(clock) - (int(clock) % delaywrite)
952 start = int(clock) - (int(clock) % delaywrite)
954 end = start + delaywrite
953 end = start + delaywrite
955 time.sleep(end - clock)
954 time.sleep(end - clock)
956 now = end # trust our estimate that the end is near now
955 now = end # trust our estimate that the end is near now
957 break
956 break
958
957
959 self._map.write(st, now)
958 self._map.write(st, now)
960 self._lastnormaltime = 0
959 self._lastnormaltime = 0
961 self._dirty = False
960 self._dirty = False
962
961
963 def _dirignore(self, f):
962 def _dirignore(self, f):
964 if self._ignore(f):
963 if self._ignore(f):
965 return True
964 return True
966 for p in pathutil.finddirs(f):
965 for p in pathutil.finddirs(f):
967 if self._ignore(p):
966 if self._ignore(p):
968 return True
967 return True
969 return False
968 return False
970
969
971 def _ignorefiles(self):
970 def _ignorefiles(self):
972 files = []
971 files = []
973 if os.path.exists(self._join(b'.hgignore')):
972 if os.path.exists(self._join(b'.hgignore')):
974 files.append(self._join(b'.hgignore'))
973 files.append(self._join(b'.hgignore'))
975 for name, path in self._ui.configitems(b"ui"):
974 for name, path in self._ui.configitems(b"ui"):
976 if name == b'ignore' or name.startswith(b'ignore.'):
975 if name == b'ignore' or name.startswith(b'ignore.'):
977 # we need to use os.path.join here rather than self._join
976 # we need to use os.path.join here rather than self._join
978 # because path is arbitrary and user-specified
977 # because path is arbitrary and user-specified
979 files.append(os.path.join(self._rootdir, util.expandpath(path)))
978 files.append(os.path.join(self._rootdir, util.expandpath(path)))
980 return files
979 return files
981
980
982 def _ignorefileandline(self, f):
981 def _ignorefileandline(self, f):
983 files = collections.deque(self._ignorefiles())
982 files = collections.deque(self._ignorefiles())
984 visited = set()
983 visited = set()
985 while files:
984 while files:
986 i = files.popleft()
985 i = files.popleft()
987 patterns = matchmod.readpatternfile(
986 patterns = matchmod.readpatternfile(
988 i, self._ui.warn, sourceinfo=True
987 i, self._ui.warn, sourceinfo=True
989 )
988 )
990 for pattern, lineno, line in patterns:
989 for pattern, lineno, line in patterns:
991 kind, p = matchmod._patsplit(pattern, b'glob')
990 kind, p = matchmod._patsplit(pattern, b'glob')
992 if kind == b"subinclude":
991 if kind == b"subinclude":
993 if p not in visited:
992 if p not in visited:
994 files.append(p)
993 files.append(p)
995 continue
994 continue
996 m = matchmod.match(
995 m = matchmod.match(
997 self._root, b'', [], [pattern], warn=self._ui.warn
996 self._root, b'', [], [pattern], warn=self._ui.warn
998 )
997 )
999 if m(f):
998 if m(f):
1000 return (i, lineno, line)
999 return (i, lineno, line)
1001 visited.add(i)
1000 visited.add(i)
1002 return (None, -1, b"")
1001 return (None, -1, b"")
1003
1002
1004 def _walkexplicit(self, match, subrepos):
1003 def _walkexplicit(self, match, subrepos):
1005 """Get stat data about the files explicitly specified by match.
1004 """Get stat data about the files explicitly specified by match.
1006
1005
1007 Return a triple (results, dirsfound, dirsnotfound).
1006 Return a triple (results, dirsfound, dirsnotfound).
1008 - results is a mapping from filename to stat result. It also contains
1007 - results is a mapping from filename to stat result. It also contains
1009 listings mapping subrepos and .hg to None.
1008 listings mapping subrepos and .hg to None.
1010 - dirsfound is a list of files found to be directories.
1009 - dirsfound is a list of files found to be directories.
1011 - dirsnotfound is a list of files that the dirstate thinks are
1010 - dirsnotfound is a list of files that the dirstate thinks are
1012 directories and that were not found."""
1011 directories and that were not found."""
1013
1012
1014 def badtype(mode):
1013 def badtype(mode):
1015 kind = _(b'unknown')
1014 kind = _(b'unknown')
1016 if stat.S_ISCHR(mode):
1015 if stat.S_ISCHR(mode):
1017 kind = _(b'character device')
1016 kind = _(b'character device')
1018 elif stat.S_ISBLK(mode):
1017 elif stat.S_ISBLK(mode):
1019 kind = _(b'block device')
1018 kind = _(b'block device')
1020 elif stat.S_ISFIFO(mode):
1019 elif stat.S_ISFIFO(mode):
1021 kind = _(b'fifo')
1020 kind = _(b'fifo')
1022 elif stat.S_ISSOCK(mode):
1021 elif stat.S_ISSOCK(mode):
1023 kind = _(b'socket')
1022 kind = _(b'socket')
1024 elif stat.S_ISDIR(mode):
1023 elif stat.S_ISDIR(mode):
1025 kind = _(b'directory')
1024 kind = _(b'directory')
1026 return _(b'unsupported file type (type is %s)') % kind
1025 return _(b'unsupported file type (type is %s)') % kind
1027
1026
1028 badfn = match.bad
1027 badfn = match.bad
1029 dmap = self._map
1028 dmap = self._map
1030 lstat = os.lstat
1029 lstat = os.lstat
1031 getkind = stat.S_IFMT
1030 getkind = stat.S_IFMT
1032 dirkind = stat.S_IFDIR
1031 dirkind = stat.S_IFDIR
1033 regkind = stat.S_IFREG
1032 regkind = stat.S_IFREG
1034 lnkkind = stat.S_IFLNK
1033 lnkkind = stat.S_IFLNK
1035 join = self._join
1034 join = self._join
1036 dirsfound = []
1035 dirsfound = []
1037 foundadd = dirsfound.append
1036 foundadd = dirsfound.append
1038 dirsnotfound = []
1037 dirsnotfound = []
1039 notfoundadd = dirsnotfound.append
1038 notfoundadd = dirsnotfound.append
1040
1039
1041 if not match.isexact() and self._checkcase:
1040 if not match.isexact() and self._checkcase:
1042 normalize = self._normalize
1041 normalize = self._normalize
1043 else:
1042 else:
1044 normalize = None
1043 normalize = None
1045
1044
1046 files = sorted(match.files())
1045 files = sorted(match.files())
1047 subrepos.sort()
1046 subrepos.sort()
1048 i, j = 0, 0
1047 i, j = 0, 0
1049 while i < len(files) and j < len(subrepos):
1048 while i < len(files) and j < len(subrepos):
1050 subpath = subrepos[j] + b"/"
1049 subpath = subrepos[j] + b"/"
1051 if files[i] < subpath:
1050 if files[i] < subpath:
1052 i += 1
1051 i += 1
1053 continue
1052 continue
1054 while i < len(files) and files[i].startswith(subpath):
1053 while i < len(files) and files[i].startswith(subpath):
1055 del files[i]
1054 del files[i]
1056 j += 1
1055 j += 1
1057
1056
1058 if not files or b'' in files:
1057 if not files or b'' in files:
1059 files = [b'']
1058 files = [b'']
1060 # constructing the foldmap is expensive, so don't do it for the
1059 # constructing the foldmap is expensive, so don't do it for the
1061 # common case where files is ['']
1060 # common case where files is ['']
1062 normalize = None
1061 normalize = None
1063 results = dict.fromkeys(subrepos)
1062 results = dict.fromkeys(subrepos)
1064 results[b'.hg'] = None
1063 results[b'.hg'] = None
1065
1064
1066 for ff in files:
1065 for ff in files:
1067 if normalize:
1066 if normalize:
1068 nf = normalize(ff, False, True)
1067 nf = normalize(ff, False, True)
1069 else:
1068 else:
1070 nf = ff
1069 nf = ff
1071 if nf in results:
1070 if nf in results:
1072 continue
1071 continue
1073
1072
1074 try:
1073 try:
1075 st = lstat(join(nf))
1074 st = lstat(join(nf))
1076 kind = getkind(st.st_mode)
1075 kind = getkind(st.st_mode)
1077 if kind == dirkind:
1076 if kind == dirkind:
1078 if nf in dmap:
1077 if nf in dmap:
1079 # file replaced by dir on disk but still in dirstate
1078 # file replaced by dir on disk but still in dirstate
1080 results[nf] = None
1079 results[nf] = None
1081 foundadd((nf, ff))
1080 foundadd((nf, ff))
1082 elif kind == regkind or kind == lnkkind:
1081 elif kind == regkind or kind == lnkkind:
1083 results[nf] = st
1082 results[nf] = st
1084 else:
1083 else:
1085 badfn(ff, badtype(kind))
1084 badfn(ff, badtype(kind))
1086 if nf in dmap:
1085 if nf in dmap:
1087 results[nf] = None
1086 results[nf] = None
1088 except OSError as inst: # nf not found on disk - it is dirstate only
1087 except OSError as inst: # nf not found on disk - it is dirstate only
1089 if nf in dmap: # does it exactly match a missing file?
1088 if nf in dmap: # does it exactly match a missing file?
1090 results[nf] = None
1089 results[nf] = None
1091 else: # does it match a missing directory?
1090 else: # does it match a missing directory?
1092 if self._map.hasdir(nf):
1091 if self._map.hasdir(nf):
1093 notfoundadd(nf)
1092 notfoundadd(nf)
1094 else:
1093 else:
1095 badfn(ff, encoding.strtolocal(inst.strerror))
1094 badfn(ff, encoding.strtolocal(inst.strerror))
1096
1095
1097 # match.files() may contain explicitly-specified paths that shouldn't
1096 # match.files() may contain explicitly-specified paths that shouldn't
1098 # be taken; drop them from the list of files found. dirsfound/notfound
1097 # be taken; drop them from the list of files found. dirsfound/notfound
1099 # aren't filtered here because they will be tested later.
1098 # aren't filtered here because they will be tested later.
1100 if match.anypats():
1099 if match.anypats():
1101 for f in list(results):
1100 for f in list(results):
1102 if f == b'.hg' or f in subrepos:
1101 if f == b'.hg' or f in subrepos:
1103 # keep sentinel to disable further out-of-repo walks
1102 # keep sentinel to disable further out-of-repo walks
1104 continue
1103 continue
1105 if not match(f):
1104 if not match(f):
1106 del results[f]
1105 del results[f]
1107
1106
1108 # Case insensitive filesystems cannot rely on lstat() failing to detect
1107 # Case insensitive filesystems cannot rely on lstat() failing to detect
1109 # a case-only rename. Prune the stat object for any file that does not
1108 # a case-only rename. Prune the stat object for any file that does not
1110 # match the case in the filesystem, if there are multiple files that
1109 # match the case in the filesystem, if there are multiple files that
1111 # normalize to the same path.
1110 # normalize to the same path.
1112 if match.isexact() and self._checkcase:
1111 if match.isexact() and self._checkcase:
1113 normed = {}
1112 normed = {}
1114
1113
1115 for f, st in pycompat.iteritems(results):
1114 for f, st in pycompat.iteritems(results):
1116 if st is None:
1115 if st is None:
1117 continue
1116 continue
1118
1117
1119 nc = util.normcase(f)
1118 nc = util.normcase(f)
1120 paths = normed.get(nc)
1119 paths = normed.get(nc)
1121
1120
1122 if paths is None:
1121 if paths is None:
1123 paths = set()
1122 paths = set()
1124 normed[nc] = paths
1123 normed[nc] = paths
1125
1124
1126 paths.add(f)
1125 paths.add(f)
1127
1126
1128 for norm, paths in pycompat.iteritems(normed):
1127 for norm, paths in pycompat.iteritems(normed):
1129 if len(paths) > 1:
1128 if len(paths) > 1:
1130 for path in paths:
1129 for path in paths:
1131 folded = self._discoverpath(
1130 folded = self._discoverpath(
1132 path, norm, True, None, self._map.dirfoldmap
1131 path, norm, True, None, self._map.dirfoldmap
1133 )
1132 )
1134 if path != folded:
1133 if path != folded:
1135 results[path] = None
1134 results[path] = None
1136
1135
1137 return results, dirsfound, dirsnotfound
1136 return results, dirsfound, dirsnotfound
1138
1137
1139 def walk(self, match, subrepos, unknown, ignored, full=True):
1138 def walk(self, match, subrepos, unknown, ignored, full=True):
1140 """
1139 """
1141 Walk recursively through the directory tree, finding all files
1140 Walk recursively through the directory tree, finding all files
1142 matched by match.
1141 matched by match.
1143
1142
1144 If full is False, maybe skip some known-clean files.
1143 If full is False, maybe skip some known-clean files.
1145
1144
1146 Return a dict mapping filename to stat-like object (either
1145 Return a dict mapping filename to stat-like object (either
1147 mercurial.osutil.stat instance or return value of os.stat()).
1146 mercurial.osutil.stat instance or return value of os.stat()).
1148
1147
1149 """
1148 """
1150 # full is a flag that extensions that hook into walk can use -- this
1149 # full is a flag that extensions that hook into walk can use -- this
1151 # implementation doesn't use it at all. This satisfies the contract
1150 # implementation doesn't use it at all. This satisfies the contract
1152 # because we only guarantee a "maybe".
1151 # because we only guarantee a "maybe".
1153
1152
1154 if ignored:
1153 if ignored:
1155 ignore = util.never
1154 ignore = util.never
1156 dirignore = util.never
1155 dirignore = util.never
1157 elif unknown:
1156 elif unknown:
1158 ignore = self._ignore
1157 ignore = self._ignore
1159 dirignore = self._dirignore
1158 dirignore = self._dirignore
1160 else:
1159 else:
1161 # if not unknown and not ignored, drop dir recursion and step 2
1160 # if not unknown and not ignored, drop dir recursion and step 2
1162 ignore = util.always
1161 ignore = util.always
1163 dirignore = util.always
1162 dirignore = util.always
1164
1163
1165 matchfn = match.matchfn
1164 matchfn = match.matchfn
1166 matchalways = match.always()
1165 matchalways = match.always()
1167 matchtdir = match.traversedir
1166 matchtdir = match.traversedir
1168 dmap = self._map
1167 dmap = self._map
1169 listdir = util.listdir
1168 listdir = util.listdir
1170 lstat = os.lstat
1169 lstat = os.lstat
1171 dirkind = stat.S_IFDIR
1170 dirkind = stat.S_IFDIR
1172 regkind = stat.S_IFREG
1171 regkind = stat.S_IFREG
1173 lnkkind = stat.S_IFLNK
1172 lnkkind = stat.S_IFLNK
1174 join = self._join
1173 join = self._join
1175
1174
1176 exact = skipstep3 = False
1175 exact = skipstep3 = False
1177 if match.isexact(): # match.exact
1176 if match.isexact(): # match.exact
1178 exact = True
1177 exact = True
1179 dirignore = util.always # skip step 2
1178 dirignore = util.always # skip step 2
1180 elif match.prefix(): # match.match, no patterns
1179 elif match.prefix(): # match.match, no patterns
1181 skipstep3 = True
1180 skipstep3 = True
1182
1181
1183 if not exact and self._checkcase:
1182 if not exact and self._checkcase:
1184 normalize = self._normalize
1183 normalize = self._normalize
1185 normalizefile = self._normalizefile
1184 normalizefile = self._normalizefile
1186 skipstep3 = False
1185 skipstep3 = False
1187 else:
1186 else:
1188 normalize = self._normalize
1187 normalize = self._normalize
1189 normalizefile = None
1188 normalizefile = None
1190
1189
1191 # step 1: find all explicit files
1190 # step 1: find all explicit files
1192 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1191 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1193 if matchtdir:
1192 if matchtdir:
1194 for d in work:
1193 for d in work:
1195 matchtdir(d[0])
1194 matchtdir(d[0])
1196 for d in dirsnotfound:
1195 for d in dirsnotfound:
1197 matchtdir(d)
1196 matchtdir(d)
1198
1197
1199 skipstep3 = skipstep3 and not (work or dirsnotfound)
1198 skipstep3 = skipstep3 and not (work or dirsnotfound)
1200 work = [d for d in work if not dirignore(d[0])]
1199 work = [d for d in work if not dirignore(d[0])]
1201
1200
1202 # step 2: visit subdirectories
1201 # step 2: visit subdirectories
1203 def traverse(work, alreadynormed):
1202 def traverse(work, alreadynormed):
1204 wadd = work.append
1203 wadd = work.append
1205 while work:
1204 while work:
1206 tracing.counter('dirstate.walk work', len(work))
1205 tracing.counter('dirstate.walk work', len(work))
1207 nd = work.pop()
1206 nd = work.pop()
1208 visitentries = match.visitchildrenset(nd)
1207 visitentries = match.visitchildrenset(nd)
1209 if not visitentries:
1208 if not visitentries:
1210 continue
1209 continue
1211 if visitentries == b'this' or visitentries == b'all':
1210 if visitentries == b'this' or visitentries == b'all':
1212 visitentries = None
1211 visitentries = None
1213 skip = None
1212 skip = None
1214 if nd != b'':
1213 if nd != b'':
1215 skip = b'.hg'
1214 skip = b'.hg'
1216 try:
1215 try:
1217 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1216 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1218 entries = listdir(join(nd), stat=True, skip=skip)
1217 entries = listdir(join(nd), stat=True, skip=skip)
1219 except OSError as inst:
1218 except OSError as inst:
1220 if inst.errno in (errno.EACCES, errno.ENOENT):
1219 if inst.errno in (errno.EACCES, errno.ENOENT):
1221 match.bad(
1220 match.bad(
1222 self.pathto(nd), encoding.strtolocal(inst.strerror)
1221 self.pathto(nd), encoding.strtolocal(inst.strerror)
1223 )
1222 )
1224 continue
1223 continue
1225 raise
1224 raise
1226 for f, kind, st in entries:
1225 for f, kind, st in entries:
1227 # Some matchers may return files in the visitentries set,
1226 # Some matchers may return files in the visitentries set,
1228 # instead of 'this', if the matcher explicitly mentions them
1227 # instead of 'this', if the matcher explicitly mentions them
1229 # and is not an exactmatcher. This is acceptable; we do not
1228 # and is not an exactmatcher. This is acceptable; we do not
1230 # make any hard assumptions about file-or-directory below
1229 # make any hard assumptions about file-or-directory below
1231 # based on the presence of `f` in visitentries. If
1230 # based on the presence of `f` in visitentries. If
1232 # visitchildrenset returned a set, we can always skip the
1231 # visitchildrenset returned a set, we can always skip the
1233 # entries *not* in the set it provided regardless of whether
1232 # entries *not* in the set it provided regardless of whether
1234 # they're actually a file or a directory.
1233 # they're actually a file or a directory.
1235 if visitentries and f not in visitentries:
1234 if visitentries and f not in visitentries:
1236 continue
1235 continue
1237 if normalizefile:
1236 if normalizefile:
1238 # even though f might be a directory, we're only
1237 # even though f might be a directory, we're only
1239 # interested in comparing it to files currently in the
1238 # interested in comparing it to files currently in the
1240 # dmap -- therefore normalizefile is enough
1239 # dmap -- therefore normalizefile is enough
1241 nf = normalizefile(
1240 nf = normalizefile(
1242 nd and (nd + b"/" + f) or f, True, True
1241 nd and (nd + b"/" + f) or f, True, True
1243 )
1242 )
1244 else:
1243 else:
1245 nf = nd and (nd + b"/" + f) or f
1244 nf = nd and (nd + b"/" + f) or f
1246 if nf not in results:
1245 if nf not in results:
1247 if kind == dirkind:
1246 if kind == dirkind:
1248 if not ignore(nf):
1247 if not ignore(nf):
1249 if matchtdir:
1248 if matchtdir:
1250 matchtdir(nf)
1249 matchtdir(nf)
1251 wadd(nf)
1250 wadd(nf)
1252 if nf in dmap and (matchalways or matchfn(nf)):
1251 if nf in dmap and (matchalways or matchfn(nf)):
1253 results[nf] = None
1252 results[nf] = None
1254 elif kind == regkind or kind == lnkkind:
1253 elif kind == regkind or kind == lnkkind:
1255 if nf in dmap:
1254 if nf in dmap:
1256 if matchalways or matchfn(nf):
1255 if matchalways or matchfn(nf):
1257 results[nf] = st
1256 results[nf] = st
1258 elif (matchalways or matchfn(nf)) and not ignore(
1257 elif (matchalways or matchfn(nf)) and not ignore(
1259 nf
1258 nf
1260 ):
1259 ):
1261 # unknown file -- normalize if necessary
1260 # unknown file -- normalize if necessary
1262 if not alreadynormed:
1261 if not alreadynormed:
1263 nf = normalize(nf, False, True)
1262 nf = normalize(nf, False, True)
1264 results[nf] = st
1263 results[nf] = st
1265 elif nf in dmap and (matchalways or matchfn(nf)):
1264 elif nf in dmap and (matchalways or matchfn(nf)):
1266 results[nf] = None
1265 results[nf] = None
1267
1266
1268 for nd, d in work:
1267 for nd, d in work:
1269 # alreadynormed means that processwork doesn't have to do any
1268 # alreadynormed means that processwork doesn't have to do any
1270 # expensive directory normalization
1269 # expensive directory normalization
1271 alreadynormed = not normalize or nd == d
1270 alreadynormed = not normalize or nd == d
1272 traverse([d], alreadynormed)
1271 traverse([d], alreadynormed)
1273
1272
1274 for s in subrepos:
1273 for s in subrepos:
1275 del results[s]
1274 del results[s]
1276 del results[b'.hg']
1275 del results[b'.hg']
1277
1276
1278 # step 3: visit remaining files from dmap
1277 # step 3: visit remaining files from dmap
1279 if not skipstep3 and not exact:
1278 if not skipstep3 and not exact:
1280 # If a dmap file is not in results yet, it was either
1279 # If a dmap file is not in results yet, it was either
1281 # a) not matching matchfn b) ignored, c) missing, or d) under a
1280 # a) not matching matchfn b) ignored, c) missing, or d) under a
1282 # symlink directory.
1281 # symlink directory.
1283 if not results and matchalways:
1282 if not results and matchalways:
1284 visit = [f for f in dmap]
1283 visit = [f for f in dmap]
1285 else:
1284 else:
1286 visit = [f for f in dmap if f not in results and matchfn(f)]
1285 visit = [f for f in dmap if f not in results and matchfn(f)]
1287 visit.sort()
1286 visit.sort()
1288
1287
1289 if unknown:
1288 if unknown:
1290 # unknown == True means we walked all dirs under the roots
1289 # unknown == True means we walked all dirs under the roots
1291 # that wasn't ignored, and everything that matched was stat'ed
1290 # that wasn't ignored, and everything that matched was stat'ed
1292 # and is already in results.
1291 # and is already in results.
1293 # The rest must thus be ignored or under a symlink.
1292 # The rest must thus be ignored or under a symlink.
1294 audit_path = pathutil.pathauditor(self._root, cached=True)
1293 audit_path = pathutil.pathauditor(self._root, cached=True)
1295
1294
1296 for nf in iter(visit):
1295 for nf in iter(visit):
1297 # If a stat for the same file was already added with a
1296 # If a stat for the same file was already added with a
1298 # different case, don't add one for this, since that would
1297 # different case, don't add one for this, since that would
1299 # make it appear as if the file exists under both names
1298 # make it appear as if the file exists under both names
1300 # on disk.
1299 # on disk.
1301 if (
1300 if (
1302 normalizefile
1301 normalizefile
1303 and normalizefile(nf, True, True) in results
1302 and normalizefile(nf, True, True) in results
1304 ):
1303 ):
1305 results[nf] = None
1304 results[nf] = None
1306 # Report ignored items in the dmap as long as they are not
1305 # Report ignored items in the dmap as long as they are not
1307 # under a symlink directory.
1306 # under a symlink directory.
1308 elif audit_path.check(nf):
1307 elif audit_path.check(nf):
1309 try:
1308 try:
1310 results[nf] = lstat(join(nf))
1309 results[nf] = lstat(join(nf))
1311 # file was just ignored, no links, and exists
1310 # file was just ignored, no links, and exists
1312 except OSError:
1311 except OSError:
1313 # file doesn't exist
1312 # file doesn't exist
1314 results[nf] = None
1313 results[nf] = None
1315 else:
1314 else:
1316 # It's either missing or under a symlink directory
1315 # It's either missing or under a symlink directory
1317 # which we in this case report as missing
1316 # which we in this case report as missing
1318 results[nf] = None
1317 results[nf] = None
1319 else:
1318 else:
1320 # We may not have walked the full directory tree above,
1319 # We may not have walked the full directory tree above,
1321 # so stat and check everything we missed.
1320 # so stat and check everything we missed.
1322 iv = iter(visit)
1321 iv = iter(visit)
1323 for st in util.statfiles([join(i) for i in visit]):
1322 for st in util.statfiles([join(i) for i in visit]):
1324 results[next(iv)] = st
1323 results[next(iv)] = st
1325 return results
1324 return results
1326
1325
1327 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1326 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1328 # Force Rayon (Rust parallelism library) to respect the number of
1327 # Force Rayon (Rust parallelism library) to respect the number of
1329 # workers. This is a temporary workaround until Rust code knows
1328 # workers. This is a temporary workaround until Rust code knows
1330 # how to read the config file.
1329 # how to read the config file.
1331 numcpus = self._ui.configint(b"worker", b"numcpus")
1330 numcpus = self._ui.configint(b"worker", b"numcpus")
1332 if numcpus is not None:
1331 if numcpus is not None:
1333 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1332 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1334
1333
1335 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1334 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1336 if not workers_enabled:
1335 if not workers_enabled:
1337 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1336 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1338
1337
1339 (
1338 (
1340 lookup,
1339 lookup,
1341 modified,
1340 modified,
1342 added,
1341 added,
1343 removed,
1342 removed,
1344 deleted,
1343 deleted,
1345 clean,
1344 clean,
1346 ignored,
1345 ignored,
1347 unknown,
1346 unknown,
1348 warnings,
1347 warnings,
1349 bad,
1348 bad,
1350 traversed,
1349 traversed,
1351 dirty,
1350 dirty,
1352 ) = rustmod.status(
1351 ) = rustmod.status(
1353 self._map._rustmap,
1352 self._map._rustmap,
1354 matcher,
1353 matcher,
1355 self._rootdir,
1354 self._rootdir,
1356 self._ignorefiles(),
1355 self._ignorefiles(),
1357 self._checkexec,
1356 self._checkexec,
1358 self._lastnormaltime,
1357 self._lastnormaltime,
1359 bool(list_clean),
1358 bool(list_clean),
1360 bool(list_ignored),
1359 bool(list_ignored),
1361 bool(list_unknown),
1360 bool(list_unknown),
1362 bool(matcher.traversedir),
1361 bool(matcher.traversedir),
1363 )
1362 )
1364
1363
1365 self._dirty |= dirty
1364 self._dirty |= dirty
1366
1365
1367 if matcher.traversedir:
1366 if matcher.traversedir:
1368 for dir in traversed:
1367 for dir in traversed:
1369 matcher.traversedir(dir)
1368 matcher.traversedir(dir)
1370
1369
1371 if self._ui.warn:
1370 if self._ui.warn:
1372 for item in warnings:
1371 for item in warnings:
1373 if isinstance(item, tuple):
1372 if isinstance(item, tuple):
1374 file_path, syntax = item
1373 file_path, syntax = item
1375 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1374 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1376 file_path,
1375 file_path,
1377 syntax,
1376 syntax,
1378 )
1377 )
1379 self._ui.warn(msg)
1378 self._ui.warn(msg)
1380 else:
1379 else:
1381 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1380 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1382 self._ui.warn(
1381 self._ui.warn(
1383 msg
1382 msg
1384 % (
1383 % (
1385 pathutil.canonpath(
1384 pathutil.canonpath(
1386 self._rootdir, self._rootdir, item
1385 self._rootdir, self._rootdir, item
1387 ),
1386 ),
1388 b"No such file or directory",
1387 b"No such file or directory",
1389 )
1388 )
1390 )
1389 )
1391
1390
1392 for (fn, message) in bad:
1391 for (fn, message) in bad:
1393 matcher.bad(fn, encoding.strtolocal(message))
1392 matcher.bad(fn, encoding.strtolocal(message))
1394
1393
1395 status = scmutil.status(
1394 status = scmutil.status(
1396 modified=modified,
1395 modified=modified,
1397 added=added,
1396 added=added,
1398 removed=removed,
1397 removed=removed,
1399 deleted=deleted,
1398 deleted=deleted,
1400 unknown=unknown,
1399 unknown=unknown,
1401 ignored=ignored,
1400 ignored=ignored,
1402 clean=clean,
1401 clean=clean,
1403 )
1402 )
1404 return (lookup, status)
1403 return (lookup, status)
1405
1404
1406 def status(self, match, subrepos, ignored, clean, unknown):
1405 def status(self, match, subrepos, ignored, clean, unknown):
1407 """Determine the status of the working copy relative to the
1406 """Determine the status of the working copy relative to the
1408 dirstate and return a pair of (unsure, status), where status is of type
1407 dirstate and return a pair of (unsure, status), where status is of type
1409 scmutil.status and:
1408 scmutil.status and:
1410
1409
1411 unsure:
1410 unsure:
1412 files that might have been modified since the dirstate was
1411 files that might have been modified since the dirstate was
1413 written, but need to be read to be sure (size is the same
1412 written, but need to be read to be sure (size is the same
1414 but mtime differs)
1413 but mtime differs)
1415 status.modified:
1414 status.modified:
1416 files that have definitely been modified since the dirstate
1415 files that have definitely been modified since the dirstate
1417 was written (different size or mode)
1416 was written (different size or mode)
1418 status.clean:
1417 status.clean:
1419 files that have definitely not been modified since the
1418 files that have definitely not been modified since the
1420 dirstate was written
1419 dirstate was written
1421 """
1420 """
1422 listignored, listclean, listunknown = ignored, clean, unknown
1421 listignored, listclean, listunknown = ignored, clean, unknown
1423 lookup, modified, added, unknown, ignored = [], [], [], [], []
1422 lookup, modified, added, unknown, ignored = [], [], [], [], []
1424 removed, deleted, clean = [], [], []
1423 removed, deleted, clean = [], [], []
1425
1424
1426 dmap = self._map
1425 dmap = self._map
1427 dmap.preload()
1426 dmap.preload()
1428
1427
1429 use_rust = True
1428 use_rust = True
1430
1429
1431 allowed_matchers = (
1430 allowed_matchers = (
1432 matchmod.alwaysmatcher,
1431 matchmod.alwaysmatcher,
1433 matchmod.exactmatcher,
1432 matchmod.exactmatcher,
1434 matchmod.includematcher,
1433 matchmod.includematcher,
1435 )
1434 )
1436
1435
1437 if rustmod is None:
1436 if rustmod is None:
1438 use_rust = False
1437 use_rust = False
1439 elif self._checkcase:
1438 elif self._checkcase:
1440 # Case-insensitive filesystems are not handled yet
1439 # Case-insensitive filesystems are not handled yet
1441 use_rust = False
1440 use_rust = False
1442 elif subrepos:
1441 elif subrepos:
1443 use_rust = False
1442 use_rust = False
1444 elif sparse.enabled:
1443 elif sparse.enabled:
1445 use_rust = False
1444 use_rust = False
1446 elif not isinstance(match, allowed_matchers):
1445 elif not isinstance(match, allowed_matchers):
1447 # Some matchers have yet to be implemented
1446 # Some matchers have yet to be implemented
1448 use_rust = False
1447 use_rust = False
1449
1448
1450 if use_rust:
1449 if use_rust:
1451 try:
1450 try:
1452 return self._rust_status(
1451 return self._rust_status(
1453 match, listclean, listignored, listunknown
1452 match, listclean, listignored, listunknown
1454 )
1453 )
1455 except rustmod.FallbackError:
1454 except rustmod.FallbackError:
1456 pass
1455 pass
1457
1456
1458 def noop(f):
1457 def noop(f):
1459 pass
1458 pass
1460
1459
1461 dcontains = dmap.__contains__
1460 dcontains = dmap.__contains__
1462 dget = dmap.__getitem__
1461 dget = dmap.__getitem__
1463 ladd = lookup.append # aka "unsure"
1462 ladd = lookup.append # aka "unsure"
1464 madd = modified.append
1463 madd = modified.append
1465 aadd = added.append
1464 aadd = added.append
1466 uadd = unknown.append if listunknown else noop
1465 uadd = unknown.append if listunknown else noop
1467 iadd = ignored.append if listignored else noop
1466 iadd = ignored.append if listignored else noop
1468 radd = removed.append
1467 radd = removed.append
1469 dadd = deleted.append
1468 dadd = deleted.append
1470 cadd = clean.append if listclean else noop
1469 cadd = clean.append if listclean else noop
1471 mexact = match.exact
1470 mexact = match.exact
1472 dirignore = self._dirignore
1471 dirignore = self._dirignore
1473 checkexec = self._checkexec
1472 checkexec = self._checkexec
1474 copymap = self._map.copymap
1473 copymap = self._map.copymap
1475 lastnormaltime = self._lastnormaltime
1474 lastnormaltime = self._lastnormaltime
1476
1475
1477 # We need to do full walks when either
1476 # We need to do full walks when either
1478 # - we're listing all clean files, or
1477 # - we're listing all clean files, or
1479 # - match.traversedir does something, because match.traversedir should
1478 # - match.traversedir does something, because match.traversedir should
1480 # be called for every dir in the working dir
1479 # be called for every dir in the working dir
1481 full = listclean or match.traversedir is not None
1480 full = listclean or match.traversedir is not None
1482 for fn, st in pycompat.iteritems(
1481 for fn, st in pycompat.iteritems(
1483 self.walk(match, subrepos, listunknown, listignored, full=full)
1482 self.walk(match, subrepos, listunknown, listignored, full=full)
1484 ):
1483 ):
1485 if not dcontains(fn):
1484 if not dcontains(fn):
1486 if (listignored or mexact(fn)) and dirignore(fn):
1485 if (listignored or mexact(fn)) and dirignore(fn):
1487 if listignored:
1486 if listignored:
1488 iadd(fn)
1487 iadd(fn)
1489 else:
1488 else:
1490 uadd(fn)
1489 uadd(fn)
1491 continue
1490 continue
1492
1491
1493 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1492 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1494 # written like that for performance reasons. dmap[fn] is not a
1493 # written like that for performance reasons. dmap[fn] is not a
1495 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1494 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1496 # opcode has fast paths when the value to be unpacked is a tuple or
1495 # opcode has fast paths when the value to be unpacked is a tuple or
1497 # a list, but falls back to creating a full-fledged iterator in
1496 # a list, but falls back to creating a full-fledged iterator in
1498 # general. That is much slower than simply accessing and storing the
1497 # general. That is much slower than simply accessing and storing the
1499 # tuple members one by one.
1498 # tuple members one by one.
1500 t = dget(fn)
1499 t = dget(fn)
1501 mode = t.mode
1500 mode = t.mode
1502 size = t.size
1501 size = t.size
1503 time = t.mtime
1502 time = t.mtime
1504
1503
1505 if not st and t.tracked:
1504 if not st and t.tracked:
1506 dadd(fn)
1505 dadd(fn)
1507 elif t.merged:
1506 elif t.merged:
1508 madd(fn)
1507 madd(fn)
1509 elif t.added:
1508 elif t.added:
1510 aadd(fn)
1509 aadd(fn)
1511 elif t.removed:
1510 elif t.removed:
1512 radd(fn)
1511 radd(fn)
1513 elif t.tracked:
1512 elif t.tracked:
1514 if (
1513 if (
1515 size >= 0
1514 size >= 0
1516 and (
1515 and (
1517 (size != st.st_size and size != st.st_size & _rangemask)
1516 (size != st.st_size and size != st.st_size & _rangemask)
1518 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1517 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1519 )
1518 )
1520 or t.from_p2
1519 or t.from_p2
1521 or fn in copymap
1520 or fn in copymap
1522 ):
1521 ):
1523 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1522 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1524 # issue6456: Size returned may be longer due to
1523 # issue6456: Size returned may be longer due to
1525 # encryption on EXT-4 fscrypt, undecided.
1524 # encryption on EXT-4 fscrypt, undecided.
1526 ladd(fn)
1525 ladd(fn)
1527 else:
1526 else:
1528 madd(fn)
1527 madd(fn)
1529 elif (
1528 elif (
1530 time != st[stat.ST_MTIME]
1529 time != st[stat.ST_MTIME]
1531 and time != st[stat.ST_MTIME] & _rangemask
1530 and time != st[stat.ST_MTIME] & _rangemask
1532 ):
1531 ):
1533 ladd(fn)
1532 ladd(fn)
1534 elif st[stat.ST_MTIME] == lastnormaltime:
1533 elif st[stat.ST_MTIME] == lastnormaltime:
1535 # fn may have just been marked as normal and it may have
1534 # fn may have just been marked as normal and it may have
1536 # changed in the same second without changing its size.
1535 # changed in the same second without changing its size.
1537 # This can happen if we quickly do multiple commits.
1536 # This can happen if we quickly do multiple commits.
1538 # Force lookup, so we don't miss such a racy file change.
1537 # Force lookup, so we don't miss such a racy file change.
1539 ladd(fn)
1538 ladd(fn)
1540 elif listclean:
1539 elif listclean:
1541 cadd(fn)
1540 cadd(fn)
1542 status = scmutil.status(
1541 status = scmutil.status(
1543 modified, added, removed, deleted, unknown, ignored, clean
1542 modified, added, removed, deleted, unknown, ignored, clean
1544 )
1543 )
1545 return (lookup, status)
1544 return (lookup, status)
1546
1545
1547 def matches(self, match):
1546 def matches(self, match):
1548 """
1547 """
1549 return files in the dirstate (in whatever state) filtered by match
1548 return files in the dirstate (in whatever state) filtered by match
1550 """
1549 """
1551 dmap = self._map
1550 dmap = self._map
1552 if rustmod is not None:
1551 if rustmod is not None:
1553 dmap = self._map._rustmap
1552 dmap = self._map._rustmap
1554
1553
1555 if match.always():
1554 if match.always():
1556 return dmap.keys()
1555 return dmap.keys()
1557 files = match.files()
1556 files = match.files()
1558 if match.isexact():
1557 if match.isexact():
1559 # fast path -- filter the other way around, since typically files is
1558 # fast path -- filter the other way around, since typically files is
1560 # much smaller than dmap
1559 # much smaller than dmap
1561 return [f for f in files if f in dmap]
1560 return [f for f in files if f in dmap]
1562 if match.prefix() and all(fn in dmap for fn in files):
1561 if match.prefix() and all(fn in dmap for fn in files):
1563 # fast path -- all the values are known to be files, so just return
1562 # fast path -- all the values are known to be files, so just return
1564 # that
1563 # that
1565 return list(files)
1564 return list(files)
1566 return [f for f in dmap if match(f)]
1565 return [f for f in dmap if match(f)]
1567
1566
1568 def _actualfilename(self, tr):
1567 def _actualfilename(self, tr):
1569 if tr:
1568 if tr:
1570 return self._pendingfilename
1569 return self._pendingfilename
1571 else:
1570 else:
1572 return self._filename
1571 return self._filename
1573
1572
1574 def savebackup(self, tr, backupname):
1573 def savebackup(self, tr, backupname):
1575 '''Save current dirstate into backup file'''
1574 '''Save current dirstate into backup file'''
1576 filename = self._actualfilename(tr)
1575 filename = self._actualfilename(tr)
1577 assert backupname != filename
1576 assert backupname != filename
1578
1577
1579 # use '_writedirstate' instead of 'write' to write changes certainly,
1578 # use '_writedirstate' instead of 'write' to write changes certainly,
1580 # because the latter omits writing out if transaction is running.
1579 # because the latter omits writing out if transaction is running.
1581 # output file will be used to create backup of dirstate at this point.
1580 # output file will be used to create backup of dirstate at this point.
1582 if self._dirty or not self._opener.exists(filename):
1581 if self._dirty or not self._opener.exists(filename):
1583 self._writedirstate(
1582 self._writedirstate(
1584 self._opener(filename, b"w", atomictemp=True, checkambig=True)
1583 self._opener(filename, b"w", atomictemp=True, checkambig=True)
1585 )
1584 )
1586
1585
1587 if tr:
1586 if tr:
1588 # ensure that subsequent tr.writepending returns True for
1587 # ensure that subsequent tr.writepending returns True for
1589 # changes written out above, even if dirstate is never
1588 # changes written out above, even if dirstate is never
1590 # changed after this
1589 # changed after this
1591 tr.addfilegenerator(
1590 tr.addfilegenerator(
1592 b'dirstate',
1591 b'dirstate',
1593 (self._filename,),
1592 (self._filename,),
1594 self._writedirstate,
1593 self._writedirstate,
1595 location=b'plain',
1594 location=b'plain',
1596 )
1595 )
1597
1596
1598 # ensure that pending file written above is unlinked at
1597 # ensure that pending file written above is unlinked at
1599 # failure, even if tr.writepending isn't invoked until the
1598 # failure, even if tr.writepending isn't invoked until the
1600 # end of this transaction
1599 # end of this transaction
1601 tr.registertmp(filename, location=b'plain')
1600 tr.registertmp(filename, location=b'plain')
1602
1601
1603 self._opener.tryunlink(backupname)
1602 self._opener.tryunlink(backupname)
1604 # hardlink backup is okay because _writedirstate is always called
1603 # hardlink backup is okay because _writedirstate is always called
1605 # with an "atomictemp=True" file.
1604 # with an "atomictemp=True" file.
1606 util.copyfile(
1605 util.copyfile(
1607 self._opener.join(filename),
1606 self._opener.join(filename),
1608 self._opener.join(backupname),
1607 self._opener.join(backupname),
1609 hardlink=True,
1608 hardlink=True,
1610 )
1609 )
1611
1610
1612 def restorebackup(self, tr, backupname):
1611 def restorebackup(self, tr, backupname):
1613 '''Restore dirstate by backup file'''
1612 '''Restore dirstate by backup file'''
1614 # this "invalidate()" prevents "wlock.release()" from writing
1613 # this "invalidate()" prevents "wlock.release()" from writing
1615 # changes of dirstate out after restoring from backup file
1614 # changes of dirstate out after restoring from backup file
1616 self.invalidate()
1615 self.invalidate()
1617 filename = self._actualfilename(tr)
1616 filename = self._actualfilename(tr)
1618 o = self._opener
1617 o = self._opener
1619 if util.samefile(o.join(backupname), o.join(filename)):
1618 if util.samefile(o.join(backupname), o.join(filename)):
1620 o.unlink(backupname)
1619 o.unlink(backupname)
1621 else:
1620 else:
1622 o.rename(backupname, filename, checkambig=True)
1621 o.rename(backupname, filename, checkambig=True)
1623
1622
1624 def clearbackup(self, tr, backupname):
1623 def clearbackup(self, tr, backupname):
1625 '''Clear backup file'''
1624 '''Clear backup file'''
1626 self._opener.unlink(backupname)
1625 self._opener.unlink(backupname)
General Comments 0
You need to be logged in to leave comments. Login now