##// END OF EJS Templates
dirstate: deprecated `remove` outside of `update/merge`...
marmoute -
r48462:1a79bb8c default
parent child Browse files
Show More
@@ -1,1616 +1,1623 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .pycompat import delattr
17 from .pycompat import delattr
18
18
19 from hgdemandimport import tracing
19 from hgdemandimport import tracing
20
20
21 from . import (
21 from . import (
22 dirstatemap,
22 dirstatemap,
23 encoding,
23 encoding,
24 error,
24 error,
25 match as matchmod,
25 match as matchmod,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 sparse,
30 sparse,
31 util,
31 util,
32 )
32 )
33
33
34 from .interfaces import (
34 from .interfaces import (
35 dirstate as intdirstate,
35 dirstate as intdirstate,
36 util as interfaceutil,
36 util as interfaceutil,
37 )
37 )
38
38
39 parsers = policy.importmod('parsers')
39 parsers = policy.importmod('parsers')
40 rustmod = policy.importrust('dirstate')
40 rustmod = policy.importrust('dirstate')
41
41
42 SUPPORTS_DIRSTATE_V2 = rustmod is not None
42 SUPPORTS_DIRSTATE_V2 = rustmod is not None
43
43
44 propertycache = util.propertycache
44 propertycache = util.propertycache
45 filecache = scmutil.filecache
45 filecache = scmutil.filecache
46 _rangemask = dirstatemap.rangemask
46 _rangemask = dirstatemap.rangemask
47
47
48 DirstateItem = parsers.DirstateItem
48 DirstateItem = parsers.DirstateItem
49
49
50
50
class repocache(filecache):
    """filecache for files in .hg/

    Resolves cached filenames relative to the repository's ``.hg``
    directory via the dirstate's opener.
    """

    def join(self, obj, fname):
        # obj is the dirstate instance owning the cache entry
        return obj._opener.join(fname)
57
57
class rootcache(filecache):
    """filecache for files in the repository root

    Resolves cached filenames relative to the working-directory root
    (e.g. ``.hgignore``).
    """

    def join(self, obj, fname):
        # obj is the dirstate instance owning the cache entry
        return obj._join(fname)
64
64
def _getfsnow(vfs):
    """Get "now" timestamp on filesystem

    Creates (and removes) a temporary file through ``vfs`` so the
    returned mtime reflects the clock of the filesystem actually
    backing the working directory, not the local system clock.
    """
    tmpfd, tmpname = vfs.mkstemp()
    try:
        return os.fstat(tmpfd)[stat.ST_MTIME]
    finally:
        # Always release the descriptor and the temp file, even if
        # fstat raises.
        os.close(tmpfd)
        vfs.unlink(tmpname)
74
74
def requires_parents_change(func):
    """Decorator enforcing that ``func`` runs inside ``parentchange``.

    Raises error.ProgrammingError when the wrapped dirstate method is
    invoked outside of a ``with dirstate.parentchange():`` context.
    """

    def wrap(self, *args, **kwargs):
        if not self.pendingparentchange():
            msg = 'calling `%s` outside of a parentchange context'
            msg %= func.__name__
            raise error.ProgrammingError(msg)
        return func(self, *args, **kwargs)

    return wrap
85
85
def requires_no_parents_change(func):
    """Decorator enforcing that ``func`` runs outside ``parentchange``.

    Mirror image of ``requires_parents_change``: raises
    error.ProgrammingError when the wrapped dirstate method is invoked
    from within a ``with dirstate.parentchange():`` context.
    """

    def wrap(self, *args, **kwargs):
        if self.pendingparentchange():
            msg = 'calling `%s` inside of a parentchange context'
            msg %= func.__name__
            raise error.ProgrammingError(msg)
        return func(self, *args, **kwargs)

    return wrap
96
96
97 @interfaceutil.implementer(intdirstate.idirstate)
97 @interfaceutil.implementer(intdirstate.idirstate)
98 class dirstate(object):
98 class dirstate(object):
99 def __init__(
99 def __init__(
100 self,
100 self,
101 opener,
101 opener,
102 ui,
102 ui,
103 root,
103 root,
104 validate,
104 validate,
105 sparsematchfn,
105 sparsematchfn,
106 nodeconstants,
106 nodeconstants,
107 use_dirstate_v2,
107 use_dirstate_v2,
108 ):
108 ):
109 """Create a new dirstate object.
109 """Create a new dirstate object.
110
110
111 opener is an open()-like callable that can be used to open the
111 opener is an open()-like callable that can be used to open the
112 dirstate file; root is the root of the directory tracked by
112 dirstate file; root is the root of the directory tracked by
113 the dirstate.
113 the dirstate.
114 """
114 """
115 self._use_dirstate_v2 = use_dirstate_v2
115 self._use_dirstate_v2 = use_dirstate_v2
116 self._nodeconstants = nodeconstants
116 self._nodeconstants = nodeconstants
117 self._opener = opener
117 self._opener = opener
118 self._validate = validate
118 self._validate = validate
119 self._root = root
119 self._root = root
120 self._sparsematchfn = sparsematchfn
120 self._sparsematchfn = sparsematchfn
121 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
121 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
122 # UNC path pointing to root share (issue4557)
122 # UNC path pointing to root share (issue4557)
123 self._rootdir = pathutil.normasprefix(root)
123 self._rootdir = pathutil.normasprefix(root)
124 self._dirty = False
124 self._dirty = False
125 self._lastnormaltime = 0
125 self._lastnormaltime = 0
126 self._ui = ui
126 self._ui = ui
127 self._filecache = {}
127 self._filecache = {}
128 self._parentwriters = 0
128 self._parentwriters = 0
129 self._filename = b'dirstate'
129 self._filename = b'dirstate'
130 self._pendingfilename = b'%s.pending' % self._filename
130 self._pendingfilename = b'%s.pending' % self._filename
131 self._plchangecallbacks = {}
131 self._plchangecallbacks = {}
132 self._origpl = None
132 self._origpl = None
133 self._updatedfiles = set()
133 self._updatedfiles = set()
134 self._mapcls = dirstatemap.dirstatemap
134 self._mapcls = dirstatemap.dirstatemap
135 # Access and cache cwd early, so we don't access it for the first time
135 # Access and cache cwd early, so we don't access it for the first time
136 # after a working-copy update caused it to not exist (accessing it then
136 # after a working-copy update caused it to not exist (accessing it then
137 # raises an exception).
137 # raises an exception).
138 self._cwd
138 self._cwd
139
139
140 def prefetch_parents(self):
140 def prefetch_parents(self):
141 """make sure the parents are loaded
141 """make sure the parents are loaded
142
142
143 Used to avoid a race condition.
143 Used to avoid a race condition.
144 """
144 """
145 self._pl
145 self._pl
146
146
147 @contextlib.contextmanager
147 @contextlib.contextmanager
148 def parentchange(self):
148 def parentchange(self):
149 """Context manager for handling dirstate parents.
149 """Context manager for handling dirstate parents.
150
150
151 If an exception occurs in the scope of the context manager,
151 If an exception occurs in the scope of the context manager,
152 the incoherent dirstate won't be written when wlock is
152 the incoherent dirstate won't be written when wlock is
153 released.
153 released.
154 """
154 """
155 self._parentwriters += 1
155 self._parentwriters += 1
156 yield
156 yield
157 # Typically we want the "undo" step of a context manager in a
157 # Typically we want the "undo" step of a context manager in a
158 # finally block so it happens even when an exception
158 # finally block so it happens even when an exception
159 # occurs. In this case, however, we only want to decrement
159 # occurs. In this case, however, we only want to decrement
160 # parentwriters if the code in the with statement exits
160 # parentwriters if the code in the with statement exits
161 # normally, so we don't have a try/finally here on purpose.
161 # normally, so we don't have a try/finally here on purpose.
162 self._parentwriters -= 1
162 self._parentwriters -= 1
163
163
164 def pendingparentchange(self):
164 def pendingparentchange(self):
165 """Returns true if the dirstate is in the middle of a set of changes
165 """Returns true if the dirstate is in the middle of a set of changes
166 that modify the dirstate parent.
166 that modify the dirstate parent.
167 """
167 """
168 return self._parentwriters > 0
168 return self._parentwriters > 0
169
169
170 @propertycache
170 @propertycache
171 def _map(self):
171 def _map(self):
172 """Return the dirstate contents (see documentation for dirstatemap)."""
172 """Return the dirstate contents (see documentation for dirstatemap)."""
173 self._map = self._mapcls(
173 self._map = self._mapcls(
174 self._ui,
174 self._ui,
175 self._opener,
175 self._opener,
176 self._root,
176 self._root,
177 self._nodeconstants,
177 self._nodeconstants,
178 self._use_dirstate_v2,
178 self._use_dirstate_v2,
179 )
179 )
180 return self._map
180 return self._map
181
181
182 @property
182 @property
183 def _sparsematcher(self):
183 def _sparsematcher(self):
184 """The matcher for the sparse checkout.
184 """The matcher for the sparse checkout.
185
185
186 The working directory may not include every file from a manifest. The
186 The working directory may not include every file from a manifest. The
187 matcher obtained by this property will match a path if it is to be
187 matcher obtained by this property will match a path if it is to be
188 included in the working directory.
188 included in the working directory.
189 """
189 """
190 # TODO there is potential to cache this property. For now, the matcher
190 # TODO there is potential to cache this property. For now, the matcher
191 # is resolved on every access. (But the called function does use a
191 # is resolved on every access. (But the called function does use a
192 # cache to keep the lookup fast.)
192 # cache to keep the lookup fast.)
193 return self._sparsematchfn()
193 return self._sparsematchfn()
194
194
195 @repocache(b'branch')
195 @repocache(b'branch')
196 def _branch(self):
196 def _branch(self):
197 try:
197 try:
198 return self._opener.read(b"branch").strip() or b"default"
198 return self._opener.read(b"branch").strip() or b"default"
199 except IOError as inst:
199 except IOError as inst:
200 if inst.errno != errno.ENOENT:
200 if inst.errno != errno.ENOENT:
201 raise
201 raise
202 return b"default"
202 return b"default"
203
203
204 @property
204 @property
205 def _pl(self):
205 def _pl(self):
206 return self._map.parents()
206 return self._map.parents()
207
207
208 def hasdir(self, d):
208 def hasdir(self, d):
209 return self._map.hastrackeddir(d)
209 return self._map.hastrackeddir(d)
210
210
211 @rootcache(b'.hgignore')
211 @rootcache(b'.hgignore')
212 def _ignore(self):
212 def _ignore(self):
213 files = self._ignorefiles()
213 files = self._ignorefiles()
214 if not files:
214 if not files:
215 return matchmod.never()
215 return matchmod.never()
216
216
217 pats = [b'include:%s' % f for f in files]
217 pats = [b'include:%s' % f for f in files]
218 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
218 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
219
219
220 @propertycache
220 @propertycache
221 def _slash(self):
221 def _slash(self):
222 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
222 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
223
223
224 @propertycache
224 @propertycache
225 def _checklink(self):
225 def _checklink(self):
226 return util.checklink(self._root)
226 return util.checklink(self._root)
227
227
228 @propertycache
228 @propertycache
229 def _checkexec(self):
229 def _checkexec(self):
230 return bool(util.checkexec(self._root))
230 return bool(util.checkexec(self._root))
231
231
232 @propertycache
232 @propertycache
233 def _checkcase(self):
233 def _checkcase(self):
234 return not util.fscasesensitive(self._join(b'.hg'))
234 return not util.fscasesensitive(self._join(b'.hg'))
235
235
236 def _join(self, f):
236 def _join(self, f):
237 # much faster than os.path.join()
237 # much faster than os.path.join()
238 # it's safe because f is always a relative path
238 # it's safe because f is always a relative path
239 return self._rootdir + f
239 return self._rootdir + f
240
240
241 def flagfunc(self, buildfallback):
241 def flagfunc(self, buildfallback):
242 if self._checklink and self._checkexec:
242 if self._checklink and self._checkexec:
243
243
244 def f(x):
244 def f(x):
245 try:
245 try:
246 st = os.lstat(self._join(x))
246 st = os.lstat(self._join(x))
247 if util.statislink(st):
247 if util.statislink(st):
248 return b'l'
248 return b'l'
249 if util.statisexec(st):
249 if util.statisexec(st):
250 return b'x'
250 return b'x'
251 except OSError:
251 except OSError:
252 pass
252 pass
253 return b''
253 return b''
254
254
255 return f
255 return f
256
256
257 fallback = buildfallback()
257 fallback = buildfallback()
258 if self._checklink:
258 if self._checklink:
259
259
260 def f(x):
260 def f(x):
261 if os.path.islink(self._join(x)):
261 if os.path.islink(self._join(x)):
262 return b'l'
262 return b'l'
263 if b'x' in fallback(x):
263 if b'x' in fallback(x):
264 return b'x'
264 return b'x'
265 return b''
265 return b''
266
266
267 return f
267 return f
268 if self._checkexec:
268 if self._checkexec:
269
269
270 def f(x):
270 def f(x):
271 if b'l' in fallback(x):
271 if b'l' in fallback(x):
272 return b'l'
272 return b'l'
273 if util.isexec(self._join(x)):
273 if util.isexec(self._join(x)):
274 return b'x'
274 return b'x'
275 return b''
275 return b''
276
276
277 return f
277 return f
278 else:
278 else:
279 return fallback
279 return fallback
280
280
281 @propertycache
281 @propertycache
282 def _cwd(self):
282 def _cwd(self):
283 # internal config: ui.forcecwd
283 # internal config: ui.forcecwd
284 forcecwd = self._ui.config(b'ui', b'forcecwd')
284 forcecwd = self._ui.config(b'ui', b'forcecwd')
285 if forcecwd:
285 if forcecwd:
286 return forcecwd
286 return forcecwd
287 return encoding.getcwd()
287 return encoding.getcwd()
288
288
289 def getcwd(self):
289 def getcwd(self):
290 """Return the path from which a canonical path is calculated.
290 """Return the path from which a canonical path is calculated.
291
291
292 This path should be used to resolve file patterns or to convert
292 This path should be used to resolve file patterns or to convert
293 canonical paths back to file paths for display. It shouldn't be
293 canonical paths back to file paths for display. It shouldn't be
294 used to get real file paths. Use vfs functions instead.
294 used to get real file paths. Use vfs functions instead.
295 """
295 """
296 cwd = self._cwd
296 cwd = self._cwd
297 if cwd == self._root:
297 if cwd == self._root:
298 return b''
298 return b''
299 # self._root ends with a path separator if self._root is '/' or 'C:\'
299 # self._root ends with a path separator if self._root is '/' or 'C:\'
300 rootsep = self._root
300 rootsep = self._root
301 if not util.endswithsep(rootsep):
301 if not util.endswithsep(rootsep):
302 rootsep += pycompat.ossep
302 rootsep += pycompat.ossep
303 if cwd.startswith(rootsep):
303 if cwd.startswith(rootsep):
304 return cwd[len(rootsep) :]
304 return cwd[len(rootsep) :]
305 else:
305 else:
306 # we're outside the repo. return an absolute path.
306 # we're outside the repo. return an absolute path.
307 return cwd
307 return cwd
308
308
309 def pathto(self, f, cwd=None):
309 def pathto(self, f, cwd=None):
310 if cwd is None:
310 if cwd is None:
311 cwd = self.getcwd()
311 cwd = self.getcwd()
312 path = util.pathto(self._root, cwd, f)
312 path = util.pathto(self._root, cwd, f)
313 if self._slash:
313 if self._slash:
314 return util.pconvert(path)
314 return util.pconvert(path)
315 return path
315 return path
316
316
317 def __getitem__(self, key):
317 def __getitem__(self, key):
318 """Return the current state of key (a filename) in the dirstate.
318 """Return the current state of key (a filename) in the dirstate.
319
319
320 States are:
320 States are:
321 n normal
321 n normal
322 m needs merging
322 m needs merging
323 r marked for removal
323 r marked for removal
324 a marked for addition
324 a marked for addition
325 ? not tracked
325 ? not tracked
326
326
327 XXX The "state" is a bit obscure to be in the "public" API. we should
327 XXX The "state" is a bit obscure to be in the "public" API. we should
328 consider migrating all user of this to going through the dirstate entry
328 consider migrating all user of this to going through the dirstate entry
329 instead.
329 instead.
330 """
330 """
331 entry = self._map.get(key)
331 entry = self._map.get(key)
332 if entry is not None:
332 if entry is not None:
333 return entry.state
333 return entry.state
334 return b'?'
334 return b'?'
335
335
336 def __contains__(self, key):
336 def __contains__(self, key):
337 return key in self._map
337 return key in self._map
338
338
339 def __iter__(self):
339 def __iter__(self):
340 return iter(sorted(self._map))
340 return iter(sorted(self._map))
341
341
342 def items(self):
342 def items(self):
343 return pycompat.iteritems(self._map)
343 return pycompat.iteritems(self._map)
344
344
345 iteritems = items
345 iteritems = items
346
346
347 def directories(self):
347 def directories(self):
348 return self._map.directories()
348 return self._map.directories()
349
349
350 def parents(self):
350 def parents(self):
351 return [self._validate(p) for p in self._pl]
351 return [self._validate(p) for p in self._pl]
352
352
353 def p1(self):
353 def p1(self):
354 return self._validate(self._pl[0])
354 return self._validate(self._pl[0])
355
355
356 def p2(self):
356 def p2(self):
357 return self._validate(self._pl[1])
357 return self._validate(self._pl[1])
358
358
359 @property
359 @property
360 def in_merge(self):
360 def in_merge(self):
361 """True if a merge is in progress"""
361 """True if a merge is in progress"""
362 return self._pl[1] != self._nodeconstants.nullid
362 return self._pl[1] != self._nodeconstants.nullid
363
363
364 def branch(self):
364 def branch(self):
365 return encoding.tolocal(self._branch)
365 return encoding.tolocal(self._branch)
366
366
367 def setparents(self, p1, p2=None):
367 def setparents(self, p1, p2=None):
368 """Set dirstate parents to p1 and p2.
368 """Set dirstate parents to p1 and p2.
369
369
370 When moving from two parents to one, "merged" entries a
370 When moving from two parents to one, "merged" entries a
371 adjusted to normal and previous copy records discarded and
371 adjusted to normal and previous copy records discarded and
372 returned by the call.
372 returned by the call.
373
373
374 See localrepo.setparents()
374 See localrepo.setparents()
375 """
375 """
376 if p2 is None:
376 if p2 is None:
377 p2 = self._nodeconstants.nullid
377 p2 = self._nodeconstants.nullid
378 if self._parentwriters == 0:
378 if self._parentwriters == 0:
379 raise ValueError(
379 raise ValueError(
380 b"cannot set dirstate parent outside of "
380 b"cannot set dirstate parent outside of "
381 b"dirstate.parentchange context manager"
381 b"dirstate.parentchange context manager"
382 )
382 )
383
383
384 self._dirty = True
384 self._dirty = True
385 oldp2 = self._pl[1]
385 oldp2 = self._pl[1]
386 if self._origpl is None:
386 if self._origpl is None:
387 self._origpl = self._pl
387 self._origpl = self._pl
388 self._map.setparents(p1, p2)
388 self._map.setparents(p1, p2)
389 copies = {}
389 copies = {}
390 if (
390 if (
391 oldp2 != self._nodeconstants.nullid
391 oldp2 != self._nodeconstants.nullid
392 and p2 == self._nodeconstants.nullid
392 and p2 == self._nodeconstants.nullid
393 ):
393 ):
394 candidatefiles = self._map.non_normal_or_other_parent_paths()
394 candidatefiles = self._map.non_normal_or_other_parent_paths()
395
395
396 for f in candidatefiles:
396 for f in candidatefiles:
397 s = self._map.get(f)
397 s = self._map.get(f)
398 if s is None:
398 if s is None:
399 continue
399 continue
400
400
401 # Discard "merged" markers when moving away from a merge state
401 # Discard "merged" markers when moving away from a merge state
402 if s.merged:
402 if s.merged:
403 source = self._map.copymap.get(f)
403 source = self._map.copymap.get(f)
404 if source:
404 if source:
405 copies[f] = source
405 copies[f] = source
406 self.normallookup(f)
406 self.normallookup(f)
407 # Also fix up otherparent markers
407 # Also fix up otherparent markers
408 elif s.from_p2:
408 elif s.from_p2:
409 source = self._map.copymap.get(f)
409 source = self._map.copymap.get(f)
410 if source:
410 if source:
411 copies[f] = source
411 copies[f] = source
412 self._add(f)
412 self._add(f)
413 return copies
413 return copies
414
414
415 def setbranch(self, branch):
415 def setbranch(self, branch):
416 self.__class__._branch.set(self, encoding.fromlocal(branch))
416 self.__class__._branch.set(self, encoding.fromlocal(branch))
417 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
417 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
418 try:
418 try:
419 f.write(self._branch + b'\n')
419 f.write(self._branch + b'\n')
420 f.close()
420 f.close()
421
421
422 # make sure filecache has the correct stat info for _branch after
422 # make sure filecache has the correct stat info for _branch after
423 # replacing the underlying file
423 # replacing the underlying file
424 ce = self._filecache[b'_branch']
424 ce = self._filecache[b'_branch']
425 if ce:
425 if ce:
426 ce.refresh()
426 ce.refresh()
427 except: # re-raises
427 except: # re-raises
428 f.discard()
428 f.discard()
429 raise
429 raise
430
430
431 def invalidate(self):
431 def invalidate(self):
432 """Causes the next access to reread the dirstate.
432 """Causes the next access to reread the dirstate.
433
433
434 This is different from localrepo.invalidatedirstate() because it always
434 This is different from localrepo.invalidatedirstate() because it always
435 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
435 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
436 check whether the dirstate has changed before rereading it."""
436 check whether the dirstate has changed before rereading it."""
437
437
438 for a in ("_map", "_branch", "_ignore"):
438 for a in ("_map", "_branch", "_ignore"):
439 if a in self.__dict__:
439 if a in self.__dict__:
440 delattr(self, a)
440 delattr(self, a)
441 self._lastnormaltime = 0
441 self._lastnormaltime = 0
442 self._dirty = False
442 self._dirty = False
443 self._updatedfiles.clear()
443 self._updatedfiles.clear()
444 self._parentwriters = 0
444 self._parentwriters = 0
445 self._origpl = None
445 self._origpl = None
446
446
447 def copy(self, source, dest):
447 def copy(self, source, dest):
448 """Mark dest as a copy of source. Unmark dest if source is None."""
448 """Mark dest as a copy of source. Unmark dest if source is None."""
449 if source == dest:
449 if source == dest:
450 return
450 return
451 self._dirty = True
451 self._dirty = True
452 if source is not None:
452 if source is not None:
453 self._map.copymap[dest] = source
453 self._map.copymap[dest] = source
454 self._updatedfiles.add(source)
454 self._updatedfiles.add(source)
455 self._updatedfiles.add(dest)
455 self._updatedfiles.add(dest)
456 elif self._map.copymap.pop(dest, None):
456 elif self._map.copymap.pop(dest, None):
457 self._updatedfiles.add(dest)
457 self._updatedfiles.add(dest)
458
458
459 def copied(self, file):
459 def copied(self, file):
460 return self._map.copymap.get(file, None)
460 return self._map.copymap.get(file, None)
461
461
462 def copies(self):
462 def copies(self):
463 return self._map.copymap
463 return self._map.copymap
464
464
465 @requires_no_parents_change
465 @requires_no_parents_change
466 def set_tracked(self, filename):
466 def set_tracked(self, filename):
467 """a "public" method for generic code to mark a file as tracked
467 """a "public" method for generic code to mark a file as tracked
468
468
469 This function is to be called outside of "update/merge" case. For
469 This function is to be called outside of "update/merge" case. For
470 example by a command like `hg add X`.
470 example by a command like `hg add X`.
471
471
472 return True the file was previously untracked, False otherwise.
472 return True the file was previously untracked, False otherwise.
473 """
473 """
474 entry = self._map.get(filename)
474 entry = self._map.get(filename)
475 if entry is None:
475 if entry is None:
476 self._add(filename)
476 self._add(filename)
477 return True
477 return True
478 elif not entry.tracked:
478 elif not entry.tracked:
479 self.normallookup(filename)
479 self.normallookup(filename)
480 return True
480 return True
481 return False
481 return False
482
482
483 @requires_no_parents_change
483 @requires_no_parents_change
484 def set_untracked(self, filename):
484 def set_untracked(self, filename):
485 """a "public" method for generic code to mark a file as untracked
485 """a "public" method for generic code to mark a file as untracked
486
486
487 This function is to be called outside of "update/merge" case. For
487 This function is to be called outside of "update/merge" case. For
488 example by a command like `hg remove X`.
488 example by a command like `hg remove X`.
489
489
490 return True the file was previously tracked, False otherwise.
490 return True the file was previously tracked, False otherwise.
491 """
491 """
492 entry = self._map.get(filename)
492 entry = self._map.get(filename)
493 if entry is None:
493 if entry is None:
494 return False
494 return False
495 elif entry.added:
495 elif entry.added:
496 self._drop(filename)
496 self._drop(filename)
497 return True
497 return True
498 else:
498 else:
499 self._remove(filename)
499 self._remove(filename)
500 return True
500 return True
501
501
502 @requires_parents_change
502 @requires_parents_change
503 def update_file_reference(
503 def update_file_reference(
504 self,
504 self,
505 filename,
505 filename,
506 p1_tracked,
506 p1_tracked,
507 ):
507 ):
508 """Set a file as tracked in the parent (or not)
508 """Set a file as tracked in the parent (or not)
509
509
510 This is to be called when adjust the dirstate to a new parent after an history
510 This is to be called when adjust the dirstate to a new parent after an history
511 rewriting operation.
511 rewriting operation.
512
512
513 It should not be called during a merge (p2 != nullid) and only within
513 It should not be called during a merge (p2 != nullid) and only within
514 a `with dirstate.parentchange():` context.
514 a `with dirstate.parentchange():` context.
515 """
515 """
516 if self.in_merge:
516 if self.in_merge:
517 msg = b'update_file_reference should not be called when merging'
517 msg = b'update_file_reference should not be called when merging'
518 raise error.ProgrammingError(msg)
518 raise error.ProgrammingError(msg)
519 entry = self._map.get(filename)
519 entry = self._map.get(filename)
520 if entry is None:
520 if entry is None:
521 wc_tracked = False
521 wc_tracked = False
522 else:
522 else:
523 wc_tracked = entry.tracked
523 wc_tracked = entry.tracked
524 if p1_tracked and wc_tracked:
524 if p1_tracked and wc_tracked:
525 # the underlying reference might have changed, we will have to
525 # the underlying reference might have changed, we will have to
526 # check it.
526 # check it.
527 self.normallookup(filename)
527 self.normallookup(filename)
528 elif not (p1_tracked or wc_tracked):
528 elif not (p1_tracked or wc_tracked):
529 # the file is no longer relevant to anyone
529 # the file is no longer relevant to anyone
530 self._drop(filename)
530 self._drop(filename)
531 elif (not p1_tracked) and wc_tracked:
531 elif (not p1_tracked) and wc_tracked:
532 if not entry.added:
532 if not entry.added:
533 self._add(filename)
533 self._add(filename)
534 elif p1_tracked and not wc_tracked:
534 elif p1_tracked and not wc_tracked:
535 if entry is None or not entry.removed:
535 if entry is None or not entry.removed:
536 self._remove(filename)
536 self._remove(filename)
537 else:
537 else:
538 assert False, 'unreachable'
538 assert False, 'unreachable'
539
539
540 @requires_parents_change
540 @requires_parents_change
541 def update_file(
541 def update_file(
542 self,
542 self,
543 filename,
543 filename,
544 wc_tracked,
544 wc_tracked,
545 p1_tracked,
545 p1_tracked,
546 p2_tracked=False,
546 p2_tracked=False,
547 merged=False,
547 merged=False,
548 clean_p1=False,
548 clean_p1=False,
549 clean_p2=False,
549 clean_p2=False,
550 possibly_dirty=False,
550 possibly_dirty=False,
551 ):
551 ):
552 """update the information about a file in the dirstate
552 """update the information about a file in the dirstate
553
553
554 This is to be called when the direstates parent changes to keep track
554 This is to be called when the direstates parent changes to keep track
555 of what is the file situation in regards to the working copy and its parent.
555 of what is the file situation in regards to the working copy and its parent.
556
556
557 This function must be called within a `dirstate.parentchange` context.
557 This function must be called within a `dirstate.parentchange` context.
558
558
559 note: the API is at an early stage and we might need to ajust it
559 note: the API is at an early stage and we might need to ajust it
560 depending of what information ends up being relevant and useful to
560 depending of what information ends up being relevant and useful to
561 other processing.
561 other processing.
562 """
562 """
563 if not self.pendingparentchange():
563 if not self.pendingparentchange():
564 msg = b'calling `update_file` outside of a parentchange context'
564 msg = b'calling `update_file` outside of a parentchange context'
565 raise error.ProgrammingError(msg)
565 raise error.ProgrammingError(msg)
566 if merged and (clean_p1 or clean_p2):
566 if merged and (clean_p1 or clean_p2):
567 msg = b'`merged` argument incompatible with `clean_p1`/`clean_p2`'
567 msg = b'`merged` argument incompatible with `clean_p1`/`clean_p2`'
568 raise error.ProgrammingError(msg)
568 raise error.ProgrammingError(msg)
569 assert not (merged and (clean_p1 or clean_p1))
569 assert not (merged and (clean_p1 or clean_p1))
570 if not (p1_tracked or p2_tracked or wc_tracked):
570 if not (p1_tracked or p2_tracked or wc_tracked):
571 self._drop(filename)
571 self._drop(filename)
572 elif merged:
572 elif merged:
573 assert wc_tracked
573 assert wc_tracked
574 if not self.in_merge:
574 if not self.in_merge:
575 self.normallookup(filename)
575 self.normallookup(filename)
576 self.otherparent(filename)
576 self.otherparent(filename)
577 elif not (p1_tracked or p2_tracked) and wc_tracked:
577 elif not (p1_tracked or p2_tracked) and wc_tracked:
578 self._addpath(filename, added=True, possibly_dirty=possibly_dirty)
578 self._addpath(filename, added=True, possibly_dirty=possibly_dirty)
579 self._map.copymap.pop(filename, None)
579 self._map.copymap.pop(filename, None)
580 elif (p1_tracked or p2_tracked) and not wc_tracked:
580 elif (p1_tracked or p2_tracked) and not wc_tracked:
581 self._remove(filename)
581 self._remove(filename)
582 elif clean_p2 and wc_tracked:
582 elif clean_p2 and wc_tracked:
583 assert p2_tracked
583 assert p2_tracked
584 self.otherparent(filename)
584 self.otherparent(filename)
585 elif not p1_tracked and p2_tracked and wc_tracked:
585 elif not p1_tracked and p2_tracked and wc_tracked:
586 self._addpath(filename, from_p2=True, possibly_dirty=possibly_dirty)
586 self._addpath(filename, from_p2=True, possibly_dirty=possibly_dirty)
587 self._map.copymap.pop(filename, None)
587 self._map.copymap.pop(filename, None)
588 elif possibly_dirty:
588 elif possibly_dirty:
589 self._addpath(filename, possibly_dirty=possibly_dirty)
589 self._addpath(filename, possibly_dirty=possibly_dirty)
590 elif wc_tracked:
590 elif wc_tracked:
591 self.normal(filename)
591 self.normal(filename)
592 # XXX We need something for file that are dirty after an update
592 # XXX We need something for file that are dirty after an update
593 else:
593 else:
594 assert False, 'unreachable'
594 assert False, 'unreachable'
595
595
    @requires_parents_change
    def update_parent_file_data(self, f, filedata):
        """update the information about the content of a file

        This function should be called within a `dirstate.parentchange` context.
        """
        # Delegate to `normal`, forwarding the precomputed
        # (mode, size, mtime) triple so `normal` does not lstat again.
        self.normal(f, parentfiledata=filedata)
603
603
604 def _addpath(
604 def _addpath(
605 self,
605 self,
606 f,
606 f,
607 mode=0,
607 mode=0,
608 size=None,
608 size=None,
609 mtime=None,
609 mtime=None,
610 added=False,
610 added=False,
611 merged=False,
611 merged=False,
612 from_p2=False,
612 from_p2=False,
613 possibly_dirty=False,
613 possibly_dirty=False,
614 ):
614 ):
615 entry = self._map.get(f)
615 entry = self._map.get(f)
616 if added or entry is not None and entry.removed:
616 if added or entry is not None and entry.removed:
617 scmutil.checkfilename(f)
617 scmutil.checkfilename(f)
618 if self._map.hastrackeddir(f):
618 if self._map.hastrackeddir(f):
619 msg = _(b'directory %r already in dirstate')
619 msg = _(b'directory %r already in dirstate')
620 msg %= pycompat.bytestr(f)
620 msg %= pycompat.bytestr(f)
621 raise error.Abort(msg)
621 raise error.Abort(msg)
622 # shadows
622 # shadows
623 for d in pathutil.finddirs(f):
623 for d in pathutil.finddirs(f):
624 if self._map.hastrackeddir(d):
624 if self._map.hastrackeddir(d):
625 break
625 break
626 entry = self._map.get(d)
626 entry = self._map.get(d)
627 if entry is not None and not entry.removed:
627 if entry is not None and not entry.removed:
628 msg = _(b'file %r in dirstate clashes with %r')
628 msg = _(b'file %r in dirstate clashes with %r')
629 msg %= (pycompat.bytestr(d), pycompat.bytestr(f))
629 msg %= (pycompat.bytestr(d), pycompat.bytestr(f))
630 raise error.Abort(msg)
630 raise error.Abort(msg)
631 self._dirty = True
631 self._dirty = True
632 self._updatedfiles.add(f)
632 self._updatedfiles.add(f)
633 self._map.addfile(
633 self._map.addfile(
634 f,
634 f,
635 mode=mode,
635 mode=mode,
636 size=size,
636 size=size,
637 mtime=mtime,
637 mtime=mtime,
638 added=added,
638 added=added,
639 merged=merged,
639 merged=merged,
640 from_p2=from_p2,
640 from_p2=from_p2,
641 possibly_dirty=possibly_dirty,
641 possibly_dirty=possibly_dirty,
642 )
642 )
643
643
644 def normal(self, f, parentfiledata=None):
644 def normal(self, f, parentfiledata=None):
645 """Mark a file normal and clean.
645 """Mark a file normal and clean.
646
646
647 parentfiledata: (mode, size, mtime) of the clean file
647 parentfiledata: (mode, size, mtime) of the clean file
648
648
649 parentfiledata should be computed from memory (for mode,
649 parentfiledata should be computed from memory (for mode,
650 size), as or close as possible from the point where we
650 size), as or close as possible from the point where we
651 determined the file was clean, to limit the risk of the
651 determined the file was clean, to limit the risk of the
652 file having been changed by an external process between the
652 file having been changed by an external process between the
653 moment where the file was determined to be clean and now."""
653 moment where the file was determined to be clean and now."""
654 if parentfiledata:
654 if parentfiledata:
655 (mode, size, mtime) = parentfiledata
655 (mode, size, mtime) = parentfiledata
656 else:
656 else:
657 s = os.lstat(self._join(f))
657 s = os.lstat(self._join(f))
658 mode = s.st_mode
658 mode = s.st_mode
659 size = s.st_size
659 size = s.st_size
660 mtime = s[stat.ST_MTIME]
660 mtime = s[stat.ST_MTIME]
661 self._addpath(f, mode=mode, size=size, mtime=mtime)
661 self._addpath(f, mode=mode, size=size, mtime=mtime)
662 self._map.copymap.pop(f, None)
662 self._map.copymap.pop(f, None)
663 if f in self._map.nonnormalset:
663 if f in self._map.nonnormalset:
664 self._map.nonnormalset.remove(f)
664 self._map.nonnormalset.remove(f)
665 if mtime > self._lastnormaltime:
665 if mtime > self._lastnormaltime:
666 # Remember the most recent modification timeslot for status(),
666 # Remember the most recent modification timeslot for status(),
667 # to make sure we won't miss future size-preserving file content
667 # to make sure we won't miss future size-preserving file content
668 # modifications that happen within the same timeslot.
668 # modifications that happen within the same timeslot.
669 self._lastnormaltime = mtime
669 self._lastnormaltime = mtime
670
670
    def normallookup(self, f):
        '''Mark a file normal, but possibly dirty.'''
        if self.in_merge:
            # if there is a merge going on and the file was either
            # "merged" or coming from other parent (-2) before
            # being removed, restore that state.
            entry = self._map.get(f)
            if entry is not None:
                # XXX this should probably be dealt with at a lower level
                # (see `merged_removed` and `from_p2_removed`)
                if entry.merged_removed or entry.from_p2_removed:
                    # `merge`/`otherparent` drop the copy record, so save
                    # the copy source first and restore it afterwards
                    source = self._map.copymap.get(f)
                    if entry.merged_removed:
                        self.merge(f)
                    elif entry.from_p2_removed:
                        self.otherparent(f)
                    if source is not None:
                        self.copy(source, f)
                    return
                elif entry.merged or entry.from_p2:
                    # already in the wanted merge-related state: nothing to do
                    return
        # default: track the file but flag it as needing a content check
        self._addpath(f, possibly_dirty=True)
        self._map.copymap.pop(f, None)
694
694
695 def otherparent(self, f):
695 def otherparent(self, f):
696 '''Mark as coming from the other parent, always dirty.'''
696 '''Mark as coming from the other parent, always dirty.'''
697 if not self.in_merge:
697 if not self.in_merge:
698 msg = _(b"setting %r to other parent only allowed in merges") % f
698 msg = _(b"setting %r to other parent only allowed in merges") % f
699 raise error.Abort(msg)
699 raise error.Abort(msg)
700 entry = self._map.get(f)
700 entry = self._map.get(f)
701 if entry is not None and entry.tracked:
701 if entry is not None and entry.tracked:
702 # merge-like
702 # merge-like
703 self._addpath(f, merged=True)
703 self._addpath(f, merged=True)
704 else:
704 else:
705 # add-like
705 # add-like
706 self._addpath(f, from_p2=True)
706 self._addpath(f, from_p2=True)
707 self._map.copymap.pop(f, None)
707 self._map.copymap.pop(f, None)
708
708
709 def add(self, f):
709 def add(self, f):
710 '''Mark a file added.'''
710 '''Mark a file added.'''
711 if not self.pendingparentchange():
711 if not self.pendingparentchange():
712 util.nouideprecwarn(
712 util.nouideprecwarn(
713 b"do not use `add` outside of update/merge context."
713 b"do not use `add` outside of update/merge context."
714 b" Use `set_tracked`",
714 b" Use `set_tracked`",
715 b'6.0',
715 b'6.0',
716 stacklevel=2,
716 stacklevel=2,
717 )
717 )
718 self._add(f)
718 self._add(f)
719
719
    def _add(self, filename):
        """internal function to mark a file as added"""
        # register the entry first, then drop any stale copy source
        self._addpath(filename, added=True)
        self._map.copymap.pop(filename, None)
724
724
725 def remove(self, f):
725 def remove(self, f):
726 '''Mark a file removed'''
726 '''Mark a file removed'''
727 if not self.pendingparentchange():
728 util.nouideprecwarn(
729 b"do not use `remove` outside of update/merge context."
730 b" Use `set_untracked`",
731 b'6.0',
732 stacklevel=2,
733 )
727 self._remove(f)
734 self._remove(f)
728
735
    def _remove(self, filename):
        """internal function to mark a file removed"""
        self._dirty = True
        self._updatedfiles.add(filename)
        # `removefile` behavior depends on whether a merge is in progress
        self._map.removefile(filename, in_merge=self.in_merge)
734
741
735 def merge(self, f):
742 def merge(self, f):
736 '''Mark a file merged.'''
743 '''Mark a file merged.'''
737 if not self.in_merge:
744 if not self.in_merge:
738 return self.normallookup(f)
745 return self.normallookup(f)
739 return self.otherparent(f)
746 return self.otherparent(f)
740
747
    def drop(self, f):
        '''Drop a file from the dirstate'''
        # thin public wrapper around the internal `_drop`
        self._drop(f)
744
751
745 def _drop(self, filename):
752 def _drop(self, filename):
746 """internal function to drop a file from the dirstate"""
753 """internal function to drop a file from the dirstate"""
747 if self._map.dropfile(filename):
754 if self._map.dropfile(filename):
748 self._dirty = True
755 self._dirty = True
749 self._updatedfiles.add(filename)
756 self._updatedfiles.add(filename)
750 self._map.copymap.pop(filename, None)
757 self._map.copymap.pop(filename, None)
751
758
    def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
        # Resolve the folded (on-disk / dirstate) casing of `path` and
        # cache the result for existing paths in `storemap`
        # (a fold-map keyed by the case-normalized form).
        if exists is None:
            exists = os.path.lexists(os.path.join(self._root, path))
        if not exists:
            # Maybe a path component exists
            if not ignoremissing and b'/' in path:
                d, f = path.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, None)
                folded = d + b"/" + f
            else:
                # No path components, preserve original case
                folded = path
        else:
            # recursively normalize leading directory components
            # against dirstate
            if b'/' in normed:
                d, f = normed.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, True)
                r = self._root + b"/" + d
                folded = d + b"/" + util.fspath(f, r)
            else:
                folded = util.fspath(normed, self._root)
            # only cache results for paths that exist on disk
            storemap[normed] = folded

        return folded
777
784
778 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
785 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
779 normed = util.normcase(path)
786 normed = util.normcase(path)
780 folded = self._map.filefoldmap.get(normed, None)
787 folded = self._map.filefoldmap.get(normed, None)
781 if folded is None:
788 if folded is None:
782 if isknown:
789 if isknown:
783 folded = path
790 folded = path
784 else:
791 else:
785 folded = self._discoverpath(
792 folded = self._discoverpath(
786 path, normed, ignoremissing, exists, self._map.filefoldmap
793 path, normed, ignoremissing, exists, self._map.filefoldmap
787 )
794 )
788 return folded
795 return folded
789
796
790 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
797 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
791 normed = util.normcase(path)
798 normed = util.normcase(path)
792 folded = self._map.filefoldmap.get(normed, None)
799 folded = self._map.filefoldmap.get(normed, None)
793 if folded is None:
800 if folded is None:
794 folded = self._map.dirfoldmap.get(normed, None)
801 folded = self._map.dirfoldmap.get(normed, None)
795 if folded is None:
802 if folded is None:
796 if isknown:
803 if isknown:
797 folded = path
804 folded = path
798 else:
805 else:
799 # store discovered result in dirfoldmap so that future
806 # store discovered result in dirfoldmap so that future
800 # normalizefile calls don't start matching directories
807 # normalizefile calls don't start matching directories
801 folded = self._discoverpath(
808 folded = self._discoverpath(
802 path, normed, ignoremissing, exists, self._map.dirfoldmap
809 path, normed, ignoremissing, exists, self._map.dirfoldmap
803 )
810 )
804 return folded
811 return folded
805
812
806 def normalize(self, path, isknown=False, ignoremissing=False):
813 def normalize(self, path, isknown=False, ignoremissing=False):
807 """
814 """
808 normalize the case of a pathname when on a casefolding filesystem
815 normalize the case of a pathname when on a casefolding filesystem
809
816
810 isknown specifies whether the filename came from walking the
817 isknown specifies whether the filename came from walking the
811 disk, to avoid extra filesystem access.
818 disk, to avoid extra filesystem access.
812
819
813 If ignoremissing is True, missing path are returned
820 If ignoremissing is True, missing path are returned
814 unchanged. Otherwise, we try harder to normalize possibly
821 unchanged. Otherwise, we try harder to normalize possibly
815 existing path components.
822 existing path components.
816
823
817 The normalized case is determined based on the following precedence:
824 The normalized case is determined based on the following precedence:
818
825
819 - version of name already stored in the dirstate
826 - version of name already stored in the dirstate
820 - version of name stored on disk
827 - version of name stored on disk
821 - version provided via command arguments
828 - version provided via command arguments
822 """
829 """
823
830
824 if self._checkcase:
831 if self._checkcase:
825 return self._normalize(path, isknown, ignoremissing)
832 return self._normalize(path, isknown, ignoremissing)
826 return path
833 return path
827
834
828 def clear(self):
835 def clear(self):
829 self._map.clear()
836 self._map.clear()
830 self._lastnormaltime = 0
837 self._lastnormaltime = 0
831 self._updatedfiles.clear()
838 self._updatedfiles.clear()
832 self._dirty = True
839 self._dirty = True
833
840
    def rebuild(self, parent, allfiles, changedfiles=None):
        """rebuild dirstate entries against `parent`

        When `changedfiles` is None the whole dirstate is reset from
        `allfiles`; otherwise only the named files are refreshed:
        looked up again if still present in `allfiles`, dropped if not.
        """
        if changedfiles is None:
            # Rebuild entire dirstate
            to_lookup = allfiles
            to_drop = []
            # `clear()` resets _lastnormaltime; preserve it across the reset
            lastnormaltime = self._lastnormaltime
            self.clear()
            self._lastnormaltime = lastnormaltime
        elif len(changedfiles) < 10:
            # Avoid turning allfiles into a set, which can be expensive if it's
            # large.
            to_lookup = []
            to_drop = []
            for f in changedfiles:
                if f in allfiles:
                    to_lookup.append(f)
                else:
                    to_drop.append(f)
        else:
            changedfilesset = set(changedfiles)
            to_lookup = changedfilesset & set(allfiles)
            to_drop = changedfilesset - to_lookup

        if self._origpl is None:
            # remember the pre-change parents; _writedirstate compares
            # against them to decide whether to fire parent-change callbacks
            self._origpl = self._pl
        self._map.setparents(parent, self._nodeconstants.nullid)

        for f in to_lookup:
            self.normallookup(f)
        for f in to_drop:
            self._drop(f)

        self._dirty = True
867
874
868 def identity(self):
875 def identity(self):
869 """Return identity of dirstate itself to detect changing in storage
876 """Return identity of dirstate itself to detect changing in storage
870
877
871 If identity of previous dirstate is equal to this, writing
878 If identity of previous dirstate is equal to this, writing
872 changes based on the former dirstate out can keep consistency.
879 changes based on the former dirstate out can keep consistency.
873 """
880 """
874 return self._map.identity
881 return self._map.identity
875
882
    def write(self, tr):
        """write in-memory dirstate changes out

        With a transaction `tr`, the actual write is registered as a file
        generator and deferred; without one, the dirstate file is written
        immediately. A clean (non-dirty) dirstate is a no-op.
        """
        if not self._dirty:
            return

        filename = self._filename
        if tr:
            # 'dirstate.write()' is not only for writing in-memory
            # changes out, but also for dropping ambiguous timestamp.
            # delayed writing re-raise "ambiguous timestamp issue".
            # See also the wiki page below for detail:
            # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan

            # emulate dropping timestamp in 'parsers.pack_dirstate'
            now = _getfsnow(self._opener)
            self._map.clearambiguoustimes(self._updatedfiles, now)

            # emulate that all 'dirstate.normal' results are written out
            self._lastnormaltime = 0
            self._updatedfiles.clear()

            # delay writing in-memory changes out
            tr.addfilegenerator(
                b'dirstate',
                (self._filename,),
                self._writedirstate,
                location=b'plain',
            )
            return

        # no transaction: write atomically right away
        st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
        self._writedirstate(st)
907
914
    def addparentchangecallback(self, category, callback):
        """add a callback to be called when the wd parents are changed

        Callback will be called with the following arguments:
        dirstate, (oldp1, oldp2), (newp1, newp2)

        Category is a unique identifier to allow overwriting an old callback
        with a newer callback.
        """
        # later registrations under the same category replace earlier ones
        self._plchangecallbacks[category] = callback
918
925
    def _writedirstate(self, st):
        """serialize the dirstate map into the open file object `st`"""
        # notify callbacks about parents change
        if self._origpl is not None and self._origpl != self._pl:
            # sorted for a deterministic callback order across categories
            for c, callback in sorted(
                pycompat.iteritems(self._plchangecallbacks)
            ):
                callback(self, self._origpl, self._pl)
            self._origpl = None
        # use the modification time of the newly created temporary file as the
        # filesystem's notion of 'now'
        now = util.fstat(st)[stat.ST_MTIME] & _rangemask

        # enough 'delaywrite' prevents 'pack_dirstate' from dropping
        # timestamp of each entries in dirstate, because of 'now > mtime'
        delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
        if delaywrite > 0:
            # do we have any files to delay for?
            for f, e in pycompat.iteritems(self._map):
                if e.need_delay(now):
                    import time  # to avoid useless import

                    # rather than sleep n seconds, sleep until the next
                    # multiple of n seconds
                    clock = time.time()
                    start = int(clock) - (int(clock) % delaywrite)
                    end = start + delaywrite
                    time.sleep(end - clock)
                    now = end  # trust our estimate that the end is near now
                    break

        self._map.write(st, now)
        self._lastnormaltime = 0
        self._dirty = False
952
959
953 def _dirignore(self, f):
960 def _dirignore(self, f):
954 if self._ignore(f):
961 if self._ignore(f):
955 return True
962 return True
956 for p in pathutil.finddirs(f):
963 for p in pathutil.finddirs(f):
957 if self._ignore(p):
964 if self._ignore(p):
958 return True
965 return True
959 return False
966 return False
960
967
961 def _ignorefiles(self):
968 def _ignorefiles(self):
962 files = []
969 files = []
963 if os.path.exists(self._join(b'.hgignore')):
970 if os.path.exists(self._join(b'.hgignore')):
964 files.append(self._join(b'.hgignore'))
971 files.append(self._join(b'.hgignore'))
965 for name, path in self._ui.configitems(b"ui"):
972 for name, path in self._ui.configitems(b"ui"):
966 if name == b'ignore' or name.startswith(b'ignore.'):
973 if name == b'ignore' or name.startswith(b'ignore.'):
967 # we need to use os.path.join here rather than self._join
974 # we need to use os.path.join here rather than self._join
968 # because path is arbitrary and user-specified
975 # because path is arbitrary and user-specified
969 files.append(os.path.join(self._rootdir, util.expandpath(path)))
976 files.append(os.path.join(self._rootdir, util.expandpath(path)))
970 return files
977 return files
971
978
    def _ignorefileandline(self, f):
        """return (patternfile, lineno, originalline) for the first ignore
        pattern matching `f`, or (None, -1, b"") if nothing matches

        Ignore files are processed breadth-first; `subinclude` patterns
        enqueue further files, with `visited` preventing re-processing.
        """
        files = collections.deque(self._ignorefiles())
        visited = set()
        while files:
            i = files.popleft()
            patterns = matchmod.readpatternfile(
                i, self._ui.warn, sourceinfo=True
            )
            for pattern, lineno, line in patterns:
                kind, p = matchmod._patsplit(pattern, b'glob')
                if kind == b"subinclude":
                    if p not in visited:
                        files.append(p)
                    continue
                m = matchmod.match(
                    self._root, b'', [], [pattern], warn=self._ui.warn
                )
                if m(f):
                    return (i, lineno, line)
            visited.add(i)
        return (None, -1, b"")
993
1000
    def _walkexplicit(self, match, subrepos):
        """Get stat data about the files explicitly specified by match.

        Return a triple (results, dirsfound, dirsnotfound).
        - results is a mapping from filename to stat result. It also contains
          listings mapping subrepos and .hg to None.
        - dirsfound is a list of files found to be directories.
        - dirsnotfound is a list of files that the dirstate thinks are
          directories and that were not found."""

        def badtype(mode):
            # human-readable message for file kinds we cannot track
            kind = _(b'unknown')
            if stat.S_ISCHR(mode):
                kind = _(b'character device')
            elif stat.S_ISBLK(mode):
                kind = _(b'block device')
            elif stat.S_ISFIFO(mode):
                kind = _(b'fifo')
            elif stat.S_ISSOCK(mode):
                kind = _(b'socket')
            elif stat.S_ISDIR(mode):
                kind = _(b'directory')
            return _(b'unsupported file type (type is %s)') % kind

        # hoist attribute lookups out of the per-file loop below
        badfn = match.bad
        dmap = self._map
        lstat = os.lstat
        getkind = stat.S_IFMT
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join
        dirsfound = []
        foundadd = dirsfound.append
        dirsnotfound = []
        notfoundadd = dirsnotfound.append

        if not match.isexact() and self._checkcase:
            normalize = self._normalize
        else:
            normalize = None

        # drop explicit files that live inside a subrepo; both lists are
        # sorted so a single merge-style pass suffices
        files = sorted(match.files())
        subrepos.sort()
        i, j = 0, 0
        while i < len(files) and j < len(subrepos):
            subpath = subrepos[j] + b"/"
            if files[i] < subpath:
                i += 1
                continue
            while i < len(files) and files[i].startswith(subpath):
                del files[i]
            j += 1

        if not files or b'' in files:
            files = [b'']
            # constructing the foldmap is expensive, so don't do it for the
            # common case where files is ['']
            normalize = None
        results = dict.fromkeys(subrepos)
        results[b'.hg'] = None

        for ff in files:
            if normalize:
                nf = normalize(ff, False, True)
            else:
                nf = ff
            if nf in results:
                continue

            try:
                st = lstat(join(nf))
                kind = getkind(st.st_mode)
                if kind == dirkind:
                    if nf in dmap:
                        # file replaced by dir on disk but still in dirstate
                        results[nf] = None
                    foundadd((nf, ff))
                elif kind == regkind or kind == lnkkind:
                    results[nf] = st
                else:
                    badfn(ff, badtype(kind))
                    if nf in dmap:
                        results[nf] = None
            except OSError as inst:  # nf not found on disk - it is dirstate only
                if nf in dmap:  # does it exactly match a missing file?
                    results[nf] = None
                else:  # does it match a missing directory?
                    if self._map.hasdir(nf):
                        notfoundadd(nf)
                    else:
                        badfn(ff, encoding.strtolocal(inst.strerror))

        # match.files() may contain explicitly-specified paths that shouldn't
        # be taken; drop them from the list of files found. dirsfound/notfound
        # aren't filtered here because they will be tested later.
        if match.anypats():
            for f in list(results):
                if f == b'.hg' or f in subrepos:
                    # keep sentinel to disable further out-of-repo walks
                    continue
                if not match(f):
                    del results[f]

        # Case insensitive filesystems cannot rely on lstat() failing to detect
        # a case-only rename. Prune the stat object for any file that does not
        # match the case in the filesystem, if there are multiple files that
        # normalize to the same path.
        if match.isexact() and self._checkcase:
            normed = {}

            # group the stat'ed files by their case-normalized form
            for f, st in pycompat.iteritems(results):
                if st is None:
                    continue

                nc = util.normcase(f)
                paths = normed.get(nc)

                if paths is None:
                    paths = set()
                    normed[nc] = paths

                paths.add(f)

            for norm, paths in pycompat.iteritems(normed):
                if len(paths) > 1:
                    for path in paths:
                        folded = self._discoverpath(
                            path, norm, True, None, self._map.dirfoldmap
                        )
                        if path != folded:
                            results[path] = None

        return results, dirsfound, dirsnotfound
1128
1135
1129 def walk(self, match, subrepos, unknown, ignored, full=True):
1136 def walk(self, match, subrepos, unknown, ignored, full=True):
1130 """
1137 """
1131 Walk recursively through the directory tree, finding all files
1138 Walk recursively through the directory tree, finding all files
1132 matched by match.
1139 matched by match.
1133
1140
1134 If full is False, maybe skip some known-clean files.
1141 If full is False, maybe skip some known-clean files.
1135
1142
1136 Return a dict mapping filename to stat-like object (either
1143 Return a dict mapping filename to stat-like object (either
1137 mercurial.osutil.stat instance or return value of os.stat()).
1144 mercurial.osutil.stat instance or return value of os.stat()).
1138
1145
1139 """
1146 """
1140 # full is a flag that extensions that hook into walk can use -- this
1147 # full is a flag that extensions that hook into walk can use -- this
1141 # implementation doesn't use it at all. This satisfies the contract
1148 # implementation doesn't use it at all. This satisfies the contract
1142 # because we only guarantee a "maybe".
1149 # because we only guarantee a "maybe".
1143
1150
1144 if ignored:
1151 if ignored:
1145 ignore = util.never
1152 ignore = util.never
1146 dirignore = util.never
1153 dirignore = util.never
1147 elif unknown:
1154 elif unknown:
1148 ignore = self._ignore
1155 ignore = self._ignore
1149 dirignore = self._dirignore
1156 dirignore = self._dirignore
1150 else:
1157 else:
1151 # if not unknown and not ignored, drop dir recursion and step 2
1158 # if not unknown and not ignored, drop dir recursion and step 2
1152 ignore = util.always
1159 ignore = util.always
1153 dirignore = util.always
1160 dirignore = util.always
1154
1161
1155 matchfn = match.matchfn
1162 matchfn = match.matchfn
1156 matchalways = match.always()
1163 matchalways = match.always()
1157 matchtdir = match.traversedir
1164 matchtdir = match.traversedir
1158 dmap = self._map
1165 dmap = self._map
1159 listdir = util.listdir
1166 listdir = util.listdir
1160 lstat = os.lstat
1167 lstat = os.lstat
1161 dirkind = stat.S_IFDIR
1168 dirkind = stat.S_IFDIR
1162 regkind = stat.S_IFREG
1169 regkind = stat.S_IFREG
1163 lnkkind = stat.S_IFLNK
1170 lnkkind = stat.S_IFLNK
1164 join = self._join
1171 join = self._join
1165
1172
1166 exact = skipstep3 = False
1173 exact = skipstep3 = False
1167 if match.isexact(): # match.exact
1174 if match.isexact(): # match.exact
1168 exact = True
1175 exact = True
1169 dirignore = util.always # skip step 2
1176 dirignore = util.always # skip step 2
1170 elif match.prefix(): # match.match, no patterns
1177 elif match.prefix(): # match.match, no patterns
1171 skipstep3 = True
1178 skipstep3 = True
1172
1179
1173 if not exact and self._checkcase:
1180 if not exact and self._checkcase:
1174 normalize = self._normalize
1181 normalize = self._normalize
1175 normalizefile = self._normalizefile
1182 normalizefile = self._normalizefile
1176 skipstep3 = False
1183 skipstep3 = False
1177 else:
1184 else:
1178 normalize = self._normalize
1185 normalize = self._normalize
1179 normalizefile = None
1186 normalizefile = None
1180
1187
1181 # step 1: find all explicit files
1188 # step 1: find all explicit files
1182 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1189 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1183 if matchtdir:
1190 if matchtdir:
1184 for d in work:
1191 for d in work:
1185 matchtdir(d[0])
1192 matchtdir(d[0])
1186 for d in dirsnotfound:
1193 for d in dirsnotfound:
1187 matchtdir(d)
1194 matchtdir(d)
1188
1195
1189 skipstep3 = skipstep3 and not (work or dirsnotfound)
1196 skipstep3 = skipstep3 and not (work or dirsnotfound)
1190 work = [d for d in work if not dirignore(d[0])]
1197 work = [d for d in work if not dirignore(d[0])]
1191
1198
1192 # step 2: visit subdirectories
1199 # step 2: visit subdirectories
1193 def traverse(work, alreadynormed):
1200 def traverse(work, alreadynormed):
1194 wadd = work.append
1201 wadd = work.append
1195 while work:
1202 while work:
1196 tracing.counter('dirstate.walk work', len(work))
1203 tracing.counter('dirstate.walk work', len(work))
1197 nd = work.pop()
1204 nd = work.pop()
1198 visitentries = match.visitchildrenset(nd)
1205 visitentries = match.visitchildrenset(nd)
1199 if not visitentries:
1206 if not visitentries:
1200 continue
1207 continue
1201 if visitentries == b'this' or visitentries == b'all':
1208 if visitentries == b'this' or visitentries == b'all':
1202 visitentries = None
1209 visitentries = None
1203 skip = None
1210 skip = None
1204 if nd != b'':
1211 if nd != b'':
1205 skip = b'.hg'
1212 skip = b'.hg'
1206 try:
1213 try:
1207 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1214 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1208 entries = listdir(join(nd), stat=True, skip=skip)
1215 entries = listdir(join(nd), stat=True, skip=skip)
1209 except OSError as inst:
1216 except OSError as inst:
1210 if inst.errno in (errno.EACCES, errno.ENOENT):
1217 if inst.errno in (errno.EACCES, errno.ENOENT):
1211 match.bad(
1218 match.bad(
1212 self.pathto(nd), encoding.strtolocal(inst.strerror)
1219 self.pathto(nd), encoding.strtolocal(inst.strerror)
1213 )
1220 )
1214 continue
1221 continue
1215 raise
1222 raise
1216 for f, kind, st in entries:
1223 for f, kind, st in entries:
1217 # Some matchers may return files in the visitentries set,
1224 # Some matchers may return files in the visitentries set,
1218 # instead of 'this', if the matcher explicitly mentions them
1225 # instead of 'this', if the matcher explicitly mentions them
1219 # and is not an exactmatcher. This is acceptable; we do not
1226 # and is not an exactmatcher. This is acceptable; we do not
1220 # make any hard assumptions about file-or-directory below
1227 # make any hard assumptions about file-or-directory below
1221 # based on the presence of `f` in visitentries. If
1228 # based on the presence of `f` in visitentries. If
1222 # visitchildrenset returned a set, we can always skip the
1229 # visitchildrenset returned a set, we can always skip the
1223 # entries *not* in the set it provided regardless of whether
1230 # entries *not* in the set it provided regardless of whether
1224 # they're actually a file or a directory.
1231 # they're actually a file or a directory.
1225 if visitentries and f not in visitentries:
1232 if visitentries and f not in visitentries:
1226 continue
1233 continue
1227 if normalizefile:
1234 if normalizefile:
1228 # even though f might be a directory, we're only
1235 # even though f might be a directory, we're only
1229 # interested in comparing it to files currently in the
1236 # interested in comparing it to files currently in the
1230 # dmap -- therefore normalizefile is enough
1237 # dmap -- therefore normalizefile is enough
1231 nf = normalizefile(
1238 nf = normalizefile(
1232 nd and (nd + b"/" + f) or f, True, True
1239 nd and (nd + b"/" + f) or f, True, True
1233 )
1240 )
1234 else:
1241 else:
1235 nf = nd and (nd + b"/" + f) or f
1242 nf = nd and (nd + b"/" + f) or f
1236 if nf not in results:
1243 if nf not in results:
1237 if kind == dirkind:
1244 if kind == dirkind:
1238 if not ignore(nf):
1245 if not ignore(nf):
1239 if matchtdir:
1246 if matchtdir:
1240 matchtdir(nf)
1247 matchtdir(nf)
1241 wadd(nf)
1248 wadd(nf)
1242 if nf in dmap and (matchalways or matchfn(nf)):
1249 if nf in dmap and (matchalways or matchfn(nf)):
1243 results[nf] = None
1250 results[nf] = None
1244 elif kind == regkind or kind == lnkkind:
1251 elif kind == regkind or kind == lnkkind:
1245 if nf in dmap:
1252 if nf in dmap:
1246 if matchalways or matchfn(nf):
1253 if matchalways or matchfn(nf):
1247 results[nf] = st
1254 results[nf] = st
1248 elif (matchalways or matchfn(nf)) and not ignore(
1255 elif (matchalways or matchfn(nf)) and not ignore(
1249 nf
1256 nf
1250 ):
1257 ):
1251 # unknown file -- normalize if necessary
1258 # unknown file -- normalize if necessary
1252 if not alreadynormed:
1259 if not alreadynormed:
1253 nf = normalize(nf, False, True)
1260 nf = normalize(nf, False, True)
1254 results[nf] = st
1261 results[nf] = st
1255 elif nf in dmap and (matchalways or matchfn(nf)):
1262 elif nf in dmap and (matchalways or matchfn(nf)):
1256 results[nf] = None
1263 results[nf] = None
1257
1264
1258 for nd, d in work:
1265 for nd, d in work:
1259 # alreadynormed means that processwork doesn't have to do any
1266 # alreadynormed means that processwork doesn't have to do any
1260 # expensive directory normalization
1267 # expensive directory normalization
1261 alreadynormed = not normalize or nd == d
1268 alreadynormed = not normalize or nd == d
1262 traverse([d], alreadynormed)
1269 traverse([d], alreadynormed)
1263
1270
1264 for s in subrepos:
1271 for s in subrepos:
1265 del results[s]
1272 del results[s]
1266 del results[b'.hg']
1273 del results[b'.hg']
1267
1274
1268 # step 3: visit remaining files from dmap
1275 # step 3: visit remaining files from dmap
1269 if not skipstep3 and not exact:
1276 if not skipstep3 and not exact:
1270 # If a dmap file is not in results yet, it was either
1277 # If a dmap file is not in results yet, it was either
1271 # a) not matching matchfn b) ignored, c) missing, or d) under a
1278 # a) not matching matchfn b) ignored, c) missing, or d) under a
1272 # symlink directory.
1279 # symlink directory.
1273 if not results and matchalways:
1280 if not results and matchalways:
1274 visit = [f for f in dmap]
1281 visit = [f for f in dmap]
1275 else:
1282 else:
1276 visit = [f for f in dmap if f not in results and matchfn(f)]
1283 visit = [f for f in dmap if f not in results and matchfn(f)]
1277 visit.sort()
1284 visit.sort()
1278
1285
1279 if unknown:
1286 if unknown:
1280 # unknown == True means we walked all dirs under the roots
1287 # unknown == True means we walked all dirs under the roots
1281 # that wasn't ignored, and everything that matched was stat'ed
1288 # that wasn't ignored, and everything that matched was stat'ed
1282 # and is already in results.
1289 # and is already in results.
1283 # The rest must thus be ignored or under a symlink.
1290 # The rest must thus be ignored or under a symlink.
1284 audit_path = pathutil.pathauditor(self._root, cached=True)
1291 audit_path = pathutil.pathauditor(self._root, cached=True)
1285
1292
1286 for nf in iter(visit):
1293 for nf in iter(visit):
1287 # If a stat for the same file was already added with a
1294 # If a stat for the same file was already added with a
1288 # different case, don't add one for this, since that would
1295 # different case, don't add one for this, since that would
1289 # make it appear as if the file exists under both names
1296 # make it appear as if the file exists under both names
1290 # on disk.
1297 # on disk.
1291 if (
1298 if (
1292 normalizefile
1299 normalizefile
1293 and normalizefile(nf, True, True) in results
1300 and normalizefile(nf, True, True) in results
1294 ):
1301 ):
1295 results[nf] = None
1302 results[nf] = None
1296 # Report ignored items in the dmap as long as they are not
1303 # Report ignored items in the dmap as long as they are not
1297 # under a symlink directory.
1304 # under a symlink directory.
1298 elif audit_path.check(nf):
1305 elif audit_path.check(nf):
1299 try:
1306 try:
1300 results[nf] = lstat(join(nf))
1307 results[nf] = lstat(join(nf))
1301 # file was just ignored, no links, and exists
1308 # file was just ignored, no links, and exists
1302 except OSError:
1309 except OSError:
1303 # file doesn't exist
1310 # file doesn't exist
1304 results[nf] = None
1311 results[nf] = None
1305 else:
1312 else:
1306 # It's either missing or under a symlink directory
1313 # It's either missing or under a symlink directory
1307 # which we in this case report as missing
1314 # which we in this case report as missing
1308 results[nf] = None
1315 results[nf] = None
1309 else:
1316 else:
1310 # We may not have walked the full directory tree above,
1317 # We may not have walked the full directory tree above,
1311 # so stat and check everything we missed.
1318 # so stat and check everything we missed.
1312 iv = iter(visit)
1319 iv = iter(visit)
1313 for st in util.statfiles([join(i) for i in visit]):
1320 for st in util.statfiles([join(i) for i in visit]):
1314 results[next(iv)] = st
1321 results[next(iv)] = st
1315 return results
1322 return results
1316
1323
1317 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1324 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1318 # Force Rayon (Rust parallelism library) to respect the number of
1325 # Force Rayon (Rust parallelism library) to respect the number of
1319 # workers. This is a temporary workaround until Rust code knows
1326 # workers. This is a temporary workaround until Rust code knows
1320 # how to read the config file.
1327 # how to read the config file.
1321 numcpus = self._ui.configint(b"worker", b"numcpus")
1328 numcpus = self._ui.configint(b"worker", b"numcpus")
1322 if numcpus is not None:
1329 if numcpus is not None:
1323 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1330 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1324
1331
1325 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1332 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1326 if not workers_enabled:
1333 if not workers_enabled:
1327 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1334 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1328
1335
1329 (
1336 (
1330 lookup,
1337 lookup,
1331 modified,
1338 modified,
1332 added,
1339 added,
1333 removed,
1340 removed,
1334 deleted,
1341 deleted,
1335 clean,
1342 clean,
1336 ignored,
1343 ignored,
1337 unknown,
1344 unknown,
1338 warnings,
1345 warnings,
1339 bad,
1346 bad,
1340 traversed,
1347 traversed,
1341 dirty,
1348 dirty,
1342 ) = rustmod.status(
1349 ) = rustmod.status(
1343 self._map._rustmap,
1350 self._map._rustmap,
1344 matcher,
1351 matcher,
1345 self._rootdir,
1352 self._rootdir,
1346 self._ignorefiles(),
1353 self._ignorefiles(),
1347 self._checkexec,
1354 self._checkexec,
1348 self._lastnormaltime,
1355 self._lastnormaltime,
1349 bool(list_clean),
1356 bool(list_clean),
1350 bool(list_ignored),
1357 bool(list_ignored),
1351 bool(list_unknown),
1358 bool(list_unknown),
1352 bool(matcher.traversedir),
1359 bool(matcher.traversedir),
1353 )
1360 )
1354
1361
1355 self._dirty |= dirty
1362 self._dirty |= dirty
1356
1363
1357 if matcher.traversedir:
1364 if matcher.traversedir:
1358 for dir in traversed:
1365 for dir in traversed:
1359 matcher.traversedir(dir)
1366 matcher.traversedir(dir)
1360
1367
1361 if self._ui.warn:
1368 if self._ui.warn:
1362 for item in warnings:
1369 for item in warnings:
1363 if isinstance(item, tuple):
1370 if isinstance(item, tuple):
1364 file_path, syntax = item
1371 file_path, syntax = item
1365 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1372 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1366 file_path,
1373 file_path,
1367 syntax,
1374 syntax,
1368 )
1375 )
1369 self._ui.warn(msg)
1376 self._ui.warn(msg)
1370 else:
1377 else:
1371 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1378 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1372 self._ui.warn(
1379 self._ui.warn(
1373 msg
1380 msg
1374 % (
1381 % (
1375 pathutil.canonpath(
1382 pathutil.canonpath(
1376 self._rootdir, self._rootdir, item
1383 self._rootdir, self._rootdir, item
1377 ),
1384 ),
1378 b"No such file or directory",
1385 b"No such file or directory",
1379 )
1386 )
1380 )
1387 )
1381
1388
1382 for (fn, message) in bad:
1389 for (fn, message) in bad:
1383 matcher.bad(fn, encoding.strtolocal(message))
1390 matcher.bad(fn, encoding.strtolocal(message))
1384
1391
1385 status = scmutil.status(
1392 status = scmutil.status(
1386 modified=modified,
1393 modified=modified,
1387 added=added,
1394 added=added,
1388 removed=removed,
1395 removed=removed,
1389 deleted=deleted,
1396 deleted=deleted,
1390 unknown=unknown,
1397 unknown=unknown,
1391 ignored=ignored,
1398 ignored=ignored,
1392 clean=clean,
1399 clean=clean,
1393 )
1400 )
1394 return (lookup, status)
1401 return (lookup, status)
1395
1402
1396 def status(self, match, subrepos, ignored, clean, unknown):
1403 def status(self, match, subrepos, ignored, clean, unknown):
1397 """Determine the status of the working copy relative to the
1404 """Determine the status of the working copy relative to the
1398 dirstate and return a pair of (unsure, status), where status is of type
1405 dirstate and return a pair of (unsure, status), where status is of type
1399 scmutil.status and:
1406 scmutil.status and:
1400
1407
1401 unsure:
1408 unsure:
1402 files that might have been modified since the dirstate was
1409 files that might have been modified since the dirstate was
1403 written, but need to be read to be sure (size is the same
1410 written, but need to be read to be sure (size is the same
1404 but mtime differs)
1411 but mtime differs)
1405 status.modified:
1412 status.modified:
1406 files that have definitely been modified since the dirstate
1413 files that have definitely been modified since the dirstate
1407 was written (different size or mode)
1414 was written (different size or mode)
1408 status.clean:
1415 status.clean:
1409 files that have definitely not been modified since the
1416 files that have definitely not been modified since the
1410 dirstate was written
1417 dirstate was written
1411 """
1418 """
1412 listignored, listclean, listunknown = ignored, clean, unknown
1419 listignored, listclean, listunknown = ignored, clean, unknown
1413 lookup, modified, added, unknown, ignored = [], [], [], [], []
1420 lookup, modified, added, unknown, ignored = [], [], [], [], []
1414 removed, deleted, clean = [], [], []
1421 removed, deleted, clean = [], [], []
1415
1422
1416 dmap = self._map
1423 dmap = self._map
1417 dmap.preload()
1424 dmap.preload()
1418
1425
1419 use_rust = True
1426 use_rust = True
1420
1427
1421 allowed_matchers = (
1428 allowed_matchers = (
1422 matchmod.alwaysmatcher,
1429 matchmod.alwaysmatcher,
1423 matchmod.exactmatcher,
1430 matchmod.exactmatcher,
1424 matchmod.includematcher,
1431 matchmod.includematcher,
1425 )
1432 )
1426
1433
1427 if rustmod is None:
1434 if rustmod is None:
1428 use_rust = False
1435 use_rust = False
1429 elif self._checkcase:
1436 elif self._checkcase:
1430 # Case-insensitive filesystems are not handled yet
1437 # Case-insensitive filesystems are not handled yet
1431 use_rust = False
1438 use_rust = False
1432 elif subrepos:
1439 elif subrepos:
1433 use_rust = False
1440 use_rust = False
1434 elif sparse.enabled:
1441 elif sparse.enabled:
1435 use_rust = False
1442 use_rust = False
1436 elif not isinstance(match, allowed_matchers):
1443 elif not isinstance(match, allowed_matchers):
1437 # Some matchers have yet to be implemented
1444 # Some matchers have yet to be implemented
1438 use_rust = False
1445 use_rust = False
1439
1446
1440 if use_rust:
1447 if use_rust:
1441 try:
1448 try:
1442 return self._rust_status(
1449 return self._rust_status(
1443 match, listclean, listignored, listunknown
1450 match, listclean, listignored, listunknown
1444 )
1451 )
1445 except rustmod.FallbackError:
1452 except rustmod.FallbackError:
1446 pass
1453 pass
1447
1454
1448 def noop(f):
1455 def noop(f):
1449 pass
1456 pass
1450
1457
1451 dcontains = dmap.__contains__
1458 dcontains = dmap.__contains__
1452 dget = dmap.__getitem__
1459 dget = dmap.__getitem__
1453 ladd = lookup.append # aka "unsure"
1460 ladd = lookup.append # aka "unsure"
1454 madd = modified.append
1461 madd = modified.append
1455 aadd = added.append
1462 aadd = added.append
1456 uadd = unknown.append if listunknown else noop
1463 uadd = unknown.append if listunknown else noop
1457 iadd = ignored.append if listignored else noop
1464 iadd = ignored.append if listignored else noop
1458 radd = removed.append
1465 radd = removed.append
1459 dadd = deleted.append
1466 dadd = deleted.append
1460 cadd = clean.append if listclean else noop
1467 cadd = clean.append if listclean else noop
1461 mexact = match.exact
1468 mexact = match.exact
1462 dirignore = self._dirignore
1469 dirignore = self._dirignore
1463 checkexec = self._checkexec
1470 checkexec = self._checkexec
1464 copymap = self._map.copymap
1471 copymap = self._map.copymap
1465 lastnormaltime = self._lastnormaltime
1472 lastnormaltime = self._lastnormaltime
1466
1473
1467 # We need to do full walks when either
1474 # We need to do full walks when either
1468 # - we're listing all clean files, or
1475 # - we're listing all clean files, or
1469 # - match.traversedir does something, because match.traversedir should
1476 # - match.traversedir does something, because match.traversedir should
1470 # be called for every dir in the working dir
1477 # be called for every dir in the working dir
1471 full = listclean or match.traversedir is not None
1478 full = listclean or match.traversedir is not None
1472 for fn, st in pycompat.iteritems(
1479 for fn, st in pycompat.iteritems(
1473 self.walk(match, subrepos, listunknown, listignored, full=full)
1480 self.walk(match, subrepos, listunknown, listignored, full=full)
1474 ):
1481 ):
1475 if not dcontains(fn):
1482 if not dcontains(fn):
1476 if (listignored or mexact(fn)) and dirignore(fn):
1483 if (listignored or mexact(fn)) and dirignore(fn):
1477 if listignored:
1484 if listignored:
1478 iadd(fn)
1485 iadd(fn)
1479 else:
1486 else:
1480 uadd(fn)
1487 uadd(fn)
1481 continue
1488 continue
1482
1489
1483 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1490 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1484 # written like that for performance reasons. dmap[fn] is not a
1491 # written like that for performance reasons. dmap[fn] is not a
1485 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1492 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1486 # opcode has fast paths when the value to be unpacked is a tuple or
1493 # opcode has fast paths when the value to be unpacked is a tuple or
1487 # a list, but falls back to creating a full-fledged iterator in
1494 # a list, but falls back to creating a full-fledged iterator in
1488 # general. That is much slower than simply accessing and storing the
1495 # general. That is much slower than simply accessing and storing the
1489 # tuple members one by one.
1496 # tuple members one by one.
1490 t = dget(fn)
1497 t = dget(fn)
1491 mode = t.mode
1498 mode = t.mode
1492 size = t.size
1499 size = t.size
1493 time = t.mtime
1500 time = t.mtime
1494
1501
1495 if not st and t.tracked:
1502 if not st and t.tracked:
1496 dadd(fn)
1503 dadd(fn)
1497 elif t.merged:
1504 elif t.merged:
1498 madd(fn)
1505 madd(fn)
1499 elif t.added:
1506 elif t.added:
1500 aadd(fn)
1507 aadd(fn)
1501 elif t.removed:
1508 elif t.removed:
1502 radd(fn)
1509 radd(fn)
1503 elif t.tracked:
1510 elif t.tracked:
1504 if (
1511 if (
1505 size >= 0
1512 size >= 0
1506 and (
1513 and (
1507 (size != st.st_size and size != st.st_size & _rangemask)
1514 (size != st.st_size and size != st.st_size & _rangemask)
1508 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1515 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1509 )
1516 )
1510 or t.from_p2
1517 or t.from_p2
1511 or fn in copymap
1518 or fn in copymap
1512 ):
1519 ):
1513 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1520 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1514 # issue6456: Size returned may be longer due to
1521 # issue6456: Size returned may be longer due to
1515 # encryption on EXT-4 fscrypt, undecided.
1522 # encryption on EXT-4 fscrypt, undecided.
1516 ladd(fn)
1523 ladd(fn)
1517 else:
1524 else:
1518 madd(fn)
1525 madd(fn)
1519 elif (
1526 elif (
1520 time != st[stat.ST_MTIME]
1527 time != st[stat.ST_MTIME]
1521 and time != st[stat.ST_MTIME] & _rangemask
1528 and time != st[stat.ST_MTIME] & _rangemask
1522 ):
1529 ):
1523 ladd(fn)
1530 ladd(fn)
1524 elif st[stat.ST_MTIME] == lastnormaltime:
1531 elif st[stat.ST_MTIME] == lastnormaltime:
1525 # fn may have just been marked as normal and it may have
1532 # fn may have just been marked as normal and it may have
1526 # changed in the same second without changing its size.
1533 # changed in the same second without changing its size.
1527 # This can happen if we quickly do multiple commits.
1534 # This can happen if we quickly do multiple commits.
1528 # Force lookup, so we don't miss such a racy file change.
1535 # Force lookup, so we don't miss such a racy file change.
1529 ladd(fn)
1536 ladd(fn)
1530 elif listclean:
1537 elif listclean:
1531 cadd(fn)
1538 cadd(fn)
1532 status = scmutil.status(
1539 status = scmutil.status(
1533 modified, added, removed, deleted, unknown, ignored, clean
1540 modified, added, removed, deleted, unknown, ignored, clean
1534 )
1541 )
1535 return (lookup, status)
1542 return (lookup, status)
1536
1543
1537 def matches(self, match):
1544 def matches(self, match):
1538 """
1545 """
1539 return files in the dirstate (in whatever state) filtered by match
1546 return files in the dirstate (in whatever state) filtered by match
1540 """
1547 """
1541 dmap = self._map
1548 dmap = self._map
1542 if rustmod is not None:
1549 if rustmod is not None:
1543 dmap = self._map._rustmap
1550 dmap = self._map._rustmap
1544
1551
1545 if match.always():
1552 if match.always():
1546 return dmap.keys()
1553 return dmap.keys()
1547 files = match.files()
1554 files = match.files()
1548 if match.isexact():
1555 if match.isexact():
1549 # fast path -- filter the other way around, since typically files is
1556 # fast path -- filter the other way around, since typically files is
1550 # much smaller than dmap
1557 # much smaller than dmap
1551 return [f for f in files if f in dmap]
1558 return [f for f in files if f in dmap]
1552 if match.prefix() and all(fn in dmap for fn in files):
1559 if match.prefix() and all(fn in dmap for fn in files):
1553 # fast path -- all the values are known to be files, so just return
1560 # fast path -- all the values are known to be files, so just return
1554 # that
1561 # that
1555 return list(files)
1562 return list(files)
1556 return [f for f in dmap if match(f)]
1563 return [f for f in dmap if match(f)]
1557
1564
1558 def _actualfilename(self, tr):
1565 def _actualfilename(self, tr):
1559 if tr:
1566 if tr:
1560 return self._pendingfilename
1567 return self._pendingfilename
1561 else:
1568 else:
1562 return self._filename
1569 return self._filename
1563
1570
1564 def savebackup(self, tr, backupname):
1571 def savebackup(self, tr, backupname):
1565 '''Save current dirstate into backup file'''
1572 '''Save current dirstate into backup file'''
1566 filename = self._actualfilename(tr)
1573 filename = self._actualfilename(tr)
1567 assert backupname != filename
1574 assert backupname != filename
1568
1575
1569 # use '_writedirstate' instead of 'write' to write changes certainly,
1576 # use '_writedirstate' instead of 'write' to write changes certainly,
1570 # because the latter omits writing out if transaction is running.
1577 # because the latter omits writing out if transaction is running.
1571 # output file will be used to create backup of dirstate at this point.
1578 # output file will be used to create backup of dirstate at this point.
1572 if self._dirty or not self._opener.exists(filename):
1579 if self._dirty or not self._opener.exists(filename):
1573 self._writedirstate(
1580 self._writedirstate(
1574 self._opener(filename, b"w", atomictemp=True, checkambig=True)
1581 self._opener(filename, b"w", atomictemp=True, checkambig=True)
1575 )
1582 )
1576
1583
1577 if tr:
1584 if tr:
1578 # ensure that subsequent tr.writepending returns True for
1585 # ensure that subsequent tr.writepending returns True for
1579 # changes written out above, even if dirstate is never
1586 # changes written out above, even if dirstate is never
1580 # changed after this
1587 # changed after this
1581 tr.addfilegenerator(
1588 tr.addfilegenerator(
1582 b'dirstate',
1589 b'dirstate',
1583 (self._filename,),
1590 (self._filename,),
1584 self._writedirstate,
1591 self._writedirstate,
1585 location=b'plain',
1592 location=b'plain',
1586 )
1593 )
1587
1594
1588 # ensure that pending file written above is unlinked at
1595 # ensure that pending file written above is unlinked at
1589 # failure, even if tr.writepending isn't invoked until the
1596 # failure, even if tr.writepending isn't invoked until the
1590 # end of this transaction
1597 # end of this transaction
1591 tr.registertmp(filename, location=b'plain')
1598 tr.registertmp(filename, location=b'plain')
1592
1599
1593 self._opener.tryunlink(backupname)
1600 self._opener.tryunlink(backupname)
1594 # hardlink backup is okay because _writedirstate is always called
1601 # hardlink backup is okay because _writedirstate is always called
1595 # with an "atomictemp=True" file.
1602 # with an "atomictemp=True" file.
1596 util.copyfile(
1603 util.copyfile(
1597 self._opener.join(filename),
1604 self._opener.join(filename),
1598 self._opener.join(backupname),
1605 self._opener.join(backupname),
1599 hardlink=True,
1606 hardlink=True,
1600 )
1607 )
1601
1608
1602 def restorebackup(self, tr, backupname):
1609 def restorebackup(self, tr, backupname):
1603 '''Restore dirstate by backup file'''
1610 '''Restore dirstate by backup file'''
1604 # this "invalidate()" prevents "wlock.release()" from writing
1611 # this "invalidate()" prevents "wlock.release()" from writing
1605 # changes of dirstate out after restoring from backup file
1612 # changes of dirstate out after restoring from backup file
1606 self.invalidate()
1613 self.invalidate()
1607 filename = self._actualfilename(tr)
1614 filename = self._actualfilename(tr)
1608 o = self._opener
1615 o = self._opener
1609 if util.samefile(o.join(backupname), o.join(filename)):
1616 if util.samefile(o.join(backupname), o.join(filename)):
1610 o.unlink(backupname)
1617 o.unlink(backupname)
1611 else:
1618 else:
1612 o.rename(backupname, filename, checkambig=True)
1619 o.rename(backupname, filename, checkambig=True)
1613
1620
1614 def clearbackup(self, tr, backupname):
1621 def clearbackup(self, tr, backupname):
1615 '''Clear backup file'''
1622 '''Clear backup file'''
1616 self._opener.unlink(backupname)
1623 self._opener.unlink(backupname)
General Comments 0
You need to be logged in to leave comments. Login now