##// END OF EJS Templates
dirstate: drop the `_otherparent` method...
marmoute -
r48729:496a8e38 default
parent child Browse files
Show More
@@ -1,1634 +1,1619 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .pycompat import delattr
17 from .pycompat import delattr
18
18
19 from hgdemandimport import tracing
19 from hgdemandimport import tracing
20
20
21 from . import (
21 from . import (
22 dirstatemap,
22 dirstatemap,
23 encoding,
23 encoding,
24 error,
24 error,
25 match as matchmod,
25 match as matchmod,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 sparse,
30 sparse,
31 util,
31 util,
32 )
32 )
33
33
34 from .interfaces import (
34 from .interfaces import (
35 dirstate as intdirstate,
35 dirstate as intdirstate,
36 util as interfaceutil,
36 util as interfaceutil,
37 )
37 )
38
38
39 parsers = policy.importmod('parsers')
39 parsers = policy.importmod('parsers')
40 rustmod = policy.importrust('dirstate')
40 rustmod = policy.importrust('dirstate')
41
41
42 SUPPORTS_DIRSTATE_V2 = rustmod is not None
42 SUPPORTS_DIRSTATE_V2 = rustmod is not None
43
43
44 propertycache = util.propertycache
44 propertycache = util.propertycache
45 filecache = scmutil.filecache
45 filecache = scmutil.filecache
46 _rangemask = dirstatemap.rangemask
46 _rangemask = dirstatemap.rangemask
47
47
48 DirstateItem = parsers.DirstateItem
48 DirstateItem = parsers.DirstateItem
49
49
50
50
class repocache(filecache):
    """filecache for files in .hg/

    Overrides ``join`` so that cache-key paths are resolved through the
    dirstate's opener, i.e. relative to the ``.hg`` directory.
    """

    def join(self, obj, fname):
        # obj is the dirstate instance; its opener roots paths at .hg/
        return obj._opener.join(fname)
56
56
57
57
class rootcache(filecache):
    """filecache for files in the repository root

    Overrides ``join`` so that cache-key paths are resolved relative to
    the working-directory root (via dirstate._join).
    """

    def join(self, obj, fname):
        # obj is the dirstate instance; _join prefixes the repo root
        return obj._join(fname)
63
63
64
64
65 def _getfsnow(vfs):
65 def _getfsnow(vfs):
66 '''Get "now" timestamp on filesystem'''
66 '''Get "now" timestamp on filesystem'''
67 tmpfd, tmpname = vfs.mkstemp()
67 tmpfd, tmpname = vfs.mkstemp()
68 try:
68 try:
69 return os.fstat(tmpfd)[stat.ST_MTIME]
69 return os.fstat(tmpfd)[stat.ST_MTIME]
70 finally:
70 finally:
71 os.close(tmpfd)
71 os.close(tmpfd)
72 vfs.unlink(tmpname)
72 vfs.unlink(tmpname)
73
73
74
74
def requires_parents_change(func):
    """Decorator enforcing that *func* runs inside a parentchange context.

    Raises error.ProgrammingError when the wrapped method is invoked
    while ``self.pendingparentchange()`` is False.
    """

    def wrap(self, *args, **kwargs):
        if self.pendingparentchange():
            return func(self, *args, **kwargs)
        msg = 'calling `%s` outside of a parentchange context' % func.__name__
        raise error.ProgrammingError(msg)

    return wrap
84
84
85
85
def requires_no_parents_change(func):
    """Decorator enforcing that *func* runs outside a parentchange context.

    Raises error.ProgrammingError when the wrapped method is invoked
    while ``self.pendingparentchange()`` is True.
    """

    def wrap(self, *args, **kwargs):
        if not self.pendingparentchange():
            return func(self, *args, **kwargs)
        msg = 'calling `%s` inside of a parentchange context' % func.__name__
        raise error.ProgrammingError(msg)

    return wrap
95
95
96
96
97 @interfaceutil.implementer(intdirstate.idirstate)
97 @interfaceutil.implementer(intdirstate.idirstate)
98 class dirstate(object):
98 class dirstate(object):
    def __init__(
        self,
        opener,
        ui,
        root,
        validate,
        sparsematchfn,
        nodeconstants,
        use_dirstate_v2,
    ):
        """Create a new dirstate object.

        opener is an open()-like callable that can be used to open the
        dirstate file; root is the root of the directory tracked by
        the dirstate.
        """
        self._use_dirstate_v2 = use_dirstate_v2
        self._nodeconstants = nodeconstants
        self._opener = opener
        self._validate = validate
        self._root = root
        self._sparsematchfn = sparsematchfn
        # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
        # UNC path pointing to root share (issue4557)
        self._rootdir = pathutil.normasprefix(root)
        # True once in-memory state diverges from what is on disk
        self._dirty = False
        # mtime of the most recently normal'ed file, used by status logic
        self._lastnormaltime = 0
        self._ui = ui
        self._filecache = {}
        # nesting depth of parentchange() contexts (see pendingparentchange)
        self._parentwriters = 0
        self._filename = b'dirstate'
        self._pendingfilename = b'%s.pending' % self._filename
        # callbacks to notify on parent change, keyed by category name
        self._plchangecallbacks = {}
        # parents as they were before the first setparents() call, or None
        self._origpl = None
        # files touched since the last write (presumably consumed when
        # writing pending state — confirm against the write path)
        self._updatedfiles = set()
        self._mapcls = dirstatemap.dirstatemap
        # Access and cache cwd early, so we don't access it for the first time
        # after a working-copy update caused it to not exist (accessing it then
        # raises an exception).
        self._cwd
139
139
    def prefetch_parents(self):
        """make sure the parents are loaded

        Used to avoid a race condition.
        """
        # merely reading self._pl forces the dirstate map (and therefore
        # the parents) to be loaded and cached now
        self._pl
146
146
    @contextlib.contextmanager
    def parentchange(self):
        """Context manager for handling dirstate parents.

        If an exception occurs in the scope of the context manager,
        the incoherent dirstate won't be written when wlock is
        released.
        """
        self._parentwriters += 1
        yield
        # Typically we want the "undo" step of a context manager in a
        # finally block so it happens even when an exception
        # occurs. In this case, however, we only want to decrement
        # parentwriters if the code in the with statement exits
        # normally, so we don't have a try/finally here on purpose.
        # (A leaked nonzero count keeps pendingparentchange() True, which
        # presumably is what prevents the incoherent state being written.)
        self._parentwriters -= 1
163
163
164 def pendingparentchange(self):
164 def pendingparentchange(self):
165 """Returns true if the dirstate is in the middle of a set of changes
165 """Returns true if the dirstate is in the middle of a set of changes
166 that modify the dirstate parent.
166 that modify the dirstate parent.
167 """
167 """
168 return self._parentwriters > 0
168 return self._parentwriters > 0
169
169
    @propertycache
    def _map(self):
        """Return the dirstate contents (see documentation for dirstatemap)."""
        # Assign to self._map before returning so the instance attribute
        # shadows this propertycache descriptor; this also lets re-entrant
        # lookups during map construction see the object — TODO confirm
        self._map = self._mapcls(
            self._ui,
            self._opener,
            self._root,
            self._nodeconstants,
            self._use_dirstate_v2,
        )
        return self._map
181
181
    @property
    def _sparsematcher(self):
        """The matcher for the sparse checkout.

        The working directory may not include every file from a manifest. The
        matcher obtained by this property will match a path if it is to be
        included in the working directory.
        """
        # TODO there is potential to cache this property. For now, the matcher
        # is resolved on every access. (But the called function does use a
        # cache to keep the lookup fast.)
        return self._sparsematchfn()
194
194
195 @repocache(b'branch')
195 @repocache(b'branch')
196 def _branch(self):
196 def _branch(self):
197 try:
197 try:
198 return self._opener.read(b"branch").strip() or b"default"
198 return self._opener.read(b"branch").strip() or b"default"
199 except IOError as inst:
199 except IOError as inst:
200 if inst.errno != errno.ENOENT:
200 if inst.errno != errno.ENOENT:
201 raise
201 raise
202 return b"default"
202 return b"default"
203
203
    @property
    def _pl(self):
        """The raw parent pair (p1, p2) as stored in the dirstate map."""
        return self._map.parents()
207
207
    def hasdir(self, d):
        """Tell whether ``d`` is a directory with tracked content.

        Delegates to the dirstate map's tracked-directory index.
        """
        return self._map.hastrackeddir(d)
210
210
    @rootcache(b'.hgignore')
    def _ignore(self):
        """Matcher covering all configured ignore files (.hgignore etc.)."""
        files = self._ignorefiles()
        if not files:
            # no ignore files at all: nothing is ignored
            return matchmod.never()

        # fold every ignore file into one matcher via include: patterns
        pats = [b'include:%s' % f for f in files]
        return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
219
219
    @propertycache
    def _slash(self):
        # True when paths should be displayed with '/' although the native
        # OS separator differs (ui.slash option, e.g. on Windows)
        return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
223
223
    @propertycache
    def _checklink(self):
        # whether the filesystem under the repo root supports symlinks
        return util.checklink(self._root)
227
227
    @propertycache
    def _checkexec(self):
        # whether the filesystem under the repo root honors the exec bit
        return bool(util.checkexec(self._root))
231
231
    @propertycache
    def _checkcase(self):
        # True on case-insensitive filesystems (probed on the .hg directory)
        return not util.fscasesensitive(self._join(b'.hg'))
235
235
    def _join(self, f):
        """Return the absolute filesystem path of the repo-relative ``f``."""
        # much faster than os.path.join()
        # it's safe because f is always a relative path and _rootdir
        # already ends with a separator (see normasprefix in __init__)
        return self._rootdir + f
240
240
    def flagfunc(self, buildfallback):
        """Return a callable mapping a path to its flags: b'l', b'x' or b''.

        When the filesystem supports both symlinks and the exec bit, the
        flags are read straight from disk.  Otherwise ``buildfallback()``
        is invoked — only in that case — to supply whatever the filesystem
        cannot express.
        """
        if self._checklink and self._checkexec:
            # best case: fully capable filesystem, one lstat per file

            def f(x):
                try:
                    st = os.lstat(self._join(x))
                    if util.statislink(st):
                        return b'l'
                    if util.statisexec(st):
                        return b'x'
                except OSError:
                    # a missing file simply has no flags
                    pass
                return b''

            return f

        # note: the (potentially expensive) fallback is built lazily, only
        # when at least one capability is missing
        fallback = buildfallback()
        if self._checklink:

            def f(x):
                if os.path.islink(self._join(x)):
                    return b'l'
                if b'x' in fallback(x):
                    return b'x'
                return b''

            return f
        if self._checkexec:

            def f(x):
                if b'l' in fallback(x):
                    return b'l'
                if util.isexec(self._join(x)):
                    return b'x'
                return b''

            return f
        else:
            return fallback
280
280
    @propertycache
    def _cwd(self):
        """The current working directory, honoring the ui.forcecwd override."""
        # internal config: ui.forcecwd
        forcecwd = self._ui.config(b'ui', b'forcecwd')
        if forcecwd:
            return forcecwd
        return encoding.getcwd()
288
288
289 def getcwd(self):
289 def getcwd(self):
290 """Return the path from which a canonical path is calculated.
290 """Return the path from which a canonical path is calculated.
291
291
292 This path should be used to resolve file patterns or to convert
292 This path should be used to resolve file patterns or to convert
293 canonical paths back to file paths for display. It shouldn't be
293 canonical paths back to file paths for display. It shouldn't be
294 used to get real file paths. Use vfs functions instead.
294 used to get real file paths. Use vfs functions instead.
295 """
295 """
296 cwd = self._cwd
296 cwd = self._cwd
297 if cwd == self._root:
297 if cwd == self._root:
298 return b''
298 return b''
299 # self._root ends with a path separator if self._root is '/' or 'C:\'
299 # self._root ends with a path separator if self._root is '/' or 'C:\'
300 rootsep = self._root
300 rootsep = self._root
301 if not util.endswithsep(rootsep):
301 if not util.endswithsep(rootsep):
302 rootsep += pycompat.ossep
302 rootsep += pycompat.ossep
303 if cwd.startswith(rootsep):
303 if cwd.startswith(rootsep):
304 return cwd[len(rootsep) :]
304 return cwd[len(rootsep) :]
305 else:
305 else:
306 # we're outside the repo. return an absolute path.
306 # we're outside the repo. return an absolute path.
307 return cwd
307 return cwd
308
308
    def pathto(self, f, cwd=None):
        """Return ``f`` expressed relative to ``cwd`` for display purposes.

        Defaults to self.getcwd() when ``cwd`` is not given; converts to
        forward slashes when the ui.slash option is active.
        """
        if cwd is None:
            cwd = self.getcwd()
        path = util.pathto(self._root, cwd, f)
        if self._slash:
            return util.pconvert(path)
        return path
316
316
317 def __getitem__(self, key):
317 def __getitem__(self, key):
318 """Return the current state of key (a filename) in the dirstate.
318 """Return the current state of key (a filename) in the dirstate.
319
319
320 States are:
320 States are:
321 n normal
321 n normal
322 m needs merging
322 m needs merging
323 r marked for removal
323 r marked for removal
324 a marked for addition
324 a marked for addition
325 ? not tracked
325 ? not tracked
326
326
327 XXX The "state" is a bit obscure to be in the "public" API. we should
327 XXX The "state" is a bit obscure to be in the "public" API. we should
328 consider migrating all user of this to going through the dirstate entry
328 consider migrating all user of this to going through the dirstate entry
329 instead.
329 instead.
330 """
330 """
331 entry = self._map.get(key)
331 entry = self._map.get(key)
332 if entry is not None:
332 if entry is not None:
333 return entry.state
333 return entry.state
334 return b'?'
334 return b'?'
335
335
    def __contains__(self, key):
        """Tell whether filename ``key`` has a dirstate entry."""
        return key in self._map
338
338
    def __iter__(self):
        """Iterate over tracked filenames in sorted order."""
        return iter(sorted(self._map))
341
341
    def items(self):
        """Iterate over (filename, entry) pairs (py2/py3 compatible)."""
        return pycompat.iteritems(self._map)

    # historical alias kept for callers written against the py2 API
    iteritems = items
346
346
    def directories(self):
        """Expose the dirstate map's directory listing (delegation)."""
        return self._map.directories()
349
349
    def parents(self):
        """Return both parents, each passed through the validate hook."""
        return [self._validate(p) for p in self._pl]
352
352
    def p1(self):
        """Return the validated first parent of the working directory."""
        return self._validate(self._pl[0])
355
355
    def p2(self):
        """Return the validated second parent of the working directory."""
        return self._validate(self._pl[1])
358
358
    @property
    def in_merge(self):
        """True if a merge is in progress"""
        # a merge is in progress exactly when p2 is not the null revision
        return self._pl[1] != self._nodeconstants.nullid
363
363
    def branch(self):
        """Return the current branch name, converted to the local encoding."""
        return encoding.tolocal(self._branch)
366
366
    def setparents(self, p1, p2=None):
        """Set dirstate parents to p1 and p2.

        When moving from two parents to one, "merged" entries are
        adjusted to normal and previous copy records discarded and
        returned by the call.

        See localrepo.setparents()
        """
        if p2 is None:
            p2 = self._nodeconstants.nullid
        if self._parentwriters == 0:
            raise ValueError(
                b"cannot set dirstate parent outside of "
                b"dirstate.parentchange context manager"
            )

        self._dirty = True
        oldp2 = self._pl[1]
        if self._origpl is None:
            # remember the parents as they were before the first change
            self._origpl = self._pl
        self._map.setparents(p1, p2)
        copies = {}
        # only when leaving a merge (p2 goes from non-null to null) do the
        # merge/otherparent markers need fixing up
        if (
            oldp2 != self._nodeconstants.nullid
            and p2 == self._nodeconstants.nullid
        ):
            candidatefiles = self._map.non_normal_or_other_parent_paths()

            for f in candidatefiles:
                s = self._map.get(f)
                if s is None:
                    continue

                # Discard "merged" markers when moving away from a merge state
                if s.merged:
                    source = self._map.copymap.get(f)
                    if source:
                        copies[f] = source
                    self._normallookup(f)
                # Also fix up otherparent markers
                elif s.from_p2:
                    source = self._map.copymap.get(f)
                    if source:
                        copies[f] = source
                    self._add(f)
        return copies
414
414
    def setbranch(self, branch):
        """Persist ``branch`` (local encoding) as the current branch.

        Updates the _branch filecache entry and atomically rewrites the
        .hg/branch file; on any failure the temp file is discarded and the
        exception re-raised.
        """
        self.__class__._branch.set(self, encoding.fromlocal(branch))
        f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
        try:
            f.write(self._branch + b'\n')
            f.close()

            # make sure filecache has the correct stat info for _branch after
            # replacing the underlying file
            ce = self._filecache[b'_branch']
            if ce:
                ce.refresh()
        except:  # re-raises
            f.discard()
            raise
430
430
431 def invalidate(self):
431 def invalidate(self):
432 """Causes the next access to reread the dirstate.
432 """Causes the next access to reread the dirstate.
433
433
434 This is different from localrepo.invalidatedirstate() because it always
434 This is different from localrepo.invalidatedirstate() because it always
435 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
435 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
436 check whether the dirstate has changed before rereading it."""
436 check whether the dirstate has changed before rereading it."""
437
437
438 for a in ("_map", "_branch", "_ignore"):
438 for a in ("_map", "_branch", "_ignore"):
439 if a in self.__dict__:
439 if a in self.__dict__:
440 delattr(self, a)
440 delattr(self, a)
441 self._lastnormaltime = 0
441 self._lastnormaltime = 0
442 self._dirty = False
442 self._dirty = False
443 self._updatedfiles.clear()
443 self._updatedfiles.clear()
444 self._parentwriters = 0
444 self._parentwriters = 0
445 self._origpl = None
445 self._origpl = None
446
446
447 def copy(self, source, dest):
447 def copy(self, source, dest):
448 """Mark dest as a copy of source. Unmark dest if source is None."""
448 """Mark dest as a copy of source. Unmark dest if source is None."""
449 if source == dest:
449 if source == dest:
450 return
450 return
451 self._dirty = True
451 self._dirty = True
452 if source is not None:
452 if source is not None:
453 self._map.copymap[dest] = source
453 self._map.copymap[dest] = source
454 self._updatedfiles.add(source)
454 self._updatedfiles.add(source)
455 self._updatedfiles.add(dest)
455 self._updatedfiles.add(dest)
456 elif self._map.copymap.pop(dest, None):
456 elif self._map.copymap.pop(dest, None):
457 self._updatedfiles.add(dest)
457 self._updatedfiles.add(dest)
458
458
    def copied(self, file):
        """Return the recorded copy source of ``file``, or None."""
        return self._map.copymap.get(file, None)
461
461
    def copies(self):
        """Return the full dest -> source copy map."""
        return self._map.copymap
464
464
    @requires_no_parents_change
    def set_tracked(self, filename):
        """a "public" method for generic code to mark a file as tracked

        This function is to be called outside of "update/merge" case. For
        example by a command like `hg add X`.

        Return True if the file was previously untracked, False otherwise.
        """
        entry = self._map.get(filename)
        if entry is None:
            # completely unknown file: record it as added
            self._add(filename)
            return True
        elif not entry.tracked:
            # known but untracked (e.g. removed): resurrect the entry
            self._normallookup(filename)
            return True
        # XXX This is probably overkill for most cases, but we need this to
        # fully replace the `normallookup` call with `set_tracked` one.
        # Consider smoothing this in the future.
        self.set_possibly_dirty(filename)
        return False
486
486
    @requires_no_parents_change
    def set_untracked(self, filename):
        """a "public" method for generic code to mark a file as untracked

        This function is to be called outside of "update/merge" case. For
        example by a command like `hg remove X`.

        Return True if the file was previously tracked, False otherwise.
        """
        entry = self._map.get(filename)
        if entry is None:
            # nothing to untrack
            return False
        elif entry.added:
            # never committed: drop the entry entirely
            self._drop(filename)
            return True
        else:
            self._dirty = True
            self._updatedfiles.add(filename)
            self._map.set_untracked(filename)
            return True
507
507
    @requires_no_parents_change
    def set_clean(self, filename, parentfiledata=None):
        """record that the current state of the file on disk is known to be clean

        ``parentfiledata``, when given, is a pre-computed (mode, size, mtime)
        triple; otherwise the file is stat()ed now.
        """
        self._dirty = True
        self._updatedfiles.add(filename)
        if parentfiledata:
            (mode, size, mtime) = parentfiledata
        else:
            (mode, size, mtime) = self._get_filedata(filename)
        self._addpath(filename, mode=mode, size=size, mtime=mtime)
        # a clean file cannot be a copy target nor non-normal
        self._map.copymap.pop(filename, None)
        if filename in self._map.nonnormalset:
            self._map.nonnormalset.remove(filename)
        if mtime > self._lastnormaltime:
            # Remember the most recent modification timeslot for status(),
            # to make sure we won't miss future size-preserving file content
            # modifications that happen within the same timeslot.
            self._lastnormaltime = mtime
526
526
    @requires_no_parents_change
    def set_possibly_dirty(self, filename):
        """record that the current state of the file on disk is unknown"""
        self._dirty = True
        self._updatedfiles.add(filename)
        self._map.set_possibly_dirty(filename)
533
533
    @requires_parents_change
    def update_file_p1(
        self,
        filename,
        p1_tracked,
    ):
        """Set a file as tracked in the parent (or not)

        This is to be called when adjusting the dirstate to a new parent
        after a history rewriting operation.

        It should not be called during a merge (p2 != nullid) and only within
        a `with dirstate.parentchange():` context.
        """
        if self.in_merge:
            msg = b'update_file_reference should not be called when merging'
            raise error.ProgrammingError(msg)
        entry = self._map.get(filename)
        if entry is None:
            wc_tracked = False
        else:
            wc_tracked = entry.tracked
        possibly_dirty = False
        if p1_tracked and wc_tracked:
            # the underlying reference might have changed, we will have to
            # check it.
            possibly_dirty = True
        elif not (p1_tracked or wc_tracked):
            # the file is no longer relevant to anyone
            self._drop(filename)
        elif (not p1_tracked) and wc_tracked:
            if entry is not None and entry.added:
                return  # avoid dropping copy information (maybe?)
        elif p1_tracked and not wc_tracked:
            pass
        else:
            assert False, 'unreachable'

        # this mean we are doing call for file we do not really care about the
        # data (eg: added or removed), however this should be a minor overhead
        # compared to the overall update process calling this.
        parentfiledata = None
        if wc_tracked:
            parentfiledata = self._get_filedata(filename)

        self._updatedfiles.add(filename)
        self._map.reset_state(
            filename,
            wc_tracked,
            p1_tracked,
            possibly_dirty=possibly_dirty,
            parentfiledata=parentfiledata,
        )
        if (
            parentfiledata is not None
            and parentfiledata[2] > self._lastnormaltime
        ):
            # Remember the most recent modification timeslot for status(),
            # to make sure we won't miss future size-preserving file content
            # modifications that happen within the same timeslot.
            self._lastnormaltime = parentfiledata[2]
595
595
596 @requires_parents_change
596 @requires_parents_change
597 def update_file(
597 def update_file(
598 self,
598 self,
599 filename,
599 filename,
600 wc_tracked,
600 wc_tracked,
601 p1_tracked,
601 p1_tracked,
602 p2_tracked=False,
602 p2_tracked=False,
603 merged=False,
603 merged=False,
604 clean_p1=False,
604 clean_p1=False,
605 clean_p2=False,
605 clean_p2=False,
606 possibly_dirty=False,
606 possibly_dirty=False,
607 parentfiledata=None,
607 parentfiledata=None,
608 ):
608 ):
609 """update the information about a file in the dirstate
609 """update the information about a file in the dirstate
610
610
611 This is to be called when the direstates parent changes to keep track
611 This is to be called when the direstates parent changes to keep track
612 of what is the file situation in regards to the working copy and its parent.
612 of what is the file situation in regards to the working copy and its parent.
613
613
614 This function must be called within a `dirstate.parentchange` context.
614 This function must be called within a `dirstate.parentchange` context.
615
615
616 note: the API is at an early stage and we might need to adjust it
616 note: the API is at an early stage and we might need to adjust it
617 depending of what information ends up being relevant and useful to
617 depending of what information ends up being relevant and useful to
618 other processing.
618 other processing.
619 """
619 """
620 if merged and (clean_p1 or clean_p2):
620 if merged and (clean_p1 or clean_p2):
621 msg = b'`merged` argument incompatible with `clean_p1`/`clean_p2`'
621 msg = b'`merged` argument incompatible with `clean_p1`/`clean_p2`'
622 raise error.ProgrammingError(msg)
622 raise error.ProgrammingError(msg)
623
623
624 # note: I do not think we need to double check name clash here since we
624 # note: I do not think we need to double check name clash here since we
625 # are in a update/merge case that should already have taken care of
625 # are in a update/merge case that should already have taken care of
626 # this. The test agrees
626 # this. The test agrees
627
627
628 self._dirty = True
628 self._dirty = True
629 self._updatedfiles.add(filename)
629 self._updatedfiles.add(filename)
630
630
631 need_parent_file_data = (
631 need_parent_file_data = (
632 not (possibly_dirty or clean_p2 or merged)
632 not (possibly_dirty or clean_p2 or merged)
633 and wc_tracked
633 and wc_tracked
634 and p1_tracked
634 and p1_tracked
635 )
635 )
636
636
637 # this mean we are doing call for file we do not really care about the
637 # this mean we are doing call for file we do not really care about the
638 # data (eg: added or removed), however this should be a minor overhead
638 # data (eg: added or removed), however this should be a minor overhead
639 # compared to the overall update process calling this.
639 # compared to the overall update process calling this.
640 if need_parent_file_data:
640 if need_parent_file_data:
641 if parentfiledata is None:
641 if parentfiledata is None:
642 parentfiledata = self._get_filedata(filename)
642 parentfiledata = self._get_filedata(filename)
643 mtime = parentfiledata[2]
643 mtime = parentfiledata[2]
644
644
645 if mtime > self._lastnormaltime:
645 if mtime > self._lastnormaltime:
646 # Remember the most recent modification timeslot for
646 # Remember the most recent modification timeslot for
647 # status(), to make sure we won't miss future
647 # status(), to make sure we won't miss future
648 # size-preserving file content modifications that happen
648 # size-preserving file content modifications that happen
649 # within the same timeslot.
649 # within the same timeslot.
650 self._lastnormaltime = mtime
650 self._lastnormaltime = mtime
651
651
652 self._map.reset_state(
652 self._map.reset_state(
653 filename,
653 filename,
654 wc_tracked,
654 wc_tracked,
655 p1_tracked,
655 p1_tracked,
656 p2_tracked=p2_tracked,
656 p2_tracked=p2_tracked,
657 merged=merged,
657 merged=merged,
658 clean_p1=clean_p1,
658 clean_p1=clean_p1,
659 clean_p2=clean_p2,
659 clean_p2=clean_p2,
660 possibly_dirty=possibly_dirty,
660 possibly_dirty=possibly_dirty,
661 parentfiledata=parentfiledata,
661 parentfiledata=parentfiledata,
662 )
662 )
663 if (
663 if (
664 parentfiledata is not None
664 parentfiledata is not None
665 and parentfiledata[2] > self._lastnormaltime
665 and parentfiledata[2] > self._lastnormaltime
666 ):
666 ):
667 # Remember the most recent modification timeslot for status(),
667 # Remember the most recent modification timeslot for status(),
668 # to make sure we won't miss future size-preserving file content
668 # to make sure we won't miss future size-preserving file content
669 # modifications that happen within the same timeslot.
669 # modifications that happen within the same timeslot.
670 self._lastnormaltime = parentfiledata[2]
670 self._lastnormaltime = parentfiledata[2]
671
671
672 def _addpath(
672 def _addpath(
673 self,
673 self,
674 f,
674 f,
675 mode=0,
675 mode=0,
676 size=None,
676 size=None,
677 mtime=None,
677 mtime=None,
678 added=False,
678 added=False,
679 merged=False,
679 merged=False,
680 from_p2=False,
680 from_p2=False,
681 possibly_dirty=False,
681 possibly_dirty=False,
682 ):
682 ):
683 entry = self._map.get(f)
683 entry = self._map.get(f)
684 if added or entry is not None and entry.removed:
684 if added or entry is not None and entry.removed:
685 scmutil.checkfilename(f)
685 scmutil.checkfilename(f)
686 if self._map.hastrackeddir(f):
686 if self._map.hastrackeddir(f):
687 msg = _(b'directory %r already in dirstate')
687 msg = _(b'directory %r already in dirstate')
688 msg %= pycompat.bytestr(f)
688 msg %= pycompat.bytestr(f)
689 raise error.Abort(msg)
689 raise error.Abort(msg)
690 # shadows
690 # shadows
691 for d in pathutil.finddirs(f):
691 for d in pathutil.finddirs(f):
692 if self._map.hastrackeddir(d):
692 if self._map.hastrackeddir(d):
693 break
693 break
694 entry = self._map.get(d)
694 entry = self._map.get(d)
695 if entry is not None and not entry.removed:
695 if entry is not None and not entry.removed:
696 msg = _(b'file %r in dirstate clashes with %r')
696 msg = _(b'file %r in dirstate clashes with %r')
697 msg %= (pycompat.bytestr(d), pycompat.bytestr(f))
697 msg %= (pycompat.bytestr(d), pycompat.bytestr(f))
698 raise error.Abort(msg)
698 raise error.Abort(msg)
699 self._dirty = True
699 self._dirty = True
700 self._updatedfiles.add(f)
700 self._updatedfiles.add(f)
701 self._map.addfile(
701 self._map.addfile(
702 f,
702 f,
703 mode=mode,
703 mode=mode,
704 size=size,
704 size=size,
705 mtime=mtime,
705 mtime=mtime,
706 added=added,
706 added=added,
707 merged=merged,
707 merged=merged,
708 from_p2=from_p2,
708 from_p2=from_p2,
709 possibly_dirty=possibly_dirty,
709 possibly_dirty=possibly_dirty,
710 )
710 )
711
711
712 def _get_filedata(self, filename):
712 def _get_filedata(self, filename):
713 """returns"""
713 """returns"""
714 s = os.lstat(self._join(filename))
714 s = os.lstat(self._join(filename))
715 mode = s.st_mode
715 mode = s.st_mode
716 size = s.st_size
716 size = s.st_size
717 mtime = s[stat.ST_MTIME]
717 mtime = s[stat.ST_MTIME]
718 return (mode, size, mtime)
718 return (mode, size, mtime)
719
719
720 def _normallookup(self, f):
720 def _normallookup(self, f):
721 '''Mark a file normal, but possibly dirty.'''
721 '''Mark a file normal, but possibly dirty.'''
722 if self.in_merge:
722 if self.in_merge:
723 # if there is a merge going on and the file was either
723 # if there is a merge going on and the file was either
724 # "merged" or coming from other parent (-2) before
724 # "merged" or coming from other parent (-2) before
725 # being removed, restore that state.
725 # being removed, restore that state.
726 entry = self._map.get(f)
726 entry = self._map.get(f)
727 if entry is not None:
727 if entry is not None:
728 # XXX this should probably be dealt with a a lower level
728 # XXX this should probably be dealt with a a lower level
729 # (see `merged_removed` and `from_p2_removed`)
729 # (see `merged_removed` and `from_p2_removed`)
730 if entry.merged_removed or entry.from_p2_removed:
730 if entry.merged_removed or entry.from_p2_removed:
731 source = self._map.copymap.get(f)
731 source = self._map.copymap.get(f)
732 if entry.merged_removed:
732 self._addpath(f, from_p2=True)
733 self._otherparent(f)
733 self._map.copymap.pop(f, None)
734 elif entry.from_p2_removed:
735 self._otherparent(f)
736 if source is not None:
734 if source is not None:
737 self.copy(source, f)
735 self.copy(source, f)
738 return
736 return
739 elif entry.merged or entry.from_p2:
737 elif entry.merged or entry.from_p2:
740 return
738 return
741 self._addpath(f, possibly_dirty=True)
739 self._addpath(f, possibly_dirty=True)
742 self._map.copymap.pop(f, None)
740 self._map.copymap.pop(f, None)
743
741
744 def _otherparent(self, f):
745 if not self.in_merge:
746 msg = _(b"setting %r to other parent only allowed in merges") % f
747 raise error.Abort(msg)
748 entry = self._map.get(f)
749 if entry is not None and entry.tracked:
750 # merge-like
751 self._addpath(f, merged=True)
752 else:
753 # add-like
754 self._addpath(f, from_p2=True)
755 self._map.copymap.pop(f, None)
756
757 def _add(self, filename):
742 def _add(self, filename):
758 """internal function to mark a file as added"""
743 """internal function to mark a file as added"""
759 self._addpath(filename, added=True)
744 self._addpath(filename, added=True)
760 self._map.copymap.pop(filename, None)
745 self._map.copymap.pop(filename, None)
761
746
762 def _drop(self, filename):
747 def _drop(self, filename):
763 """internal function to drop a file from the dirstate"""
748 """internal function to drop a file from the dirstate"""
764 if self._map.dropfile(filename):
749 if self._map.dropfile(filename):
765 self._dirty = True
750 self._dirty = True
766 self._updatedfiles.add(filename)
751 self._updatedfiles.add(filename)
767 self._map.copymap.pop(filename, None)
752 self._map.copymap.pop(filename, None)
768
753
769 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
754 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
770 if exists is None:
755 if exists is None:
771 exists = os.path.lexists(os.path.join(self._root, path))
756 exists = os.path.lexists(os.path.join(self._root, path))
772 if not exists:
757 if not exists:
773 # Maybe a path component exists
758 # Maybe a path component exists
774 if not ignoremissing and b'/' in path:
759 if not ignoremissing and b'/' in path:
775 d, f = path.rsplit(b'/', 1)
760 d, f = path.rsplit(b'/', 1)
776 d = self._normalize(d, False, ignoremissing, None)
761 d = self._normalize(d, False, ignoremissing, None)
777 folded = d + b"/" + f
762 folded = d + b"/" + f
778 else:
763 else:
779 # No path components, preserve original case
764 # No path components, preserve original case
780 folded = path
765 folded = path
781 else:
766 else:
782 # recursively normalize leading directory components
767 # recursively normalize leading directory components
783 # against dirstate
768 # against dirstate
784 if b'/' in normed:
769 if b'/' in normed:
785 d, f = normed.rsplit(b'/', 1)
770 d, f = normed.rsplit(b'/', 1)
786 d = self._normalize(d, False, ignoremissing, True)
771 d = self._normalize(d, False, ignoremissing, True)
787 r = self._root + b"/" + d
772 r = self._root + b"/" + d
788 folded = d + b"/" + util.fspath(f, r)
773 folded = d + b"/" + util.fspath(f, r)
789 else:
774 else:
790 folded = util.fspath(normed, self._root)
775 folded = util.fspath(normed, self._root)
791 storemap[normed] = folded
776 storemap[normed] = folded
792
777
793 return folded
778 return folded
794
779
795 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
780 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
796 normed = util.normcase(path)
781 normed = util.normcase(path)
797 folded = self._map.filefoldmap.get(normed, None)
782 folded = self._map.filefoldmap.get(normed, None)
798 if folded is None:
783 if folded is None:
799 if isknown:
784 if isknown:
800 folded = path
785 folded = path
801 else:
786 else:
802 folded = self._discoverpath(
787 folded = self._discoverpath(
803 path, normed, ignoremissing, exists, self._map.filefoldmap
788 path, normed, ignoremissing, exists, self._map.filefoldmap
804 )
789 )
805 return folded
790 return folded
806
791
807 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
792 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
808 normed = util.normcase(path)
793 normed = util.normcase(path)
809 folded = self._map.filefoldmap.get(normed, None)
794 folded = self._map.filefoldmap.get(normed, None)
810 if folded is None:
795 if folded is None:
811 folded = self._map.dirfoldmap.get(normed, None)
796 folded = self._map.dirfoldmap.get(normed, None)
812 if folded is None:
797 if folded is None:
813 if isknown:
798 if isknown:
814 folded = path
799 folded = path
815 else:
800 else:
816 # store discovered result in dirfoldmap so that future
801 # store discovered result in dirfoldmap so that future
817 # normalizefile calls don't start matching directories
802 # normalizefile calls don't start matching directories
818 folded = self._discoverpath(
803 folded = self._discoverpath(
819 path, normed, ignoremissing, exists, self._map.dirfoldmap
804 path, normed, ignoremissing, exists, self._map.dirfoldmap
820 )
805 )
821 return folded
806 return folded
822
807
823 def normalize(self, path, isknown=False, ignoremissing=False):
808 def normalize(self, path, isknown=False, ignoremissing=False):
824 """
809 """
825 normalize the case of a pathname when on a casefolding filesystem
810 normalize the case of a pathname when on a casefolding filesystem
826
811
827 isknown specifies whether the filename came from walking the
812 isknown specifies whether the filename came from walking the
828 disk, to avoid extra filesystem access.
813 disk, to avoid extra filesystem access.
829
814
830 If ignoremissing is True, missing path are returned
815 If ignoremissing is True, missing path are returned
831 unchanged. Otherwise, we try harder to normalize possibly
816 unchanged. Otherwise, we try harder to normalize possibly
832 existing path components.
817 existing path components.
833
818
834 The normalized case is determined based on the following precedence:
819 The normalized case is determined based on the following precedence:
835
820
836 - version of name already stored in the dirstate
821 - version of name already stored in the dirstate
837 - version of name stored on disk
822 - version of name stored on disk
838 - version provided via command arguments
823 - version provided via command arguments
839 """
824 """
840
825
841 if self._checkcase:
826 if self._checkcase:
842 return self._normalize(path, isknown, ignoremissing)
827 return self._normalize(path, isknown, ignoremissing)
843 return path
828 return path
844
829
845 def clear(self):
830 def clear(self):
846 self._map.clear()
831 self._map.clear()
847 self._lastnormaltime = 0
832 self._lastnormaltime = 0
848 self._updatedfiles.clear()
833 self._updatedfiles.clear()
849 self._dirty = True
834 self._dirty = True
850
835
851 def rebuild(self, parent, allfiles, changedfiles=None):
836 def rebuild(self, parent, allfiles, changedfiles=None):
852 if changedfiles is None:
837 if changedfiles is None:
853 # Rebuild entire dirstate
838 # Rebuild entire dirstate
854 to_lookup = allfiles
839 to_lookup = allfiles
855 to_drop = []
840 to_drop = []
856 lastnormaltime = self._lastnormaltime
841 lastnormaltime = self._lastnormaltime
857 self.clear()
842 self.clear()
858 self._lastnormaltime = lastnormaltime
843 self._lastnormaltime = lastnormaltime
859 elif len(changedfiles) < 10:
844 elif len(changedfiles) < 10:
860 # Avoid turning allfiles into a set, which can be expensive if it's
845 # Avoid turning allfiles into a set, which can be expensive if it's
861 # large.
846 # large.
862 to_lookup = []
847 to_lookup = []
863 to_drop = []
848 to_drop = []
864 for f in changedfiles:
849 for f in changedfiles:
865 if f in allfiles:
850 if f in allfiles:
866 to_lookup.append(f)
851 to_lookup.append(f)
867 else:
852 else:
868 to_drop.append(f)
853 to_drop.append(f)
869 else:
854 else:
870 changedfilesset = set(changedfiles)
855 changedfilesset = set(changedfiles)
871 to_lookup = changedfilesset & set(allfiles)
856 to_lookup = changedfilesset & set(allfiles)
872 to_drop = changedfilesset - to_lookup
857 to_drop = changedfilesset - to_lookup
873
858
874 if self._origpl is None:
859 if self._origpl is None:
875 self._origpl = self._pl
860 self._origpl = self._pl
876 self._map.setparents(parent, self._nodeconstants.nullid)
861 self._map.setparents(parent, self._nodeconstants.nullid)
877
862
878 for f in to_lookup:
863 for f in to_lookup:
879 self._normallookup(f)
864 self._normallookup(f)
880 for f in to_drop:
865 for f in to_drop:
881 self._drop(f)
866 self._drop(f)
882
867
883 self._dirty = True
868 self._dirty = True
884
869
885 def identity(self):
870 def identity(self):
886 """Return identity of dirstate itself to detect changing in storage
871 """Return identity of dirstate itself to detect changing in storage
887
872
888 If identity of previous dirstate is equal to this, writing
873 If identity of previous dirstate is equal to this, writing
889 changes based on the former dirstate out can keep consistency.
874 changes based on the former dirstate out can keep consistency.
890 """
875 """
891 return self._map.identity
876 return self._map.identity
892
877
893 def write(self, tr):
878 def write(self, tr):
894 if not self._dirty:
879 if not self._dirty:
895 return
880 return
896
881
897 filename = self._filename
882 filename = self._filename
898 if tr:
883 if tr:
899 # 'dirstate.write()' is not only for writing in-memory
884 # 'dirstate.write()' is not only for writing in-memory
900 # changes out, but also for dropping ambiguous timestamp.
885 # changes out, but also for dropping ambiguous timestamp.
901 # delayed writing re-raise "ambiguous timestamp issue".
886 # delayed writing re-raise "ambiguous timestamp issue".
902 # See also the wiki page below for detail:
887 # See also the wiki page below for detail:
903 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
888 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
904
889
905 # emulate dropping timestamp in 'parsers.pack_dirstate'
890 # emulate dropping timestamp in 'parsers.pack_dirstate'
906 now = _getfsnow(self._opener)
891 now = _getfsnow(self._opener)
907 self._map.clearambiguoustimes(self._updatedfiles, now)
892 self._map.clearambiguoustimes(self._updatedfiles, now)
908
893
909 # emulate that all 'dirstate.normal' results are written out
894 # emulate that all 'dirstate.normal' results are written out
910 self._lastnormaltime = 0
895 self._lastnormaltime = 0
911 self._updatedfiles.clear()
896 self._updatedfiles.clear()
912
897
913 # delay writing in-memory changes out
898 # delay writing in-memory changes out
914 tr.addfilegenerator(
899 tr.addfilegenerator(
915 b'dirstate',
900 b'dirstate',
916 (self._filename,),
901 (self._filename,),
917 lambda f: self._writedirstate(tr, f),
902 lambda f: self._writedirstate(tr, f),
918 location=b'plain',
903 location=b'plain',
919 )
904 )
920 return
905 return
921
906
922 st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
907 st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
923 self._writedirstate(tr, st)
908 self._writedirstate(tr, st)
924
909
925 def addparentchangecallback(self, category, callback):
910 def addparentchangecallback(self, category, callback):
926 """add a callback to be called when the wd parents are changed
911 """add a callback to be called when the wd parents are changed
927
912
928 Callback will be called with the following arguments:
913 Callback will be called with the following arguments:
929 dirstate, (oldp1, oldp2), (newp1, newp2)
914 dirstate, (oldp1, oldp2), (newp1, newp2)
930
915
931 Category is a unique identifier to allow overwriting an old callback
916 Category is a unique identifier to allow overwriting an old callback
932 with a newer callback.
917 with a newer callback.
933 """
918 """
934 self._plchangecallbacks[category] = callback
919 self._plchangecallbacks[category] = callback
935
920
936 def _writedirstate(self, tr, st):
921 def _writedirstate(self, tr, st):
937 # notify callbacks about parents change
922 # notify callbacks about parents change
938 if self._origpl is not None and self._origpl != self._pl:
923 if self._origpl is not None and self._origpl != self._pl:
939 for c, callback in sorted(
924 for c, callback in sorted(
940 pycompat.iteritems(self._plchangecallbacks)
925 pycompat.iteritems(self._plchangecallbacks)
941 ):
926 ):
942 callback(self, self._origpl, self._pl)
927 callback(self, self._origpl, self._pl)
943 self._origpl = None
928 self._origpl = None
944 # use the modification time of the newly created temporary file as the
929 # use the modification time of the newly created temporary file as the
945 # filesystem's notion of 'now'
930 # filesystem's notion of 'now'
946 now = util.fstat(st)[stat.ST_MTIME] & _rangemask
931 now = util.fstat(st)[stat.ST_MTIME] & _rangemask
947
932
948 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
933 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
949 # timestamp of each entries in dirstate, because of 'now > mtime'
934 # timestamp of each entries in dirstate, because of 'now > mtime'
950 delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
935 delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
951 if delaywrite > 0:
936 if delaywrite > 0:
952 # do we have any files to delay for?
937 # do we have any files to delay for?
953 for f, e in pycompat.iteritems(self._map):
938 for f, e in pycompat.iteritems(self._map):
954 if e.need_delay(now):
939 if e.need_delay(now):
955 import time # to avoid useless import
940 import time # to avoid useless import
956
941
957 # rather than sleep n seconds, sleep until the next
942 # rather than sleep n seconds, sleep until the next
958 # multiple of n seconds
943 # multiple of n seconds
959 clock = time.time()
944 clock = time.time()
960 start = int(clock) - (int(clock) % delaywrite)
945 start = int(clock) - (int(clock) % delaywrite)
961 end = start + delaywrite
946 end = start + delaywrite
962 time.sleep(end - clock)
947 time.sleep(end - clock)
963 now = end # trust our estimate that the end is near now
948 now = end # trust our estimate that the end is near now
964 break
949 break
965
950
966 self._map.write(tr, st, now)
951 self._map.write(tr, st, now)
967 self._lastnormaltime = 0
952 self._lastnormaltime = 0
968 self._dirty = False
953 self._dirty = False
969
954
970 def _dirignore(self, f):
955 def _dirignore(self, f):
971 if self._ignore(f):
956 if self._ignore(f):
972 return True
957 return True
973 for p in pathutil.finddirs(f):
958 for p in pathutil.finddirs(f):
974 if self._ignore(p):
959 if self._ignore(p):
975 return True
960 return True
976 return False
961 return False
977
962
978 def _ignorefiles(self):
963 def _ignorefiles(self):
979 files = []
964 files = []
980 if os.path.exists(self._join(b'.hgignore')):
965 if os.path.exists(self._join(b'.hgignore')):
981 files.append(self._join(b'.hgignore'))
966 files.append(self._join(b'.hgignore'))
982 for name, path in self._ui.configitems(b"ui"):
967 for name, path in self._ui.configitems(b"ui"):
983 if name == b'ignore' or name.startswith(b'ignore.'):
968 if name == b'ignore' or name.startswith(b'ignore.'):
984 # we need to use os.path.join here rather than self._join
969 # we need to use os.path.join here rather than self._join
985 # because path is arbitrary and user-specified
970 # because path is arbitrary and user-specified
986 files.append(os.path.join(self._rootdir, util.expandpath(path)))
971 files.append(os.path.join(self._rootdir, util.expandpath(path)))
987 return files
972 return files
988
973
989 def _ignorefileandline(self, f):
974 def _ignorefileandline(self, f):
990 files = collections.deque(self._ignorefiles())
975 files = collections.deque(self._ignorefiles())
991 visited = set()
976 visited = set()
992 while files:
977 while files:
993 i = files.popleft()
978 i = files.popleft()
994 patterns = matchmod.readpatternfile(
979 patterns = matchmod.readpatternfile(
995 i, self._ui.warn, sourceinfo=True
980 i, self._ui.warn, sourceinfo=True
996 )
981 )
997 for pattern, lineno, line in patterns:
982 for pattern, lineno, line in patterns:
998 kind, p = matchmod._patsplit(pattern, b'glob')
983 kind, p = matchmod._patsplit(pattern, b'glob')
999 if kind == b"subinclude":
984 if kind == b"subinclude":
1000 if p not in visited:
985 if p not in visited:
1001 files.append(p)
986 files.append(p)
1002 continue
987 continue
1003 m = matchmod.match(
988 m = matchmod.match(
1004 self._root, b'', [], [pattern], warn=self._ui.warn
989 self._root, b'', [], [pattern], warn=self._ui.warn
1005 )
990 )
1006 if m(f):
991 if m(f):
1007 return (i, lineno, line)
992 return (i, lineno, line)
1008 visited.add(i)
993 visited.add(i)
1009 return (None, -1, b"")
994 return (None, -1, b"")
1010
995
1011 def _walkexplicit(self, match, subrepos):
996 def _walkexplicit(self, match, subrepos):
1012 """Get stat data about the files explicitly specified by match.
997 """Get stat data about the files explicitly specified by match.
1013
998
1014 Return a triple (results, dirsfound, dirsnotfound).
999 Return a triple (results, dirsfound, dirsnotfound).
1015 - results is a mapping from filename to stat result. It also contains
1000 - results is a mapping from filename to stat result. It also contains
1016 listings mapping subrepos and .hg to None.
1001 listings mapping subrepos and .hg to None.
1017 - dirsfound is a list of files found to be directories.
1002 - dirsfound is a list of files found to be directories.
1018 - dirsnotfound is a list of files that the dirstate thinks are
1003 - dirsnotfound is a list of files that the dirstate thinks are
1019 directories and that were not found."""
1004 directories and that were not found."""
1020
1005
1021 def badtype(mode):
1006 def badtype(mode):
1022 kind = _(b'unknown')
1007 kind = _(b'unknown')
1023 if stat.S_ISCHR(mode):
1008 if stat.S_ISCHR(mode):
1024 kind = _(b'character device')
1009 kind = _(b'character device')
1025 elif stat.S_ISBLK(mode):
1010 elif stat.S_ISBLK(mode):
1026 kind = _(b'block device')
1011 kind = _(b'block device')
1027 elif stat.S_ISFIFO(mode):
1012 elif stat.S_ISFIFO(mode):
1028 kind = _(b'fifo')
1013 kind = _(b'fifo')
1029 elif stat.S_ISSOCK(mode):
1014 elif stat.S_ISSOCK(mode):
1030 kind = _(b'socket')
1015 kind = _(b'socket')
1031 elif stat.S_ISDIR(mode):
1016 elif stat.S_ISDIR(mode):
1032 kind = _(b'directory')
1017 kind = _(b'directory')
1033 return _(b'unsupported file type (type is %s)') % kind
1018 return _(b'unsupported file type (type is %s)') % kind
1034
1019
1035 badfn = match.bad
1020 badfn = match.bad
1036 dmap = self._map
1021 dmap = self._map
1037 lstat = os.lstat
1022 lstat = os.lstat
1038 getkind = stat.S_IFMT
1023 getkind = stat.S_IFMT
1039 dirkind = stat.S_IFDIR
1024 dirkind = stat.S_IFDIR
1040 regkind = stat.S_IFREG
1025 regkind = stat.S_IFREG
1041 lnkkind = stat.S_IFLNK
1026 lnkkind = stat.S_IFLNK
1042 join = self._join
1027 join = self._join
1043 dirsfound = []
1028 dirsfound = []
1044 foundadd = dirsfound.append
1029 foundadd = dirsfound.append
1045 dirsnotfound = []
1030 dirsnotfound = []
1046 notfoundadd = dirsnotfound.append
1031 notfoundadd = dirsnotfound.append
1047
1032
1048 if not match.isexact() and self._checkcase:
1033 if not match.isexact() and self._checkcase:
1049 normalize = self._normalize
1034 normalize = self._normalize
1050 else:
1035 else:
1051 normalize = None
1036 normalize = None
1052
1037
1053 files = sorted(match.files())
1038 files = sorted(match.files())
1054 subrepos.sort()
1039 subrepos.sort()
1055 i, j = 0, 0
1040 i, j = 0, 0
1056 while i < len(files) and j < len(subrepos):
1041 while i < len(files) and j < len(subrepos):
1057 subpath = subrepos[j] + b"/"
1042 subpath = subrepos[j] + b"/"
1058 if files[i] < subpath:
1043 if files[i] < subpath:
1059 i += 1
1044 i += 1
1060 continue
1045 continue
1061 while i < len(files) and files[i].startswith(subpath):
1046 while i < len(files) and files[i].startswith(subpath):
1062 del files[i]
1047 del files[i]
1063 j += 1
1048 j += 1
1064
1049
1065 if not files or b'' in files:
1050 if not files or b'' in files:
1066 files = [b'']
1051 files = [b'']
1067 # constructing the foldmap is expensive, so don't do it for the
1052 # constructing the foldmap is expensive, so don't do it for the
1068 # common case where files is ['']
1053 # common case where files is ['']
1069 normalize = None
1054 normalize = None
1070 results = dict.fromkeys(subrepos)
1055 results = dict.fromkeys(subrepos)
1071 results[b'.hg'] = None
1056 results[b'.hg'] = None
1072
1057
1073 for ff in files:
1058 for ff in files:
1074 if normalize:
1059 if normalize:
1075 nf = normalize(ff, False, True)
1060 nf = normalize(ff, False, True)
1076 else:
1061 else:
1077 nf = ff
1062 nf = ff
1078 if nf in results:
1063 if nf in results:
1079 continue
1064 continue
1080
1065
1081 try:
1066 try:
1082 st = lstat(join(nf))
1067 st = lstat(join(nf))
1083 kind = getkind(st.st_mode)
1068 kind = getkind(st.st_mode)
1084 if kind == dirkind:
1069 if kind == dirkind:
1085 if nf in dmap:
1070 if nf in dmap:
1086 # file replaced by dir on disk but still in dirstate
1071 # file replaced by dir on disk but still in dirstate
1087 results[nf] = None
1072 results[nf] = None
1088 foundadd((nf, ff))
1073 foundadd((nf, ff))
1089 elif kind == regkind or kind == lnkkind:
1074 elif kind == regkind or kind == lnkkind:
1090 results[nf] = st
1075 results[nf] = st
1091 else:
1076 else:
1092 badfn(ff, badtype(kind))
1077 badfn(ff, badtype(kind))
1093 if nf in dmap:
1078 if nf in dmap:
1094 results[nf] = None
1079 results[nf] = None
1095 except OSError as inst: # nf not found on disk - it is dirstate only
1080 except OSError as inst: # nf not found on disk - it is dirstate only
1096 if nf in dmap: # does it exactly match a missing file?
1081 if nf in dmap: # does it exactly match a missing file?
1097 results[nf] = None
1082 results[nf] = None
1098 else: # does it match a missing directory?
1083 else: # does it match a missing directory?
1099 if self._map.hasdir(nf):
1084 if self._map.hasdir(nf):
1100 notfoundadd(nf)
1085 notfoundadd(nf)
1101 else:
1086 else:
1102 badfn(ff, encoding.strtolocal(inst.strerror))
1087 badfn(ff, encoding.strtolocal(inst.strerror))
1103
1088
1104 # match.files() may contain explicitly-specified paths that shouldn't
1089 # match.files() may contain explicitly-specified paths that shouldn't
1105 # be taken; drop them from the list of files found. dirsfound/notfound
1090 # be taken; drop them from the list of files found. dirsfound/notfound
1106 # aren't filtered here because they will be tested later.
1091 # aren't filtered here because they will be tested later.
1107 if match.anypats():
1092 if match.anypats():
1108 for f in list(results):
1093 for f in list(results):
1109 if f == b'.hg' or f in subrepos:
1094 if f == b'.hg' or f in subrepos:
1110 # keep sentinel to disable further out-of-repo walks
1095 # keep sentinel to disable further out-of-repo walks
1111 continue
1096 continue
1112 if not match(f):
1097 if not match(f):
1113 del results[f]
1098 del results[f]
1114
1099
1115 # Case insensitive filesystems cannot rely on lstat() failing to detect
1100 # Case insensitive filesystems cannot rely on lstat() failing to detect
1116 # a case-only rename. Prune the stat object for any file that does not
1101 # a case-only rename. Prune the stat object for any file that does not
1117 # match the case in the filesystem, if there are multiple files that
1102 # match the case in the filesystem, if there are multiple files that
1118 # normalize to the same path.
1103 # normalize to the same path.
1119 if match.isexact() and self._checkcase:
1104 if match.isexact() and self._checkcase:
1120 normed = {}
1105 normed = {}
1121
1106
1122 for f, st in pycompat.iteritems(results):
1107 for f, st in pycompat.iteritems(results):
1123 if st is None:
1108 if st is None:
1124 continue
1109 continue
1125
1110
1126 nc = util.normcase(f)
1111 nc = util.normcase(f)
1127 paths = normed.get(nc)
1112 paths = normed.get(nc)
1128
1113
1129 if paths is None:
1114 if paths is None:
1130 paths = set()
1115 paths = set()
1131 normed[nc] = paths
1116 normed[nc] = paths
1132
1117
1133 paths.add(f)
1118 paths.add(f)
1134
1119
1135 for norm, paths in pycompat.iteritems(normed):
1120 for norm, paths in pycompat.iteritems(normed):
1136 if len(paths) > 1:
1121 if len(paths) > 1:
1137 for path in paths:
1122 for path in paths:
1138 folded = self._discoverpath(
1123 folded = self._discoverpath(
1139 path, norm, True, None, self._map.dirfoldmap
1124 path, norm, True, None, self._map.dirfoldmap
1140 )
1125 )
1141 if path != folded:
1126 if path != folded:
1142 results[path] = None
1127 results[path] = None
1143
1128
1144 return results, dirsfound, dirsnotfound
1129 return results, dirsfound, dirsnotfound
1145
1130
def walk(self, match, subrepos, unknown, ignored, full=True):
    """
    Walk recursively through the directory tree, finding all files
    matched by match.

    If full is False, maybe skip some known-clean files.

    Return a dict mapping filename to stat-like object (either
    mercurial.osutil.stat instance or return value of os.stat()).

    """
    # full is a flag that extensions that hook into walk can use -- this
    # implementation doesn't use it at all. This satisfies the contract
    # because we only guarantee a "maybe".

    # Pick ignore predicates according to what the caller wants listed:
    # listing ignored files means nothing may be skipped; listing neither
    # unknown nor ignored files lets us skip directory recursion entirely.
    if ignored:
        ignore = util.never
        dirignore = util.never
    elif unknown:
        ignore = self._ignore
        dirignore = self._dirignore
    else:
        # if not unknown and not ignored, drop dir recursion and step 2
        ignore = util.always
        dirignore = util.always

    # Hoist attribute lookups out of the hot traversal loop.
    matchfn = match.matchfn
    matchalways = match.always()
    matchtdir = match.traversedir
    dmap = self._map
    listdir = util.listdir
    lstat = os.lstat
    dirkind = stat.S_IFDIR
    regkind = stat.S_IFREG
    lnkkind = stat.S_IFLNK
    join = self._join

    exact = skipstep3 = False
    if match.isexact():  # match.exact
        exact = True
        dirignore = util.always  # skip step 2
    elif match.prefix():  # match.match, no patterns
        skipstep3 = True

    if not exact and self._checkcase:
        normalize = self._normalize
        normalizefile = self._normalizefile
        skipstep3 = False
    else:
        normalize = self._normalize
        normalizefile = None

    # step 1: find all explicit files
    results, work, dirsnotfound = self._walkexplicit(match, subrepos)
    if matchtdir:
        for d in work:
            matchtdir(d[0])
        for d in dirsnotfound:
            matchtdir(d)

    skipstep3 = skipstep3 and not (work or dirsnotfound)
    work = [d for d in work if not dirignore(d[0])]

    # step 2: visit subdirectories
    def traverse(work, alreadynormed):
        # Iterative depth-first traversal; `work` is the stack of
        # directories still to visit.
        wadd = work.append
        while work:
            tracing.counter('dirstate.walk work', len(work))
            nd = work.pop()
            visitentries = match.visitchildrenset(nd)
            if not visitentries:
                continue
            if visitentries == b'this' or visitentries == b'all':
                visitentries = None
            skip = None
            if nd != b'':
                skip = b'.hg'
            try:
                with tracing.log('dirstate.walk.traverse listdir %s', nd):
                    entries = listdir(join(nd), stat=True, skip=skip)
            except OSError as inst:
                # Unreadable or vanished directory: report and keep going.
                if inst.errno in (errno.EACCES, errno.ENOENT):
                    match.bad(
                        self.pathto(nd), encoding.strtolocal(inst.strerror)
                    )
                    continue
                raise
            for f, kind, st in entries:
                # Some matchers may return files in the visitentries set,
                # instead of 'this', if the matcher explicitly mentions them
                # and is not an exactmatcher. This is acceptable; we do not
                # make any hard assumptions about file-or-directory below
                # based on the presence of `f` in visitentries. If
                # visitchildrenset returned a set, we can always skip the
                # entries *not* in the set it provided regardless of whether
                # they're actually a file or a directory.
                if visitentries and f not in visitentries:
                    continue
                if normalizefile:
                    # even though f might be a directory, we're only
                    # interested in comparing it to files currently in the
                    # dmap -- therefore normalizefile is enough
                    nf = normalizefile(
                        nd and (nd + b"/" + f) or f, True, True
                    )
                else:
                    nf = nd and (nd + b"/" + f) or f
                if nf not in results:
                    if kind == dirkind:
                        if not ignore(nf):
                            if matchtdir:
                                matchtdir(nf)
                            wadd(nf)
                        if nf in dmap and (matchalways or matchfn(nf)):
                            results[nf] = None
                    elif kind == regkind or kind == lnkkind:
                        if nf in dmap:
                            if matchalways or matchfn(nf):
                                results[nf] = st
                        elif (matchalways or matchfn(nf)) and not ignore(
                            nf
                        ):
                            # unknown file -- normalize if necessary
                            if not alreadynormed:
                                nf = normalize(nf, False, True)
                            results[nf] = st
                    elif nf in dmap and (matchalways or matchfn(nf)):
                        results[nf] = None

    for nd, d in work:
        # alreadynormed means that processwork doesn't have to do any
        # expensive directory normalization
        alreadynormed = not normalize or nd == d
        traverse([d], alreadynormed)

    for s in subrepos:
        del results[s]
    del results[b'.hg']

    # step 3: visit remaining files from dmap
    if not skipstep3 and not exact:
        # If a dmap file is not in results yet, it was either
        # a) not matching matchfn b) ignored, c) missing, or d) under a
        # symlink directory.
        if not results and matchalways:
            # was `[f for f in dmap]`; list() copies without the
            # comprehension overhead
            visit = list(dmap)
        else:
            visit = [f for f in dmap if f not in results and matchfn(f)]
        visit.sort()

        if unknown:
            # unknown == True means we walked all dirs under the roots
            # that wasn't ignored, and everything that matched was stat'ed
            # and is already in results.
            # The rest must thus be ignored or under a symlink.
            audit_path = pathutil.pathauditor(self._root, cached=True)

            # was `for nf in iter(visit)`; the iter() wrapper was redundant
            for nf in visit:
                # If a stat for the same file was already added with a
                # different case, don't add one for this, since that would
                # make it appear as if the file exists under both names
                # on disk.
                if (
                    normalizefile
                    and normalizefile(nf, True, True) in results
                ):
                    results[nf] = None
                # Report ignored items in the dmap as long as they are not
                # under a symlink directory.
                elif audit_path.check(nf):
                    try:
                        results[nf] = lstat(join(nf))
                        # file was just ignored, no links, and exists
                    except OSError:
                        # file doesn't exist
                        results[nf] = None
                else:
                    # It's either missing or under a symlink directory
                    # which we in this case report as missing
                    results[nf] = None
        else:
            # We may not have walked the full directory tree above,
            # so stat and check everything we missed.
            iv = iter(visit)
            for st in util.statfiles([join(i) for i in visit]):
                results[next(iv)] = st
    return results
1333
1318
def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
    """Run the Rust status implementation and translate its output.

    Returns the same ``(lookup, status)`` pair as ``status()``.
    """
    # Force Rayon (Rust parallelism library) to respect the number of
    # workers. This is a temporary workaround until Rust code knows
    # how to read the config file.
    numcpus = self._ui.configint(b"worker", b"numcpus")
    if numcpus is not None:
        encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)

    if not self._ui.configbool(b"worker", b"enabled", True):
        # Workers disabled: pin Rayon to a single thread.
        encoding.environ[b"RAYON_NUM_THREADS"] = b"1"

    (
        lookup,
        modified,
        added,
        removed,
        deleted,
        clean,
        ignored,
        unknown,
        warnings,
        bad,
        traversed,
        dirty,
    ) = rustmod.status(
        self._map._rustmap,
        matcher,
        self._rootdir,
        self._ignorefiles(),
        self._checkexec,
        self._lastnormaltime,
        bool(list_clean),
        bool(list_ignored),
        bool(list_unknown),
        bool(matcher.traversedir),
    )

    self._dirty |= dirty

    if matcher.traversedir:
        for visited_dir in traversed:
            matcher.traversedir(visited_dir)

    if self._ui.warn:
        for warning in warnings:
            if isinstance(warning, tuple):
                # (file_path, syntax) pair: invalid pattern syntax.
                file_path, syntax = warning
                msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
                    file_path,
                    syntax,
                )
                self._ui.warn(msg)
            else:
                # bare path: the pattern file could not be read at all.
                msg = _(b"skipping unreadable pattern file '%s': %s\n")
                self._ui.warn(
                    msg
                    % (
                        pathutil.canonpath(
                            self._rootdir, self._rootdir, warning
                        ),
                        b"No such file or directory",
                    )
                )

    for fn, message in bad:
        matcher.bad(fn, encoding.strtolocal(message))

    status = scmutil.status(
        modified=modified,
        added=added,
        removed=removed,
        deleted=deleted,
        unknown=unknown,
        ignored=ignored,
        clean=clean,
    )
    return (lookup, status)
1412
1397
def status(self, match, subrepos, ignored, clean, unknown):
    """Determine the status of the working copy relative to the
    dirstate and return a pair of (unsure, status), where status is of type
    scmutil.status and:

    unsure:
        files that might have been modified since the dirstate was
        written, but need to be read to be sure (size is the same
        but mtime differs)
    status.modified:
        files that have definitely been modified since the dirstate
        was written (different size or mode)
    status.clean:
        files that have definitely not been modified since the
        dirstate was written
    """
    # The `ignored`/`clean`/`unknown` parameters are listing flags; keep
    # them under list* names so the bare names can be reused as result
    # accumulators below.
    listignored, listclean, listunknown = ignored, clean, unknown
    lookup, modified, added, unknown, ignored = [], [], [], [], []
    removed, deleted, clean = [], [], []

    dmap = self._map
    dmap.preload()

    # Decide whether the Rust fast path is usable for this request.
    use_rust = True

    allowed_matchers = (
        matchmod.alwaysmatcher,
        matchmod.exactmatcher,
        matchmod.includematcher,
    )

    if rustmod is None:
        use_rust = False
    elif self._checkcase:
        # Case-insensitive filesystems are not handled yet
        use_rust = False
    elif subrepos:
        use_rust = False
    elif sparse.enabled:
        use_rust = False
    elif not isinstance(match, allowed_matchers):
        # Some matchers have yet to be implemented
        use_rust = False

    if use_rust:
        try:
            return self._rust_status(
                match, listclean, listignored, listunknown
            )
        except rustmod.FallbackError:
            pass

    def noop(f):
        pass

    # Bind frequently-used methods to locals for speed in the loop below.
    dcontains = dmap.__contains__
    dget = dmap.__getitem__
    ladd = lookup.append  # aka "unsure"
    madd = modified.append
    aadd = added.append
    uadd = unknown.append if listunknown else noop
    iadd = ignored.append if listignored else noop
    radd = removed.append
    dadd = deleted.append
    cadd = clean.append if listclean else noop
    mexact = match.exact
    dirignore = self._dirignore
    checkexec = self._checkexec
    copymap = self._map.copymap
    lastnormaltime = self._lastnormaltime

    # We need to do full walks when either
    # - we're listing all clean files, or
    # - match.traversedir does something, because match.traversedir should
    #   be called for every dir in the working dir
    full = listclean or match.traversedir is not None
    for fn, st in pycompat.iteritems(
        self.walk(match, subrepos, listunknown, listignored, full=full)
    ):
        if not dcontains(fn):
            # Not tracked: classify as ignored or unknown.
            if (listignored or mexact(fn)) and dirignore(fn):
                if listignored:
                    iadd(fn)
            else:
                uadd(fn)
            continue

        # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
        # written like that for performance reasons. dmap[fn] is not a
        # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
        # opcode has fast paths when the value to be unpacked is a tuple or
        # a list, but falls back to creating a full-fledged iterator in
        # general. That is much slower than simply accessing and storing the
        # tuple members one by one.
        entry = dget(fn)
        mode = entry.mode
        size = entry.size
        time = entry.mtime

        if not st and entry.tracked:
            dadd(fn)
        elif entry.merged:
            madd(fn)
        elif entry.added:
            aadd(fn)
        elif entry.removed:
            radd(fn)
        elif entry.tracked:
            if (
                size >= 0
                and (
                    (size != st.st_size and size != st.st_size & _rangemask)
                    or ((mode ^ st.st_mode) & 0o100 and checkexec)
                )
                or entry.from_p2
                or fn in copymap
            ):
                if stat.S_ISLNK(st.st_mode) and size != st.st_size:
                    # issue6456: Size returned may be longer due to
                    # encryption on EXT-4 fscrypt, undecided.
                    ladd(fn)
                else:
                    madd(fn)
            elif (
                time != st[stat.ST_MTIME]
                and time != st[stat.ST_MTIME] & _rangemask
            ):
                ladd(fn)
            elif st[stat.ST_MTIME] == lastnormaltime:
                # fn may have just been marked as normal and it may have
                # changed in the same second without changing its size.
                # This can happen if we quickly do multiple commits.
                # Force lookup, so we don't miss such a racy file change.
                ladd(fn)
            elif listclean:
                cadd(fn)
    status = scmutil.status(
        modified, added, removed, deleted, unknown, ignored, clean
    )
    return (lookup, status)
1553
1538
def matches(self, match):
    """
    return files in the dirstate (in whatever state) filtered by match
    """
    # Query the Rust map directly when available.
    dmap = self._map._rustmap if rustmod is not None else self._map

    if match.always():
        return dmap.keys()

    files = match.files()

    if match.isexact():
        # fast path -- filter the other way around, since typically files is
        # much smaller than dmap
        return [f for f in files if f in dmap]

    if match.prefix() and all(fn in dmap for fn in files):
        # fast path -- all the values are known to be files, so just return
        # that
        return list(files)

    # slow path: test every tracked file against the matcher
    return [f for f in dmap if match(f)]
1574
1559
1575 def _actualfilename(self, tr):
1560 def _actualfilename(self, tr):
1576 if tr:
1561 if tr:
1577 return self._pendingfilename
1562 return self._pendingfilename
1578 else:
1563 else:
1579 return self._filename
1564 return self._filename
1580
1565
def savebackup(self, tr, backupname):
    '''Save current dirstate into backup file'''
    filename = self._actualfilename(tr)
    assert backupname != filename

    opener = self._opener

    # use '_writedirstate' instead of 'write' to write changes certainly,
    # because the latter omits writing out if transaction is running.
    # output file will be used to create backup of dirstate at this point.
    if self._dirty or not opener.exists(filename):
        self._writedirstate(
            tr,
            opener(filename, b"w", atomictemp=True, checkambig=True),
        )

    if tr:
        # ensure that subsequent tr.writepending returns True for
        # changes written out above, even if dirstate is never
        # changed after this
        tr.addfilegenerator(
            b'dirstate',
            (self._filename,),
            lambda f: self._writedirstate(tr, f),
            location=b'plain',
        )

        # ensure that pending file written above is unlinked at
        # failure, even if tr.writepending isn't invoked until the
        # end of this transaction
        tr.registertmp(filename, location=b'plain')

    opener.tryunlink(backupname)
    # hardlink backup is okay because _writedirstate is always called
    # with an "atomictemp=True" file.
    util.copyfile(
        opener.join(filename),
        opener.join(backupname),
        hardlink=True,
    )
1604
def restorebackup(self, tr, backupname):
    '''Restore dirstate by backup file'''
    # this "invalidate()" prevents "wlock.release()" from writing
    # changes of dirstate out after restoring from backup file
    self.invalidate()
    filename = self._actualfilename(tr)
    opener = self._opener
    if util.samefile(opener.join(backupname), opener.join(filename)):
        # backup already is the live file (e.g. a hardlink): just drop it
        opener.unlink(backupname)
    else:
        opener.rename(backupname, filename, checkambig=True)
1631
1616
def clearbackup(self, tr, backupname):
    '''Clear backup file'''
    # `tr` is accepted for interface symmetry with savebackup/restorebackup
    # but is not consulted here.
    self._opener.unlink(backupname)
General Comments 0
You need to be logged in to leave comments. Login now