##// END OF EJS Templates
dirstate-item: use the `p2_info` property to replace more verbose call...
marmoute -
r48960:e2753a7a default
parent child Browse files
Show More
@@ -1,1513 +1,1513 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .pycompat import delattr
17 from .pycompat import delattr
18
18
19 from hgdemandimport import tracing
19 from hgdemandimport import tracing
20
20
21 from . import (
21 from . import (
22 dirstatemap,
22 dirstatemap,
23 encoding,
23 encoding,
24 error,
24 error,
25 match as matchmod,
25 match as matchmod,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 sparse,
30 sparse,
31 util,
31 util,
32 )
32 )
33
33
34 from .interfaces import (
34 from .interfaces import (
35 dirstate as intdirstate,
35 dirstate as intdirstate,
36 util as interfaceutil,
36 util as interfaceutil,
37 )
37 )
38
38
# implementations resolved through the policy layer
parsers = policy.importmod('parsers')
rustmod = policy.importrust('dirstate')

# the dirstate-v2 format requires the Rust extensions
SUPPORTS_DIRSTATE_V2 = rustmod is not None

# local aliases, for brevity
propertycache = util.propertycache
filecache = scmutil.filecache
_rangemask = dirstatemap.rangemask

DirstateItem = dirstatemap.DirstateItem
49
49
50
50
class repocache(filecache):
    """filecache for files in .hg/"""

    def join(self, obj, fname):
        """Resolve ``fname`` through the repo opener (i.e. under .hg/)."""
        return obj._opener.join(fname)
56
56
57
57
class rootcache(filecache):
    """filecache for files in the repository root"""

    def join(self, obj, fname):
        """Resolve ``fname`` relative to the repository root."""
        return obj._join(fname)
63
63
64
64
65 def _getfsnow(vfs):
65 def _getfsnow(vfs):
66 '''Get "now" timestamp on filesystem'''
66 '''Get "now" timestamp on filesystem'''
67 tmpfd, tmpname = vfs.mkstemp()
67 tmpfd, tmpname = vfs.mkstemp()
68 try:
68 try:
69 return os.fstat(tmpfd)[stat.ST_MTIME]
69 return os.fstat(tmpfd)[stat.ST_MTIME]
70 finally:
70 finally:
71 os.close(tmpfd)
71 os.close(tmpfd)
72 vfs.unlink(tmpname)
72 vfs.unlink(tmpname)
73
73
74
74
def requires_parents_change(func):
    """Decorator enforcing that ``func`` runs inside a ``parentchange`` context.

    Raises error.ProgrammingError otherwise.
    """

    def wrap(self, *args, **kwargs):
        if self.pendingparentchange():
            return func(self, *args, **kwargs)
        msg = 'calling `%s` outside of a parentchange context' % func.__name__
        raise error.ProgrammingError(msg)

    return wrap
84
84
85
85
def requires_no_parents_change(func):
    """Decorator enforcing that ``func`` runs outside any ``parentchange`` context.

    Raises error.ProgrammingError otherwise.
    """

    def wrap(self, *args, **kwargs):
        if not self.pendingparentchange():
            return func(self, *args, **kwargs)
        msg = 'calling `%s` inside of a parentchange context' % func.__name__
        raise error.ProgrammingError(msg)

    return wrap
95
95
96
96
97 @interfaceutil.implementer(intdirstate.idirstate)
97 @interfaceutil.implementer(intdirstate.idirstate)
98 class dirstate(object):
98 class dirstate(object):
    def __init__(
        self,
        opener,
        ui,
        root,
        validate,
        sparsematchfn,
        nodeconstants,
        use_dirstate_v2,
    ):
        """Create a new dirstate object.

        opener is an open()-like callable that can be used to open the
        dirstate file; root is the root of the directory tracked by
        the dirstate.
        """
        self._use_dirstate_v2 = use_dirstate_v2
        self._nodeconstants = nodeconstants
        self._opener = opener
        self._validate = validate
        self._root = root
        self._sparsematchfn = sparsematchfn
        # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
        # UNC path pointing to root share (issue4557)
        self._rootdir = pathutil.normasprefix(root)
        # True when in-memory state differs from what is on disk
        self._dirty = False
        self._lastnormaltime = 0
        self._ui = ui
        self._filecache = {}
        # number of currently-open `parentchange` contexts
        self._parentwriters = 0
        self._filename = b'dirstate'
        self._pendingfilename = b'%s.pending' % self._filename
        self._plchangecallbacks = {}
        self._origpl = None
        self._mapcls = dirstatemap.dirstatemap
        # Access and cache cwd early, so we don't access it for the first time
        # after a working-copy update caused it to not exist (accessing it then
        # raises an exception).
        self._cwd
138
138
    def prefetch_parents(self):
        """make sure the parents are loaded

        Used to avoid a race condition.
        """
        # reading the property is enough; the value itself is discarded
        self._pl
145
145
    @contextlib.contextmanager
    def parentchange(self):
        """Context manager for handling dirstate parents.

        If an exception occurs in the scope of the context manager,
        the incoherent dirstate won't be written when wlock is
        released.
        """
        self._parentwriters += 1
        yield
        # Typically we want the "undo" step of a context manager in a
        # finally block so it happens even when an exception
        # occurs. In this case, however, we only want to decrement
        # parentwriters if the code in the with statement exits
        # normally, so we don't have a try/finally here on purpose.
        self._parentwriters -= 1
162
162
163 def pendingparentchange(self):
163 def pendingparentchange(self):
164 """Returns true if the dirstate is in the middle of a set of changes
164 """Returns true if the dirstate is in the middle of a set of changes
165 that modify the dirstate parent.
165 that modify the dirstate parent.
166 """
166 """
167 return self._parentwriters > 0
167 return self._parentwriters > 0
168
168
    @propertycache
    def _map(self):
        """Return the dirstate contents (see documentation for dirstatemap).

        The assignment below overwrites this propertycache entry on the
        instance, so the map is only constructed once.
        """
        self._map = self._mapcls(
            self._ui,
            self._opener,
            self._root,
            self._nodeconstants,
            self._use_dirstate_v2,
        )
        return self._map
180
180
181 @property
181 @property
182 def _sparsematcher(self):
182 def _sparsematcher(self):
183 """The matcher for the sparse checkout.
183 """The matcher for the sparse checkout.
184
184
185 The working directory may not include every file from a manifest. The
185 The working directory may not include every file from a manifest. The
186 matcher obtained by this property will match a path if it is to be
186 matcher obtained by this property will match a path if it is to be
187 included in the working directory.
187 included in the working directory.
188 """
188 """
189 # TODO there is potential to cache this property. For now, the matcher
189 # TODO there is potential to cache this property. For now, the matcher
190 # is resolved on every access. (But the called function does use a
190 # is resolved on every access. (But the called function does use a
191 # cache to keep the lookup fast.)
191 # cache to keep the lookup fast.)
192 return self._sparsematchfn()
192 return self._sparsematchfn()
193
193
194 @repocache(b'branch')
194 @repocache(b'branch')
195 def _branch(self):
195 def _branch(self):
196 try:
196 try:
197 return self._opener.read(b"branch").strip() or b"default"
197 return self._opener.read(b"branch").strip() or b"default"
198 except IOError as inst:
198 except IOError as inst:
199 if inst.errno != errno.ENOENT:
199 if inst.errno != errno.ENOENT:
200 raise
200 raise
201 return b"default"
201 return b"default"
202
202
203 @property
203 @property
204 def _pl(self):
204 def _pl(self):
205 return self._map.parents()
205 return self._map.parents()
206
206
207 def hasdir(self, d):
207 def hasdir(self, d):
208 return self._map.hastrackeddir(d)
208 return self._map.hastrackeddir(d)
209
209
210 @rootcache(b'.hgignore')
210 @rootcache(b'.hgignore')
211 def _ignore(self):
211 def _ignore(self):
212 files = self._ignorefiles()
212 files = self._ignorefiles()
213 if not files:
213 if not files:
214 return matchmod.never()
214 return matchmod.never()
215
215
216 pats = [b'include:%s' % f for f in files]
216 pats = [b'include:%s' % f for f in files]
217 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
217 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
218
218
219 @propertycache
219 @propertycache
220 def _slash(self):
220 def _slash(self):
221 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
221 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
222
222
223 @propertycache
223 @propertycache
224 def _checklink(self):
224 def _checklink(self):
225 return util.checklink(self._root)
225 return util.checklink(self._root)
226
226
227 @propertycache
227 @propertycache
228 def _checkexec(self):
228 def _checkexec(self):
229 return bool(util.checkexec(self._root))
229 return bool(util.checkexec(self._root))
230
230
231 @propertycache
231 @propertycache
232 def _checkcase(self):
232 def _checkcase(self):
233 return not util.fscasesensitive(self._join(b'.hg'))
233 return not util.fscasesensitive(self._join(b'.hg'))
234
234
235 def _join(self, f):
235 def _join(self, f):
236 # much faster than os.path.join()
236 # much faster than os.path.join()
237 # it's safe because f is always a relative path
237 # it's safe because f is always a relative path
238 return self._rootdir + f
238 return self._rootdir + f
239
239
    def flagfunc(self, buildfallback):
        """Return a function reporting a file's flags: b'l', b'x' or b''.

        buildfallback is only invoked when the filesystem cannot express
        symlinks and/or the exec bit; the resulting fallback supplies
        whichever flag the filesystem cannot.
        """
        if self._checklink and self._checkexec:
            # both flags can be read directly from the filesystem

            def f(x):
                try:
                    st = os.lstat(self._join(x))
                    if util.statislink(st):
                        return b'l'
                    if util.statisexec(st):
                        return b'x'
                except OSError:
                    pass
                return b''

            return f

        fallback = buildfallback()
        if self._checklink:
            # symlinks are real; the exec bit comes from the fallback

            def f(x):
                if os.path.islink(self._join(x)):
                    return b'l'
                if b'x' in fallback(x):
                    return b'x'
                return b''

            return f
        if self._checkexec:
            # the exec bit is real; symlinks come from the fallback

            def f(x):
                if b'l' in fallback(x):
                    return b'l'
                if util.isexec(self._join(x)):
                    return b'x'
                return b''

            return f
        else:
            # neither flag is expressible: rely entirely on the fallback
            return fallback
279
279
280 @propertycache
280 @propertycache
281 def _cwd(self):
281 def _cwd(self):
282 # internal config: ui.forcecwd
282 # internal config: ui.forcecwd
283 forcecwd = self._ui.config(b'ui', b'forcecwd')
283 forcecwd = self._ui.config(b'ui', b'forcecwd')
284 if forcecwd:
284 if forcecwd:
285 return forcecwd
285 return forcecwd
286 return encoding.getcwd()
286 return encoding.getcwd()
287
287
288 def getcwd(self):
288 def getcwd(self):
289 """Return the path from which a canonical path is calculated.
289 """Return the path from which a canonical path is calculated.
290
290
291 This path should be used to resolve file patterns or to convert
291 This path should be used to resolve file patterns or to convert
292 canonical paths back to file paths for display. It shouldn't be
292 canonical paths back to file paths for display. It shouldn't be
293 used to get real file paths. Use vfs functions instead.
293 used to get real file paths. Use vfs functions instead.
294 """
294 """
295 cwd = self._cwd
295 cwd = self._cwd
296 if cwd == self._root:
296 if cwd == self._root:
297 return b''
297 return b''
298 # self._root ends with a path separator if self._root is '/' or 'C:\'
298 # self._root ends with a path separator if self._root is '/' or 'C:\'
299 rootsep = self._root
299 rootsep = self._root
300 if not util.endswithsep(rootsep):
300 if not util.endswithsep(rootsep):
301 rootsep += pycompat.ossep
301 rootsep += pycompat.ossep
302 if cwd.startswith(rootsep):
302 if cwd.startswith(rootsep):
303 return cwd[len(rootsep) :]
303 return cwd[len(rootsep) :]
304 else:
304 else:
305 # we're outside the repo. return an absolute path.
305 # we're outside the repo. return an absolute path.
306 return cwd
306 return cwd
307
307
308 def pathto(self, f, cwd=None):
308 def pathto(self, f, cwd=None):
309 if cwd is None:
309 if cwd is None:
310 cwd = self.getcwd()
310 cwd = self.getcwd()
311 path = util.pathto(self._root, cwd, f)
311 path = util.pathto(self._root, cwd, f)
312 if self._slash:
312 if self._slash:
313 return util.pconvert(path)
313 return util.pconvert(path)
314 return path
314 return path
315
315
    def __getitem__(self, key):
        """Return the current state of key (a filename) in the dirstate.

        States are:
        n  normal
        m  needs merging
        r  marked for removal
        a  marked for addition
        ?  not tracked

        XXX The "state" is a bit obscure to be in the "public" API. we should
        consider migrating all user of this to going through the dirstate entry
        instead.
        """
        # deprecated since 6.1: callers should move to get_entry()
        msg = b"don't use dirstate[file], use dirstate.get_entry(file)"
        util.nouideprecwarn(msg, b'6.1', stacklevel=2)
        entry = self._map.get(key)
        if entry is not None:
            return entry.state
        return b'?'
336
336
337 def get_entry(self, path):
337 def get_entry(self, path):
338 """return a DirstateItem for the associated path"""
338 """return a DirstateItem for the associated path"""
339 entry = self._map.get(path)
339 entry = self._map.get(path)
340 if entry is None:
340 if entry is None:
341 return DirstateItem()
341 return DirstateItem()
342 return entry
342 return entry
343
343
344 def __contains__(self, key):
344 def __contains__(self, key):
345 return key in self._map
345 return key in self._map
346
346
347 def __iter__(self):
347 def __iter__(self):
348 return iter(sorted(self._map))
348 return iter(sorted(self._map))
349
349
350 def items(self):
350 def items(self):
351 return pycompat.iteritems(self._map)
351 return pycompat.iteritems(self._map)
352
352
353 iteritems = items
353 iteritems = items
354
354
355 def parents(self):
355 def parents(self):
356 return [self._validate(p) for p in self._pl]
356 return [self._validate(p) for p in self._pl]
357
357
358 def p1(self):
358 def p1(self):
359 return self._validate(self._pl[0])
359 return self._validate(self._pl[0])
360
360
361 def p2(self):
361 def p2(self):
362 return self._validate(self._pl[1])
362 return self._validate(self._pl[1])
363
363
364 @property
364 @property
365 def in_merge(self):
365 def in_merge(self):
366 """True if a merge is in progress"""
366 """True if a merge is in progress"""
367 return self._pl[1] != self._nodeconstants.nullid
367 return self._pl[1] != self._nodeconstants.nullid
368
368
369 def branch(self):
369 def branch(self):
370 return encoding.tolocal(self._branch)
370 return encoding.tolocal(self._branch)
371
371
    def setparents(self, p1, p2=None):
        """Set dirstate parents to p1 and p2.

        When moving from two parents to one, "merged" entries are
        adjusted to normal and previous copy records discarded and
        returned by the call.

        See localrepo.setparents()
        """
        if p2 is None:
            p2 = self._nodeconstants.nullid
        if self._parentwriters == 0:
            raise ValueError(
                b"cannot set dirstate parent outside of "
                b"dirstate.parentchange context manager"
            )

        self._dirty = True
        oldp2 = self._pl[1]
        if self._origpl is None:
            # remember the original parents so they can be reported later
            self._origpl = self._pl
        nullid = self._nodeconstants.nullid
        # True if we need to fold p2 related state back to a linear case
        fold_p2 = oldp2 != nullid and p2 == nullid
        return self._map.setparents(p1, p2, fold_p2=fold_p2)
397
397
    def setbranch(self, branch):
        """Record ``branch`` (in local encoding) as the current branch.

        Updates the cached ``_branch`` value and atomically rewrites the
        .hg/branch file; on any failure the partially-written file is
        discarded and the exception re-raised.
        """
        self.__class__._branch.set(self, encoding.fromlocal(branch))
        f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
        try:
            f.write(self._branch + b'\n')
            f.close()

            # make sure filecache has the correct stat info for _branch after
            # replacing the underlying file
            ce = self._filecache[b'_branch']
            if ce:
                ce.refresh()
        except:  # re-raises
            f.discard()
            raise
413
413
414 def invalidate(self):
414 def invalidate(self):
415 """Causes the next access to reread the dirstate.
415 """Causes the next access to reread the dirstate.
416
416
417 This is different from localrepo.invalidatedirstate() because it always
417 This is different from localrepo.invalidatedirstate() because it always
418 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
418 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
419 check whether the dirstate has changed before rereading it."""
419 check whether the dirstate has changed before rereading it."""
420
420
421 for a in ("_map", "_branch", "_ignore"):
421 for a in ("_map", "_branch", "_ignore"):
422 if a in self.__dict__:
422 if a in self.__dict__:
423 delattr(self, a)
423 delattr(self, a)
424 self._lastnormaltime = 0
424 self._lastnormaltime = 0
425 self._dirty = False
425 self._dirty = False
426 self._parentwriters = 0
426 self._parentwriters = 0
427 self._origpl = None
427 self._origpl = None
428
428
429 def copy(self, source, dest):
429 def copy(self, source, dest):
430 """Mark dest as a copy of source. Unmark dest if source is None."""
430 """Mark dest as a copy of source. Unmark dest if source is None."""
431 if source == dest:
431 if source == dest:
432 return
432 return
433 self._dirty = True
433 self._dirty = True
434 if source is not None:
434 if source is not None:
435 self._map.copymap[dest] = source
435 self._map.copymap[dest] = source
436 else:
436 else:
437 self._map.copymap.pop(dest, None)
437 self._map.copymap.pop(dest, None)
438
438
439 def copied(self, file):
439 def copied(self, file):
440 return self._map.copymap.get(file, None)
440 return self._map.copymap.get(file, None)
441
441
442 def copies(self):
442 def copies(self):
443 return self._map.copymap
443 return self._map.copymap
444
444
445 @requires_no_parents_change
445 @requires_no_parents_change
446 def set_tracked(self, filename):
446 def set_tracked(self, filename):
447 """a "public" method for generic code to mark a file as tracked
447 """a "public" method for generic code to mark a file as tracked
448
448
449 This function is to be called outside of "update/merge" case. For
449 This function is to be called outside of "update/merge" case. For
450 example by a command like `hg add X`.
450 example by a command like `hg add X`.
451
451
452 return True the file was previously untracked, False otherwise.
452 return True the file was previously untracked, False otherwise.
453 """
453 """
454 self._dirty = True
454 self._dirty = True
455 entry = self._map.get(filename)
455 entry = self._map.get(filename)
456 if entry is None or not entry.tracked:
456 if entry is None or not entry.tracked:
457 self._check_new_tracked_filename(filename)
457 self._check_new_tracked_filename(filename)
458 return self._map.set_tracked(filename)
458 return self._map.set_tracked(filename)
459
459
460 @requires_no_parents_change
460 @requires_no_parents_change
461 def set_untracked(self, filename):
461 def set_untracked(self, filename):
462 """a "public" method for generic code to mark a file as untracked
462 """a "public" method for generic code to mark a file as untracked
463
463
464 This function is to be called outside of "update/merge" case. For
464 This function is to be called outside of "update/merge" case. For
465 example by a command like `hg remove X`.
465 example by a command like `hg remove X`.
466
466
467 return True the file was previously tracked, False otherwise.
467 return True the file was previously tracked, False otherwise.
468 """
468 """
469 ret = self._map.set_untracked(filename)
469 ret = self._map.set_untracked(filename)
470 if ret:
470 if ret:
471 self._dirty = True
471 self._dirty = True
472 return ret
472 return ret
473
473
    @requires_no_parents_change
    def set_clean(self, filename, parentfiledata=None):
        """record that the current state of the file on disk is known to be clean"""
        self._dirty = True
        if parentfiledata:
            # caller supplied cached (mode, size, mtime) stat data
            (mode, size, mtime) = parentfiledata
        else:
            # no cached data: stat the file now
            (mode, size, mtime) = self._get_filedata(filename)
        if not self._map[filename].tracked:
            self._check_new_tracked_filename(filename)
        self._map.set_clean(filename, mode, size, mtime)
        if mtime > self._lastnormaltime:
            # Remember the most recent modification timeslot for status(),
            # to make sure we won't miss future size-preserving file content
            # modifications that happen within the same timeslot.
            self._lastnormaltime = mtime
490
490
    @requires_no_parents_change
    def set_possibly_dirty(self, filename):
        """record that the current state of the file on disk is unknown"""
        self._dirty = True
        self._map.set_possibly_dirty(filename)
496
496
    @requires_parents_change
    def update_file_p1(
        self,
        filename,
        p1_tracked,
    ):
        """Set a file as tracked in the parent (or not)

        This is to be called when adjusting the dirstate to a new parent
        after a history rewriting operation.

        It should not be called during a merge (p2 != nullid) and only within
        a `with dirstate.parentchange():` context.
        """
        if self.in_merge:
            msg = b'update_file_reference should not be called when merging'
            raise error.ProgrammingError(msg)
        entry = self._map.get(filename)
        if entry is None:
            wc_tracked = False
        else:
            wc_tracked = entry.tracked
        if not (p1_tracked or wc_tracked):
            # the file is no longer relevant to anyone
            if self._map.get(filename) is not None:
                self._map.reset_state(filename)
                self._dirty = True
        elif (not p1_tracked) and wc_tracked:
            if entry is not None and entry.added:
                return  # avoid dropping copy information (maybe?)

        parentfiledata = None
        if wc_tracked and p1_tracked:
            # file is present in both working copy and p1: record stat data
            parentfiledata = self._get_filedata(filename)

        self._map.reset_state(
            filename,
            wc_tracked,
            p1_tracked,
            # the underlying reference might have changed, we will have to
            # check it.
            has_meaningful_mtime=False,
            parentfiledata=parentfiledata,
        )
        if (
            parentfiledata is not None
            and parentfiledata[2] > self._lastnormaltime
        ):
            # Remember the most recent modification timeslot for status(),
            # to make sure we won't miss future size-preserving file content
            # modifications that happen within the same timeslot.
            self._lastnormaltime = parentfiledata[2]
549
549
    @requires_parents_change
    def update_file(
        self,
        filename,
        wc_tracked,
        p1_tracked,
        p2_info=False,
        possibly_dirty=False,
        parentfiledata=None,
    ):
        """update the information about a file in the dirstate

        This is to be called when the direstates parent changes to keep track
        of what is the file situation in regards to the working copy and its parent.

        This function must be called within a `dirstate.parentchange` context.

        note: the API is at an early stage and we might need to adjust it
        depending of what information ends up being relevant and useful to
        other processing.
        """

        # note: I do not think we need to double check name clash here since we
        # are in a update/merge case that should already have taken care of
        # this. The test agrees

        self._dirty = True

        # stat data is only meaningful for a possibly-clean file that is
        # tracked on both sides and not affected by p2
        need_parent_file_data = (
            not possibly_dirty and not p2_info and wc_tracked and p1_tracked
        )

        if need_parent_file_data and parentfiledata is None:
            parentfiledata = self._get_filedata(filename)

        self._map.reset_state(
            filename,
            wc_tracked,
            p1_tracked,
            p2_info=p2_info,
            has_meaningful_mtime=not possibly_dirty,
            parentfiledata=parentfiledata,
        )
        if (
            parentfiledata is not None
            and parentfiledata[2] > self._lastnormaltime
        ):
            # Remember the most recent modification timeslot for status(),
            # to make sure we won't miss future size-preserving file content
            # modifications that happen within the same timeslot.
            self._lastnormaltime = parentfiledata[2]
601
601
602 def _check_new_tracked_filename(self, filename):
602 def _check_new_tracked_filename(self, filename):
603 scmutil.checkfilename(filename)
603 scmutil.checkfilename(filename)
604 if self._map.hastrackeddir(filename):
604 if self._map.hastrackeddir(filename):
605 msg = _(b'directory %r already in dirstate')
605 msg = _(b'directory %r already in dirstate')
606 msg %= pycompat.bytestr(filename)
606 msg %= pycompat.bytestr(filename)
607 raise error.Abort(msg)
607 raise error.Abort(msg)
608 # shadows
608 # shadows
609 for d in pathutil.finddirs(filename):
609 for d in pathutil.finddirs(filename):
610 if self._map.hastrackeddir(d):
610 if self._map.hastrackeddir(d):
611 break
611 break
612 entry = self._map.get(d)
612 entry = self._map.get(d)
613 if entry is not None and not entry.removed:
613 if entry is not None and not entry.removed:
614 msg = _(b'file %r in dirstate clashes with %r')
614 msg = _(b'file %r in dirstate clashes with %r')
615 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
615 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
616 raise error.Abort(msg)
616 raise error.Abort(msg)
617
617
    def _get_filedata(self, filename):
        """return a (mode, size, mtime) tuple from an lstat of *filename*

        *filename* is relative to the repository root; it is joined with
        the root before the lstat call.
        """
        s = os.lstat(self._join(filename))
        mode = s.st_mode
        size = s.st_size
        mtime = s[stat.ST_MTIME]
        return (mode, size, mtime)
625
625
    def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
        """Resolve the on-disk/dirstate case of *path* and cache it.

        *normed* is the case-normalized form of *path*; the discovered
        folding is stored under that key in *storemap* (one of the
        dirstate's fold maps) before being returned.
        """
        if exists is None:
            exists = os.path.lexists(os.path.join(self._root, path))
        if not exists:
            # Maybe a path component exists
            if not ignoremissing and b'/' in path:
                d, f = path.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, None)
                folded = d + b"/" + f
            else:
                # No path components, preserve original case
                folded = path
        else:
            # recursively normalize leading directory components
            # against dirstate
            if b'/' in normed:
                d, f = normed.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, True)
                r = self._root + b"/" + d
                folded = d + b"/" + util.fspath(f, r)
            else:
                folded = util.fspath(normed, self._root)
        storemap[normed] = folded

        return folded
651
651
652 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
652 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
653 normed = util.normcase(path)
653 normed = util.normcase(path)
654 folded = self._map.filefoldmap.get(normed, None)
654 folded = self._map.filefoldmap.get(normed, None)
655 if folded is None:
655 if folded is None:
656 if isknown:
656 if isknown:
657 folded = path
657 folded = path
658 else:
658 else:
659 folded = self._discoverpath(
659 folded = self._discoverpath(
660 path, normed, ignoremissing, exists, self._map.filefoldmap
660 path, normed, ignoremissing, exists, self._map.filefoldmap
661 )
661 )
662 return folded
662 return folded
663
663
664 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
664 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
665 normed = util.normcase(path)
665 normed = util.normcase(path)
666 folded = self._map.filefoldmap.get(normed, None)
666 folded = self._map.filefoldmap.get(normed, None)
667 if folded is None:
667 if folded is None:
668 folded = self._map.dirfoldmap.get(normed, None)
668 folded = self._map.dirfoldmap.get(normed, None)
669 if folded is None:
669 if folded is None:
670 if isknown:
670 if isknown:
671 folded = path
671 folded = path
672 else:
672 else:
673 # store discovered result in dirfoldmap so that future
673 # store discovered result in dirfoldmap so that future
674 # normalizefile calls don't start matching directories
674 # normalizefile calls don't start matching directories
675 folded = self._discoverpath(
675 folded = self._discoverpath(
676 path, normed, ignoremissing, exists, self._map.dirfoldmap
676 path, normed, ignoremissing, exists, self._map.dirfoldmap
677 )
677 )
678 return folded
678 return folded
679
679
680 def normalize(self, path, isknown=False, ignoremissing=False):
680 def normalize(self, path, isknown=False, ignoremissing=False):
681 """
681 """
682 normalize the case of a pathname when on a casefolding filesystem
682 normalize the case of a pathname when on a casefolding filesystem
683
683
684 isknown specifies whether the filename came from walking the
684 isknown specifies whether the filename came from walking the
685 disk, to avoid extra filesystem access.
685 disk, to avoid extra filesystem access.
686
686
687 If ignoremissing is True, missing path are returned
687 If ignoremissing is True, missing path are returned
688 unchanged. Otherwise, we try harder to normalize possibly
688 unchanged. Otherwise, we try harder to normalize possibly
689 existing path components.
689 existing path components.
690
690
691 The normalized case is determined based on the following precedence:
691 The normalized case is determined based on the following precedence:
692
692
693 - version of name already stored in the dirstate
693 - version of name already stored in the dirstate
694 - version of name stored on disk
694 - version of name stored on disk
695 - version provided via command arguments
695 - version provided via command arguments
696 """
696 """
697
697
698 if self._checkcase:
698 if self._checkcase:
699 return self._normalize(path, isknown, ignoremissing)
699 return self._normalize(path, isknown, ignoremissing)
700 return path
700 return path
701
701
    def clear(self):
        """drop every entry from the dirstate

        Also resets the "most recent normal mtime" watermark and marks the
        dirstate dirty so the emptied state will be written out.
        """
        self._map.clear()
        self._lastnormaltime = 0
        self._dirty = True
706
706
    def rebuild(self, parent, allfiles, changedfiles=None):
        """rebuild dirstate entries against *parent*

        When *changedfiles* is None the whole dirstate is rebuilt from
        *allfiles*; otherwise only the changed files are refreshed (files
        present in *allfiles*) or dropped (files absent from it).
        """
        if changedfiles is None:
            # Rebuild entire dirstate
            to_lookup = allfiles
            to_drop = []
            # clear() resets _lastnormaltime; preserve it across the rebuild
            lastnormaltime = self._lastnormaltime
            self.clear()
            self._lastnormaltime = lastnormaltime
        elif len(changedfiles) < 10:
            # Avoid turning allfiles into a set, which can be expensive if it's
            # large.
            to_lookup = []
            to_drop = []
            for f in changedfiles:
                if f in allfiles:
                    to_lookup.append(f)
                else:
                    to_drop.append(f)
        else:
            # many changed files: set arithmetic is worth the set() cost
            changedfilesset = set(changedfiles)
            to_lookup = changedfilesset & set(allfiles)
            to_drop = changedfilesset - to_lookup

        # remember the pre-rebuild parents so write() can fire callbacks
        if self._origpl is None:
            self._origpl = self._pl
        self._map.setparents(parent, self._nodeconstants.nullid)

        for f in to_lookup:

            if self.in_merge:
                self.set_tracked(f)
            else:
                self._map.reset_state(
                    f,
                    wc_tracked=True,
                    p1_tracked=True,
                )
        for f in to_drop:
            self._map.reset_state(f)

        self._dirty = True
748
748
    def identity(self):
        """Return identity of dirstate itself to detect changing in storage

        If identity of previous dirstate is equal to this, writing
        changes based on the former dirstate out can keep consistency.
        """
        # delegated to the map, which tracks the on-disk file identity
        return self._map.identity
756
756
    def write(self, tr):
        """write in-memory dirstate changes out if anything is dirty

        With a transaction *tr*, the actual write is delayed via a file
        generator so ambiguous timestamps can be handled at commit time;
        without one, the dirstate file is written immediately.
        """
        if not self._dirty:
            return

        filename = self._filename
        if tr:
            # 'dirstate.write()' is not only for writing in-memory
            # changes out, but also for dropping ambiguous timestamp.
            # delayed writing re-raise "ambiguous timestamp issue".
            # See also the wiki page below for detail:
            # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan

            # record when mtime start to be ambiguous
            now = _getfsnow(self._opener)

            # delay writing in-memory changes out
            tr.addfilegenerator(
                b'dirstate',
                (self._filename,),
                lambda f: self._writedirstate(tr, f, now=now),
                location=b'plain',
            )
            return

        # immediate write path: atomic temp file with ambiguity checking
        st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
        self._writedirstate(tr, st)
783
783
    def addparentchangecallback(self, category, callback):
        """add a callback to be called when the wd parents are changed

        Callback will be called with the following arguments:
        dirstate, (oldp1, oldp2), (newp1, newp2)

        Category is a unique identifier to allow overwriting an old callback
        with a newer callback.
        """
        # later registration under the same category replaces the earlier one
        self._plchangecallbacks[category] = callback
794
794
    def _writedirstate(self, tr, st, now=None):
        """serialize the dirstate map to the open file object *st*

        *now* is the filesystem's notion of the current time, used to
        detect entries with ambiguous mtimes; when None it is derived
        from the freshly created temporary file.
        """
        # notify callbacks about parents change
        if self._origpl is not None and self._origpl != self._pl:
            # sorted for a deterministic callback order across runs
            for c, callback in sorted(
                pycompat.iteritems(self._plchangecallbacks)
            ):
                callback(self, self._origpl, self._pl)
            self._origpl = None

        if now is None:
            # use the modification time of the newly created temporary file as the
            # filesystem's notion of 'now'
            now = util.fstat(st)[stat.ST_MTIME] & _rangemask

        # enough 'delaywrite' prevents 'pack_dirstate' from dropping
        # timestamp of each entries in dirstate, because of 'now > mtime'
        delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
        if delaywrite > 0:
            # do we have any files to delay for?
            for f, e in pycompat.iteritems(self._map):
                if e.need_delay(now):
                    import time  # to avoid useless import

                    # rather than sleep n seconds, sleep until the next
                    # multiple of n seconds
                    clock = time.time()
                    start = int(clock) - (int(clock) % delaywrite)
                    end = start + delaywrite
                    time.sleep(end - clock)
                    now = end  # trust our estimate that the end is near now
                    break

        self._map.write(tr, st, now)
        self._lastnormaltime = 0
        self._dirty = False
830
830
831 def _dirignore(self, f):
831 def _dirignore(self, f):
832 if self._ignore(f):
832 if self._ignore(f):
833 return True
833 return True
834 for p in pathutil.finddirs(f):
834 for p in pathutil.finddirs(f):
835 if self._ignore(p):
835 if self._ignore(p):
836 return True
836 return True
837 return False
837 return False
838
838
839 def _ignorefiles(self):
839 def _ignorefiles(self):
840 files = []
840 files = []
841 if os.path.exists(self._join(b'.hgignore')):
841 if os.path.exists(self._join(b'.hgignore')):
842 files.append(self._join(b'.hgignore'))
842 files.append(self._join(b'.hgignore'))
843 for name, path in self._ui.configitems(b"ui"):
843 for name, path in self._ui.configitems(b"ui"):
844 if name == b'ignore' or name.startswith(b'ignore.'):
844 if name == b'ignore' or name.startswith(b'ignore.'):
845 # we need to use os.path.join here rather than self._join
845 # we need to use os.path.join here rather than self._join
846 # because path is arbitrary and user-specified
846 # because path is arbitrary and user-specified
847 files.append(os.path.join(self._rootdir, util.expandpath(path)))
847 files.append(os.path.join(self._rootdir, util.expandpath(path)))
848 return files
848 return files
849
849
    def _ignorefileandline(self, f):
        """Return (file, lineno, line) of the first ignore pattern matching *f*

        Walks every ignore file (following ``subinclude`` references
        breadth-first, each file visited once) and tests its patterns
        against *f*.  Returns ``(None, -1, b"")`` when nothing matches.
        """
        files = collections.deque(self._ignorefiles())
        visited = set()
        while files:
            i = files.popleft()
            patterns = matchmod.readpatternfile(
                i, self._ui.warn, sourceinfo=True
            )
            for pattern, lineno, line in patterns:
                kind, p = matchmod._patsplit(pattern, b'glob')
                if kind == b"subinclude":
                    # queue the referenced pattern file instead of matching
                    if p not in visited:
                        files.append(p)
                    continue
                m = matchmod.match(
                    self._root, b'', [], [pattern], warn=self._ui.warn
                )
                if m(f):
                    return (i, lineno, line)
            visited.add(i)
        return (None, -1, b"")
871
871
    def _walkexplicit(self, match, subrepos):
        """Get stat data about the files explicitly specified by match.

        Return a triple (results, dirsfound, dirsnotfound).
        - results is a mapping from filename to stat result. It also contains
          listings mapping subrepos and .hg to None.
        - dirsfound is a list of files found to be directories.
        - dirsnotfound is a list of files that the dirstate thinks are
          directories and that were not found."""

        def badtype(mode):
            # human-readable description for a file type we cannot track
            kind = _(b'unknown')
            if stat.S_ISCHR(mode):
                kind = _(b'character device')
            elif stat.S_ISBLK(mode):
                kind = _(b'block device')
            elif stat.S_ISFIFO(mode):
                kind = _(b'fifo')
            elif stat.S_ISSOCK(mode):
                kind = _(b'socket')
            elif stat.S_ISDIR(mode):
                kind = _(b'directory')
            return _(b'unsupported file type (type is %s)') % kind

        # hoist frequently used attributes to locals for the hot loop below
        badfn = match.bad
        dmap = self._map
        lstat = os.lstat
        getkind = stat.S_IFMT
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join
        dirsfound = []
        foundadd = dirsfound.append
        dirsnotfound = []
        notfoundadd = dirsnotfound.append

        if not match.isexact() and self._checkcase:
            normalize = self._normalize
        else:
            normalize = None

        # merge-walk sorted files and subrepos, dropping files that live
        # inside a subrepo (they are handled by the subrepo itself)
        files = sorted(match.files())
        subrepos.sort()
        i, j = 0, 0
        while i < len(files) and j < len(subrepos):
            subpath = subrepos[j] + b"/"
            if files[i] < subpath:
                i += 1
                continue
            while i < len(files) and files[i].startswith(subpath):
                del files[i]
            j += 1

        if not files or b'' in files:
            files = [b'']
            # constructing the foldmap is expensive, so don't do it for the
            # common case where files is ['']
            normalize = None
        results = dict.fromkeys(subrepos)
        results[b'.hg'] = None

        for ff in files:
            if normalize:
                nf = normalize(ff, False, True)
            else:
                nf = ff
            if nf in results:
                continue

            try:
                st = lstat(join(nf))
                kind = getkind(st.st_mode)
                if kind == dirkind:
                    if nf in dmap:
                        # file replaced by dir on disk but still in dirstate
                        results[nf] = None
                    foundadd((nf, ff))
                elif kind == regkind or kind == lnkkind:
                    results[nf] = st
                else:
                    badfn(ff, badtype(kind))
                    if nf in dmap:
                        results[nf] = None
            except OSError as inst:  # nf not found on disk - it is dirstate only
                if nf in dmap:  # does it exactly match a missing file?
                    results[nf] = None
                else:  # does it match a missing directory?
                    if self._map.hasdir(nf):
                        notfoundadd(nf)
                    else:
                        badfn(ff, encoding.strtolocal(inst.strerror))

        # match.files() may contain explicitly-specified paths that shouldn't
        # be taken; drop them from the list of files found. dirsfound/notfound
        # aren't filtered here because they will be tested later.
        if match.anypats():
            for f in list(results):
                if f == b'.hg' or f in subrepos:
                    # keep sentinel to disable further out-of-repo walks
                    continue
                if not match(f):
                    del results[f]

        # Case insensitive filesystems cannot rely on lstat() failing to detect
        # a case-only rename. Prune the stat object for any file that does not
        # match the case in the filesystem, if there are multiple files that
        # normalize to the same path.
        if match.isexact() and self._checkcase:
            normed = {}

            # group the found files by their case-normalized form
            for f, st in pycompat.iteritems(results):
                if st is None:
                    continue

                nc = util.normcase(f)
                paths = normed.get(nc)

                if paths is None:
                    paths = set()
                    normed[nc] = paths

                paths.add(f)

            # within each group, drop the stat of every spelling that does
            # not match the actual on-disk case
            for norm, paths in pycompat.iteritems(normed):
                if len(paths) > 1:
                    for path in paths:
                        folded = self._discoverpath(
                            path, norm, True, None, self._map.dirfoldmap
                        )
                        if path != folded:
                            results[path] = None

        return results, dirsfound, dirsnotfound
1006
1006
1007 def walk(self, match, subrepos, unknown, ignored, full=True):
1007 def walk(self, match, subrepos, unknown, ignored, full=True):
1008 """
1008 """
1009 Walk recursively through the directory tree, finding all files
1009 Walk recursively through the directory tree, finding all files
1010 matched by match.
1010 matched by match.
1011
1011
1012 If full is False, maybe skip some known-clean files.
1012 If full is False, maybe skip some known-clean files.
1013
1013
1014 Return a dict mapping filename to stat-like object (either
1014 Return a dict mapping filename to stat-like object (either
1015 mercurial.osutil.stat instance or return value of os.stat()).
1015 mercurial.osutil.stat instance or return value of os.stat()).
1016
1016
1017 """
1017 """
1018 # full is a flag that extensions that hook into walk can use -- this
1018 # full is a flag that extensions that hook into walk can use -- this
1019 # implementation doesn't use it at all. This satisfies the contract
1019 # implementation doesn't use it at all. This satisfies the contract
1020 # because we only guarantee a "maybe".
1020 # because we only guarantee a "maybe".
1021
1021
1022 if ignored:
1022 if ignored:
1023 ignore = util.never
1023 ignore = util.never
1024 dirignore = util.never
1024 dirignore = util.never
1025 elif unknown:
1025 elif unknown:
1026 ignore = self._ignore
1026 ignore = self._ignore
1027 dirignore = self._dirignore
1027 dirignore = self._dirignore
1028 else:
1028 else:
1029 # if not unknown and not ignored, drop dir recursion and step 2
1029 # if not unknown and not ignored, drop dir recursion and step 2
1030 ignore = util.always
1030 ignore = util.always
1031 dirignore = util.always
1031 dirignore = util.always
1032
1032
1033 matchfn = match.matchfn
1033 matchfn = match.matchfn
1034 matchalways = match.always()
1034 matchalways = match.always()
1035 matchtdir = match.traversedir
1035 matchtdir = match.traversedir
1036 dmap = self._map
1036 dmap = self._map
1037 listdir = util.listdir
1037 listdir = util.listdir
1038 lstat = os.lstat
1038 lstat = os.lstat
1039 dirkind = stat.S_IFDIR
1039 dirkind = stat.S_IFDIR
1040 regkind = stat.S_IFREG
1040 regkind = stat.S_IFREG
1041 lnkkind = stat.S_IFLNK
1041 lnkkind = stat.S_IFLNK
1042 join = self._join
1042 join = self._join
1043
1043
1044 exact = skipstep3 = False
1044 exact = skipstep3 = False
1045 if match.isexact(): # match.exact
1045 if match.isexact(): # match.exact
1046 exact = True
1046 exact = True
1047 dirignore = util.always # skip step 2
1047 dirignore = util.always # skip step 2
1048 elif match.prefix(): # match.match, no patterns
1048 elif match.prefix(): # match.match, no patterns
1049 skipstep3 = True
1049 skipstep3 = True
1050
1050
1051 if not exact and self._checkcase:
1051 if not exact and self._checkcase:
1052 normalize = self._normalize
1052 normalize = self._normalize
1053 normalizefile = self._normalizefile
1053 normalizefile = self._normalizefile
1054 skipstep3 = False
1054 skipstep3 = False
1055 else:
1055 else:
1056 normalize = self._normalize
1056 normalize = self._normalize
1057 normalizefile = None
1057 normalizefile = None
1058
1058
1059 # step 1: find all explicit files
1059 # step 1: find all explicit files
1060 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1060 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1061 if matchtdir:
1061 if matchtdir:
1062 for d in work:
1062 for d in work:
1063 matchtdir(d[0])
1063 matchtdir(d[0])
1064 for d in dirsnotfound:
1064 for d in dirsnotfound:
1065 matchtdir(d)
1065 matchtdir(d)
1066
1066
1067 skipstep3 = skipstep3 and not (work or dirsnotfound)
1067 skipstep3 = skipstep3 and not (work or dirsnotfound)
1068 work = [d for d in work if not dirignore(d[0])]
1068 work = [d for d in work if not dirignore(d[0])]
1069
1069
1070 # step 2: visit subdirectories
1070 # step 2: visit subdirectories
1071 def traverse(work, alreadynormed):
1071 def traverse(work, alreadynormed):
1072 wadd = work.append
1072 wadd = work.append
1073 while work:
1073 while work:
1074 tracing.counter('dirstate.walk work', len(work))
1074 tracing.counter('dirstate.walk work', len(work))
1075 nd = work.pop()
1075 nd = work.pop()
1076 visitentries = match.visitchildrenset(nd)
1076 visitentries = match.visitchildrenset(nd)
1077 if not visitentries:
1077 if not visitentries:
1078 continue
1078 continue
1079 if visitentries == b'this' or visitentries == b'all':
1079 if visitentries == b'this' or visitentries == b'all':
1080 visitentries = None
1080 visitentries = None
1081 skip = None
1081 skip = None
1082 if nd != b'':
1082 if nd != b'':
1083 skip = b'.hg'
1083 skip = b'.hg'
1084 try:
1084 try:
1085 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1085 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1086 entries = listdir(join(nd), stat=True, skip=skip)
1086 entries = listdir(join(nd), stat=True, skip=skip)
1087 except OSError as inst:
1087 except OSError as inst:
1088 if inst.errno in (errno.EACCES, errno.ENOENT):
1088 if inst.errno in (errno.EACCES, errno.ENOENT):
1089 match.bad(
1089 match.bad(
1090 self.pathto(nd), encoding.strtolocal(inst.strerror)
1090 self.pathto(nd), encoding.strtolocal(inst.strerror)
1091 )
1091 )
1092 continue
1092 continue
1093 raise
1093 raise
1094 for f, kind, st in entries:
1094 for f, kind, st in entries:
1095 # Some matchers may return files in the visitentries set,
1095 # Some matchers may return files in the visitentries set,
1096 # instead of 'this', if the matcher explicitly mentions them
1096 # instead of 'this', if the matcher explicitly mentions them
1097 # and is not an exactmatcher. This is acceptable; we do not
1097 # and is not an exactmatcher. This is acceptable; we do not
1098 # make any hard assumptions about file-or-directory below
1098 # make any hard assumptions about file-or-directory below
1099 # based on the presence of `f` in visitentries. If
1099 # based on the presence of `f` in visitentries. If
1100 # visitchildrenset returned a set, we can always skip the
1100 # visitchildrenset returned a set, we can always skip the
1101 # entries *not* in the set it provided regardless of whether
1101 # entries *not* in the set it provided regardless of whether
1102 # they're actually a file or a directory.
1102 # they're actually a file or a directory.
1103 if visitentries and f not in visitentries:
1103 if visitentries and f not in visitentries:
1104 continue
1104 continue
1105 if normalizefile:
1105 if normalizefile:
1106 # even though f might be a directory, we're only
1106 # even though f might be a directory, we're only
1107 # interested in comparing it to files currently in the
1107 # interested in comparing it to files currently in the
1108 # dmap -- therefore normalizefile is enough
1108 # dmap -- therefore normalizefile is enough
1109 nf = normalizefile(
1109 nf = normalizefile(
1110 nd and (nd + b"/" + f) or f, True, True
1110 nd and (nd + b"/" + f) or f, True, True
1111 )
1111 )
1112 else:
1112 else:
1113 nf = nd and (nd + b"/" + f) or f
1113 nf = nd and (nd + b"/" + f) or f
1114 if nf not in results:
1114 if nf not in results:
1115 if kind == dirkind:
1115 if kind == dirkind:
1116 if not ignore(nf):
1116 if not ignore(nf):
1117 if matchtdir:
1117 if matchtdir:
1118 matchtdir(nf)
1118 matchtdir(nf)
1119 wadd(nf)
1119 wadd(nf)
1120 if nf in dmap and (matchalways or matchfn(nf)):
1120 if nf in dmap and (matchalways or matchfn(nf)):
1121 results[nf] = None
1121 results[nf] = None
1122 elif kind == regkind or kind == lnkkind:
1122 elif kind == regkind or kind == lnkkind:
1123 if nf in dmap:
1123 if nf in dmap:
1124 if matchalways or matchfn(nf):
1124 if matchalways or matchfn(nf):
1125 results[nf] = st
1125 results[nf] = st
1126 elif (matchalways or matchfn(nf)) and not ignore(
1126 elif (matchalways or matchfn(nf)) and not ignore(
1127 nf
1127 nf
1128 ):
1128 ):
1129 # unknown file -- normalize if necessary
1129 # unknown file -- normalize if necessary
1130 if not alreadynormed:
1130 if not alreadynormed:
1131 nf = normalize(nf, False, True)
1131 nf = normalize(nf, False, True)
1132 results[nf] = st
1132 results[nf] = st
1133 elif nf in dmap and (matchalways or matchfn(nf)):
1133 elif nf in dmap and (matchalways or matchfn(nf)):
1134 results[nf] = None
1134 results[nf] = None
1135
1135
1136 for nd, d in work:
1136 for nd, d in work:
1137 # alreadynormed means that processwork doesn't have to do any
1137 # alreadynormed means that processwork doesn't have to do any
1138 # expensive directory normalization
1138 # expensive directory normalization
1139 alreadynormed = not normalize or nd == d
1139 alreadynormed = not normalize or nd == d
1140 traverse([d], alreadynormed)
1140 traverse([d], alreadynormed)
1141
1141
1142 for s in subrepos:
1142 for s in subrepos:
1143 del results[s]
1143 del results[s]
1144 del results[b'.hg']
1144 del results[b'.hg']
1145
1145
1146 # step 3: visit remaining files from dmap
1146 # step 3: visit remaining files from dmap
1147 if not skipstep3 and not exact:
1147 if not skipstep3 and not exact:
1148 # If a dmap file is not in results yet, it was either
1148 # If a dmap file is not in results yet, it was either
1149 # a) not matching matchfn b) ignored, c) missing, or d) under a
1149 # a) not matching matchfn b) ignored, c) missing, or d) under a
1150 # symlink directory.
1150 # symlink directory.
1151 if not results and matchalways:
1151 if not results and matchalways:
1152 visit = [f for f in dmap]
1152 visit = [f for f in dmap]
1153 else:
1153 else:
1154 visit = [f for f in dmap if f not in results and matchfn(f)]
1154 visit = [f for f in dmap if f not in results and matchfn(f)]
1155 visit.sort()
1155 visit.sort()
1156
1156
1157 if unknown:
1157 if unknown:
1158 # unknown == True means we walked all dirs under the roots
1158 # unknown == True means we walked all dirs under the roots
1159 # that wasn't ignored, and everything that matched was stat'ed
1159 # that wasn't ignored, and everything that matched was stat'ed
1160 # and is already in results.
1160 # and is already in results.
1161 # The rest must thus be ignored or under a symlink.
1161 # The rest must thus be ignored or under a symlink.
1162 audit_path = pathutil.pathauditor(self._root, cached=True)
1162 audit_path = pathutil.pathauditor(self._root, cached=True)
1163
1163
1164 for nf in iter(visit):
1164 for nf in iter(visit):
1165 # If a stat for the same file was already added with a
1165 # If a stat for the same file was already added with a
1166 # different case, don't add one for this, since that would
1166 # different case, don't add one for this, since that would
1167 # make it appear as if the file exists under both names
1167 # make it appear as if the file exists under both names
1168 # on disk.
1168 # on disk.
1169 if (
1169 if (
1170 normalizefile
1170 normalizefile
1171 and normalizefile(nf, True, True) in results
1171 and normalizefile(nf, True, True) in results
1172 ):
1172 ):
1173 results[nf] = None
1173 results[nf] = None
1174 # Report ignored items in the dmap as long as they are not
1174 # Report ignored items in the dmap as long as they are not
1175 # under a symlink directory.
1175 # under a symlink directory.
1176 elif audit_path.check(nf):
1176 elif audit_path.check(nf):
1177 try:
1177 try:
1178 results[nf] = lstat(join(nf))
1178 results[nf] = lstat(join(nf))
1179 # file was just ignored, no links, and exists
1179 # file was just ignored, no links, and exists
1180 except OSError:
1180 except OSError:
1181 # file doesn't exist
1181 # file doesn't exist
1182 results[nf] = None
1182 results[nf] = None
1183 else:
1183 else:
1184 # It's either missing or under a symlink directory
1184 # It's either missing or under a symlink directory
1185 # which we in this case report as missing
1185 # which we in this case report as missing
1186 results[nf] = None
1186 results[nf] = None
1187 else:
1187 else:
1188 # We may not have walked the full directory tree above,
1188 # We may not have walked the full directory tree above,
1189 # so stat and check everything we missed.
1189 # so stat and check everything we missed.
1190 iv = iter(visit)
1190 iv = iter(visit)
1191 for st in util.statfiles([join(i) for i in visit]):
1191 for st in util.statfiles([join(i) for i in visit]):
1192 results[next(iv)] = st
1192 results[next(iv)] = st
1193 return results
1193 return results
1194
1194
1195 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1195 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1196 # Force Rayon (Rust parallelism library) to respect the number of
1196 # Force Rayon (Rust parallelism library) to respect the number of
1197 # workers. This is a temporary workaround until Rust code knows
1197 # workers. This is a temporary workaround until Rust code knows
1198 # how to read the config file.
1198 # how to read the config file.
1199 numcpus = self._ui.configint(b"worker", b"numcpus")
1199 numcpus = self._ui.configint(b"worker", b"numcpus")
1200 if numcpus is not None:
1200 if numcpus is not None:
1201 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1201 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1202
1202
1203 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1203 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1204 if not workers_enabled:
1204 if not workers_enabled:
1205 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1205 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1206
1206
1207 (
1207 (
1208 lookup,
1208 lookup,
1209 modified,
1209 modified,
1210 added,
1210 added,
1211 removed,
1211 removed,
1212 deleted,
1212 deleted,
1213 clean,
1213 clean,
1214 ignored,
1214 ignored,
1215 unknown,
1215 unknown,
1216 warnings,
1216 warnings,
1217 bad,
1217 bad,
1218 traversed,
1218 traversed,
1219 dirty,
1219 dirty,
1220 ) = rustmod.status(
1220 ) = rustmod.status(
1221 self._map._map,
1221 self._map._map,
1222 matcher,
1222 matcher,
1223 self._rootdir,
1223 self._rootdir,
1224 self._ignorefiles(),
1224 self._ignorefiles(),
1225 self._checkexec,
1225 self._checkexec,
1226 self._lastnormaltime,
1226 self._lastnormaltime,
1227 bool(list_clean),
1227 bool(list_clean),
1228 bool(list_ignored),
1228 bool(list_ignored),
1229 bool(list_unknown),
1229 bool(list_unknown),
1230 bool(matcher.traversedir),
1230 bool(matcher.traversedir),
1231 )
1231 )
1232
1232
1233 self._dirty |= dirty
1233 self._dirty |= dirty
1234
1234
1235 if matcher.traversedir:
1235 if matcher.traversedir:
1236 for dir in traversed:
1236 for dir in traversed:
1237 matcher.traversedir(dir)
1237 matcher.traversedir(dir)
1238
1238
1239 if self._ui.warn:
1239 if self._ui.warn:
1240 for item in warnings:
1240 for item in warnings:
1241 if isinstance(item, tuple):
1241 if isinstance(item, tuple):
1242 file_path, syntax = item
1242 file_path, syntax = item
1243 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1243 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1244 file_path,
1244 file_path,
1245 syntax,
1245 syntax,
1246 )
1246 )
1247 self._ui.warn(msg)
1247 self._ui.warn(msg)
1248 else:
1248 else:
1249 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1249 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1250 self._ui.warn(
1250 self._ui.warn(
1251 msg
1251 msg
1252 % (
1252 % (
1253 pathutil.canonpath(
1253 pathutil.canonpath(
1254 self._rootdir, self._rootdir, item
1254 self._rootdir, self._rootdir, item
1255 ),
1255 ),
1256 b"No such file or directory",
1256 b"No such file or directory",
1257 )
1257 )
1258 )
1258 )
1259
1259
1260 for (fn, message) in bad:
1260 for (fn, message) in bad:
1261 matcher.bad(fn, encoding.strtolocal(message))
1261 matcher.bad(fn, encoding.strtolocal(message))
1262
1262
1263 status = scmutil.status(
1263 status = scmutil.status(
1264 modified=modified,
1264 modified=modified,
1265 added=added,
1265 added=added,
1266 removed=removed,
1266 removed=removed,
1267 deleted=deleted,
1267 deleted=deleted,
1268 unknown=unknown,
1268 unknown=unknown,
1269 ignored=ignored,
1269 ignored=ignored,
1270 clean=clean,
1270 clean=clean,
1271 )
1271 )
1272 return (lookup, status)
1272 return (lookup, status)
1273
1273
1274 def status(self, match, subrepos, ignored, clean, unknown):
1274 def status(self, match, subrepos, ignored, clean, unknown):
1275 """Determine the status of the working copy relative to the
1275 """Determine the status of the working copy relative to the
1276 dirstate and return a pair of (unsure, status), where status is of type
1276 dirstate and return a pair of (unsure, status), where status is of type
1277 scmutil.status and:
1277 scmutil.status and:
1278
1278
1279 unsure:
1279 unsure:
1280 files that might have been modified since the dirstate was
1280 files that might have been modified since the dirstate was
1281 written, but need to be read to be sure (size is the same
1281 written, but need to be read to be sure (size is the same
1282 but mtime differs)
1282 but mtime differs)
1283 status.modified:
1283 status.modified:
1284 files that have definitely been modified since the dirstate
1284 files that have definitely been modified since the dirstate
1285 was written (different size or mode)
1285 was written (different size or mode)
1286 status.clean:
1286 status.clean:
1287 files that have definitely not been modified since the
1287 files that have definitely not been modified since the
1288 dirstate was written
1288 dirstate was written
1289 """
1289 """
1290 listignored, listclean, listunknown = ignored, clean, unknown
1290 listignored, listclean, listunknown = ignored, clean, unknown
1291 lookup, modified, added, unknown, ignored = [], [], [], [], []
1291 lookup, modified, added, unknown, ignored = [], [], [], [], []
1292 removed, deleted, clean = [], [], []
1292 removed, deleted, clean = [], [], []
1293
1293
1294 dmap = self._map
1294 dmap = self._map
1295 dmap.preload()
1295 dmap.preload()
1296
1296
1297 use_rust = True
1297 use_rust = True
1298
1298
1299 allowed_matchers = (
1299 allowed_matchers = (
1300 matchmod.alwaysmatcher,
1300 matchmod.alwaysmatcher,
1301 matchmod.exactmatcher,
1301 matchmod.exactmatcher,
1302 matchmod.includematcher,
1302 matchmod.includematcher,
1303 )
1303 )
1304
1304
1305 if rustmod is None:
1305 if rustmod is None:
1306 use_rust = False
1306 use_rust = False
1307 elif self._checkcase:
1307 elif self._checkcase:
1308 # Case-insensitive filesystems are not handled yet
1308 # Case-insensitive filesystems are not handled yet
1309 use_rust = False
1309 use_rust = False
1310 elif subrepos:
1310 elif subrepos:
1311 use_rust = False
1311 use_rust = False
1312 elif sparse.enabled:
1312 elif sparse.enabled:
1313 use_rust = False
1313 use_rust = False
1314 elif not isinstance(match, allowed_matchers):
1314 elif not isinstance(match, allowed_matchers):
1315 # Some matchers have yet to be implemented
1315 # Some matchers have yet to be implemented
1316 use_rust = False
1316 use_rust = False
1317
1317
1318 if use_rust:
1318 if use_rust:
1319 try:
1319 try:
1320 return self._rust_status(
1320 return self._rust_status(
1321 match, listclean, listignored, listunknown
1321 match, listclean, listignored, listunknown
1322 )
1322 )
1323 except rustmod.FallbackError:
1323 except rustmod.FallbackError:
1324 pass
1324 pass
1325
1325
1326 def noop(f):
1326 def noop(f):
1327 pass
1327 pass
1328
1328
1329 dcontains = dmap.__contains__
1329 dcontains = dmap.__contains__
1330 dget = dmap.__getitem__
1330 dget = dmap.__getitem__
1331 ladd = lookup.append # aka "unsure"
1331 ladd = lookup.append # aka "unsure"
1332 madd = modified.append
1332 madd = modified.append
1333 aadd = added.append
1333 aadd = added.append
1334 uadd = unknown.append if listunknown else noop
1334 uadd = unknown.append if listunknown else noop
1335 iadd = ignored.append if listignored else noop
1335 iadd = ignored.append if listignored else noop
1336 radd = removed.append
1336 radd = removed.append
1337 dadd = deleted.append
1337 dadd = deleted.append
1338 cadd = clean.append if listclean else noop
1338 cadd = clean.append if listclean else noop
1339 mexact = match.exact
1339 mexact = match.exact
1340 dirignore = self._dirignore
1340 dirignore = self._dirignore
1341 checkexec = self._checkexec
1341 checkexec = self._checkexec
1342 copymap = self._map.copymap
1342 copymap = self._map.copymap
1343 lastnormaltime = self._lastnormaltime
1343 lastnormaltime = self._lastnormaltime
1344
1344
1345 # We need to do full walks when either
1345 # We need to do full walks when either
1346 # - we're listing all clean files, or
1346 # - we're listing all clean files, or
1347 # - match.traversedir does something, because match.traversedir should
1347 # - match.traversedir does something, because match.traversedir should
1348 # be called for every dir in the working dir
1348 # be called for every dir in the working dir
1349 full = listclean or match.traversedir is not None
1349 full = listclean or match.traversedir is not None
1350 for fn, st in pycompat.iteritems(
1350 for fn, st in pycompat.iteritems(
1351 self.walk(match, subrepos, listunknown, listignored, full=full)
1351 self.walk(match, subrepos, listunknown, listignored, full=full)
1352 ):
1352 ):
1353 if not dcontains(fn):
1353 if not dcontains(fn):
1354 if (listignored or mexact(fn)) and dirignore(fn):
1354 if (listignored or mexact(fn)) and dirignore(fn):
1355 if listignored:
1355 if listignored:
1356 iadd(fn)
1356 iadd(fn)
1357 else:
1357 else:
1358 uadd(fn)
1358 uadd(fn)
1359 continue
1359 continue
1360
1360
1361 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1361 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1362 # written like that for performance reasons. dmap[fn] is not a
1362 # written like that for performance reasons. dmap[fn] is not a
1363 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1363 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1364 # opcode has fast paths when the value to be unpacked is a tuple or
1364 # opcode has fast paths when the value to be unpacked is a tuple or
1365 # a list, but falls back to creating a full-fledged iterator in
1365 # a list, but falls back to creating a full-fledged iterator in
1366 # general. That is much slower than simply accessing and storing the
1366 # general. That is much slower than simply accessing and storing the
1367 # tuple members one by one.
1367 # tuple members one by one.
1368 t = dget(fn)
1368 t = dget(fn)
1369 mode = t.mode
1369 mode = t.mode
1370 size = t.size
1370 size = t.size
1371 time = t.mtime
1371 time = t.mtime
1372
1372
1373 if not st and t.tracked:
1373 if not st and t.tracked:
1374 dadd(fn)
1374 dadd(fn)
1375 elif t.merged or t.from_p2:
1375 elif t.p2_info:
1376 madd(fn)
1376 madd(fn)
1377 elif t.added:
1377 elif t.added:
1378 aadd(fn)
1378 aadd(fn)
1379 elif t.removed:
1379 elif t.removed:
1380 radd(fn)
1380 radd(fn)
1381 elif t.tracked:
1381 elif t.tracked:
1382 if (
1382 if (
1383 size >= 0
1383 size >= 0
1384 and (
1384 and (
1385 (size != st.st_size and size != st.st_size & _rangemask)
1385 (size != st.st_size and size != st.st_size & _rangemask)
1386 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1386 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1387 )
1387 )
1388 or fn in copymap
1388 or fn in copymap
1389 ):
1389 ):
1390 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1390 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1391 # issue6456: Size returned may be longer due to
1391 # issue6456: Size returned may be longer due to
1392 # encryption on EXT-4 fscrypt, undecided.
1392 # encryption on EXT-4 fscrypt, undecided.
1393 ladd(fn)
1393 ladd(fn)
1394 else:
1394 else:
1395 madd(fn)
1395 madd(fn)
1396 elif (
1396 elif (
1397 time != st[stat.ST_MTIME]
1397 time != st[stat.ST_MTIME]
1398 and time != st[stat.ST_MTIME] & _rangemask
1398 and time != st[stat.ST_MTIME] & _rangemask
1399 ):
1399 ):
1400 ladd(fn)
1400 ladd(fn)
1401 elif st[stat.ST_MTIME] == lastnormaltime:
1401 elif st[stat.ST_MTIME] == lastnormaltime:
1402 # fn may have just been marked as normal and it may have
1402 # fn may have just been marked as normal and it may have
1403 # changed in the same second without changing its size.
1403 # changed in the same second without changing its size.
1404 # This can happen if we quickly do multiple commits.
1404 # This can happen if we quickly do multiple commits.
1405 # Force lookup, so we don't miss such a racy file change.
1405 # Force lookup, so we don't miss such a racy file change.
1406 ladd(fn)
1406 ladd(fn)
1407 elif listclean:
1407 elif listclean:
1408 cadd(fn)
1408 cadd(fn)
1409 status = scmutil.status(
1409 status = scmutil.status(
1410 modified, added, removed, deleted, unknown, ignored, clean
1410 modified, added, removed, deleted, unknown, ignored, clean
1411 )
1411 )
1412 return (lookup, status)
1412 return (lookup, status)
1413
1413
1414 def matches(self, match):
1414 def matches(self, match):
1415 """
1415 """
1416 return files in the dirstate (in whatever state) filtered by match
1416 return files in the dirstate (in whatever state) filtered by match
1417 """
1417 """
1418 dmap = self._map
1418 dmap = self._map
1419 if rustmod is not None:
1419 if rustmod is not None:
1420 dmap = self._map._map
1420 dmap = self._map._map
1421
1421
1422 if match.always():
1422 if match.always():
1423 return dmap.keys()
1423 return dmap.keys()
1424 files = match.files()
1424 files = match.files()
1425 if match.isexact():
1425 if match.isexact():
1426 # fast path -- filter the other way around, since typically files is
1426 # fast path -- filter the other way around, since typically files is
1427 # much smaller than dmap
1427 # much smaller than dmap
1428 return [f for f in files if f in dmap]
1428 return [f for f in files if f in dmap]
1429 if match.prefix() and all(fn in dmap for fn in files):
1429 if match.prefix() and all(fn in dmap for fn in files):
1430 # fast path -- all the values are known to be files, so just return
1430 # fast path -- all the values are known to be files, so just return
1431 # that
1431 # that
1432 return list(files)
1432 return list(files)
1433 return [f for f in dmap if match(f)]
1433 return [f for f in dmap if match(f)]
1434
1434
1435 def _actualfilename(self, tr):
1435 def _actualfilename(self, tr):
1436 if tr:
1436 if tr:
1437 return self._pendingfilename
1437 return self._pendingfilename
1438 else:
1438 else:
1439 return self._filename
1439 return self._filename
1440
1440
1441 def savebackup(self, tr, backupname):
1441 def savebackup(self, tr, backupname):
1442 '''Save current dirstate into backup file'''
1442 '''Save current dirstate into backup file'''
1443 filename = self._actualfilename(tr)
1443 filename = self._actualfilename(tr)
1444 assert backupname != filename
1444 assert backupname != filename
1445
1445
1446 # use '_writedirstate' instead of 'write' to write changes certainly,
1446 # use '_writedirstate' instead of 'write' to write changes certainly,
1447 # because the latter omits writing out if transaction is running.
1447 # because the latter omits writing out if transaction is running.
1448 # output file will be used to create backup of dirstate at this point.
1448 # output file will be used to create backup of dirstate at this point.
1449 if self._dirty or not self._opener.exists(filename):
1449 if self._dirty or not self._opener.exists(filename):
1450 self._writedirstate(
1450 self._writedirstate(
1451 tr,
1451 tr,
1452 self._opener(filename, b"w", atomictemp=True, checkambig=True),
1452 self._opener(filename, b"w", atomictemp=True, checkambig=True),
1453 )
1453 )
1454
1454
1455 if tr:
1455 if tr:
1456 # ensure that subsequent tr.writepending returns True for
1456 # ensure that subsequent tr.writepending returns True for
1457 # changes written out above, even if dirstate is never
1457 # changes written out above, even if dirstate is never
1458 # changed after this
1458 # changed after this
1459 tr.addfilegenerator(
1459 tr.addfilegenerator(
1460 b'dirstate',
1460 b'dirstate',
1461 (self._filename,),
1461 (self._filename,),
1462 lambda f: self._writedirstate(tr, f),
1462 lambda f: self._writedirstate(tr, f),
1463 location=b'plain',
1463 location=b'plain',
1464 )
1464 )
1465
1465
1466 # ensure that pending file written above is unlinked at
1466 # ensure that pending file written above is unlinked at
1467 # failure, even if tr.writepending isn't invoked until the
1467 # failure, even if tr.writepending isn't invoked until the
1468 # end of this transaction
1468 # end of this transaction
1469 tr.registertmp(filename, location=b'plain')
1469 tr.registertmp(filename, location=b'plain')
1470
1470
1471 self._opener.tryunlink(backupname)
1471 self._opener.tryunlink(backupname)
1472 # hardlink backup is okay because _writedirstate is always called
1472 # hardlink backup is okay because _writedirstate is always called
1473 # with an "atomictemp=True" file.
1473 # with an "atomictemp=True" file.
1474 util.copyfile(
1474 util.copyfile(
1475 self._opener.join(filename),
1475 self._opener.join(filename),
1476 self._opener.join(backupname),
1476 self._opener.join(backupname),
1477 hardlink=True,
1477 hardlink=True,
1478 )
1478 )
1479
1479
1480 def restorebackup(self, tr, backupname):
1480 def restorebackup(self, tr, backupname):
1481 '''Restore dirstate by backup file'''
1481 '''Restore dirstate by backup file'''
1482 # this "invalidate()" prevents "wlock.release()" from writing
1482 # this "invalidate()" prevents "wlock.release()" from writing
1483 # changes of dirstate out after restoring from backup file
1483 # changes of dirstate out after restoring from backup file
1484 self.invalidate()
1484 self.invalidate()
1485 filename = self._actualfilename(tr)
1485 filename = self._actualfilename(tr)
1486 o = self._opener
1486 o = self._opener
1487 if util.samefile(o.join(backupname), o.join(filename)):
1487 if util.samefile(o.join(backupname), o.join(filename)):
1488 o.unlink(backupname)
1488 o.unlink(backupname)
1489 else:
1489 else:
1490 o.rename(backupname, filename, checkambig=True)
1490 o.rename(backupname, filename, checkambig=True)
1491
1491
1492 def clearbackup(self, tr, backupname):
1492 def clearbackup(self, tr, backupname):
1493 '''Clear backup file'''
1493 '''Clear backup file'''
1494 self._opener.unlink(backupname)
1494 self._opener.unlink(backupname)
1495
1495
1496 def verify(self, m1, m2):
1496 def verify(self, m1, m2):
1497 """check the dirstate content again the parent manifest and yield errors"""
1497 """check the dirstate content again the parent manifest and yield errors"""
1498 missing_from_p1 = b"%s in state %s, but not in manifest1\n"
1498 missing_from_p1 = b"%s in state %s, but not in manifest1\n"
1499 unexpected_in_p1 = b"%s in state %s, but also in manifest1\n"
1499 unexpected_in_p1 = b"%s in state %s, but also in manifest1\n"
1500 missing_from_ps = b"%s in state %s, but not in either manifest\n"
1500 missing_from_ps = b"%s in state %s, but not in either manifest\n"
1501 missing_from_ds = b"%s in manifest1, but listed as state %s\n"
1501 missing_from_ds = b"%s in manifest1, but listed as state %s\n"
1502 for f, entry in self.items():
1502 for f, entry in self.items():
1503 state = entry.state
1503 state = entry.state
1504 if state in b"nr" and f not in m1:
1504 if state in b"nr" and f not in m1:
1505 yield (missing_from_p1, f, state)
1505 yield (missing_from_p1, f, state)
1506 if state in b"a" and f in m1:
1506 if state in b"a" and f in m1:
1507 yield (unexpected_in_p1, f, state)
1507 yield (unexpected_in_p1, f, state)
1508 if state in b"m" and f not in m1 and f not in m2:
1508 if state in b"m" and f not in m1 and f not in m2:
1509 yield (missing_from_ps, f, state)
1509 yield (missing_from_ps, f, state)
1510 for f in m1:
1510 for f in m1:
1511 state = self.get_entry(f).state
1511 state = self.get_entry(f).state
1512 if state not in b"nrm":
1512 if state not in b"nrm":
1513 yield (missing_from_ds, f, state)
1513 yield (missing_from_ds, f, state)
@@ -1,770 +1,770 b''
1 # dirstatemap.py
1 # dirstatemap.py
2 #
2 #
3 # This software may be used and distributed according to the terms of the
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2 or any later version.
4 # GNU General Public License version 2 or any later version.
5
5
6 from __future__ import absolute_import
6 from __future__ import absolute_import
7
7
8 import errno
8 import errno
9
9
10 from .i18n import _
10 from .i18n import _
11
11
12 from . import (
12 from . import (
13 error,
13 error,
14 pathutil,
14 pathutil,
15 policy,
15 policy,
16 pycompat,
16 pycompat,
17 txnutil,
17 txnutil,
18 util,
18 util,
19 )
19 )
20
20
21 from .dirstateutils import (
21 from .dirstateutils import (
22 docket as docketmod,
22 docket as docketmod,
23 )
23 )
24
24
25 parsers = policy.importmod('parsers')
25 parsers = policy.importmod('parsers')
26 rustmod = policy.importrust('dirstate')
26 rustmod = policy.importrust('dirstate')
27
27
28 propertycache = util.propertycache
28 propertycache = util.propertycache
29
29
30 if rustmod is None:
30 if rustmod is None:
31 DirstateItem = parsers.DirstateItem
31 DirstateItem = parsers.DirstateItem
32 else:
32 else:
33 DirstateItem = rustmod.DirstateItem
33 DirstateItem = rustmod.DirstateItem
34
34
35 rangemask = 0x7FFFFFFF
35 rangemask = 0x7FFFFFFF
36
36
37
37
38 class _dirstatemapcommon(object):
38 class _dirstatemapcommon(object):
39 """
39 """
40 Methods that are identical for both implementations of the dirstatemap
40 Methods that are identical for both implementations of the dirstatemap
41 class, with and without Rust extensions enabled.
41 class, with and without Rust extensions enabled.
42 """
42 """
43
43
44 # please pytype
44 # please pytype
45
45
46 _map = None
46 _map = None
47 copymap = None
47 copymap = None
48
48
49 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
49 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
50 self._use_dirstate_v2 = use_dirstate_v2
50 self._use_dirstate_v2 = use_dirstate_v2
51 self._nodeconstants = nodeconstants
51 self._nodeconstants = nodeconstants
52 self._ui = ui
52 self._ui = ui
53 self._opener = opener
53 self._opener = opener
54 self._root = root
54 self._root = root
55 self._filename = b'dirstate'
55 self._filename = b'dirstate'
56 self._nodelen = 20 # Also update Rust code when changing this!
56 self._nodelen = 20 # Also update Rust code when changing this!
57 self._parents = None
57 self._parents = None
58 self._dirtyparents = False
58 self._dirtyparents = False
59
59
60 # for consistent view between _pl() and _read() invocations
60 # for consistent view between _pl() and _read() invocations
61 self._pendingmode = None
61 self._pendingmode = None
62
62
63 def preload(self):
63 def preload(self):
64 """Loads the underlying data, if it's not already loaded"""
64 """Loads the underlying data, if it's not already loaded"""
65 self._map
65 self._map
66
66
67 def get(self, key, default=None):
67 def get(self, key, default=None):
68 return self._map.get(key, default)
68 return self._map.get(key, default)
69
69
70 def __len__(self):
70 def __len__(self):
71 return len(self._map)
71 return len(self._map)
72
72
73 def __iter__(self):
73 def __iter__(self):
74 return iter(self._map)
74 return iter(self._map)
75
75
76 def __contains__(self, key):
76 def __contains__(self, key):
77 return key in self._map
77 return key in self._map
78
78
79 def __getitem__(self, item):
79 def __getitem__(self, item):
80 return self._map[item]
80 return self._map[item]
81
81
82 ### sub-class utility method
82 ### sub-class utility method
83 #
83 #
84 # Use to allow for generic implementation of some method while still coping
84 # Use to allow for generic implementation of some method while still coping
85 # with minor difference between implementation.
85 # with minor difference between implementation.
86
86
87 def _dirs_incr(self, filename, old_entry=None):
87 def _dirs_incr(self, filename, old_entry=None):
88 """incremente the dirstate counter if applicable
88 """incremente the dirstate counter if applicable
89
89
90 This might be a no-op for some subclass who deal with directory
90 This might be a no-op for some subclass who deal with directory
91 tracking in a different way.
91 tracking in a different way.
92 """
92 """
93
93
94 def _dirs_decr(self, filename, old_entry=None, remove_variant=False):
94 def _dirs_decr(self, filename, old_entry=None, remove_variant=False):
95 """decremente the dirstate counter if applicable
95 """decremente the dirstate counter if applicable
96
96
97 This might be a no-op for some subclass who deal with directory
97 This might be a no-op for some subclass who deal with directory
98 tracking in a different way.
98 tracking in a different way.
99 """
99 """
100
100
101 def _refresh_entry(self, f, entry):
101 def _refresh_entry(self, f, entry):
102 """record updated state of an entry"""
102 """record updated state of an entry"""
103
103
104 def _insert_entry(self, f, entry):
104 def _insert_entry(self, f, entry):
105 """add a new dirstate entry (or replace an unrelated one)
105 """add a new dirstate entry (or replace an unrelated one)
106
106
107 The fact it is actually new is the responsability of the caller
107 The fact it is actually new is the responsability of the caller
108 """
108 """
109
109
    def _drop_entry(self, f):
        """remove any entry for file f

        This should also drop associated copy information

        The fact we actually need to drop it is the responsibility of the
        caller"""
116
116
    ### methods to manipulate the entries
118
118
119 def set_possibly_dirty(self, filename):
119 def set_possibly_dirty(self, filename):
120 """record that the current state of the file on disk is unknown"""
120 """record that the current state of the file on disk is unknown"""
121 entry = self[filename]
121 entry = self[filename]
122 entry.set_possibly_dirty()
122 entry.set_possibly_dirty()
123 self._refresh_entry(filename, entry)
123 self._refresh_entry(filename, entry)
124
124
125 def set_clean(self, filename, mode, size, mtime):
125 def set_clean(self, filename, mode, size, mtime):
126 """mark a file as back to a clean state"""
126 """mark a file as back to a clean state"""
127 entry = self[filename]
127 entry = self[filename]
128 mtime = mtime & rangemask
128 mtime = mtime & rangemask
129 size = size & rangemask
129 size = size & rangemask
130 entry.set_clean(mode, size, mtime)
130 entry.set_clean(mode, size, mtime)
131 self._refresh_entry(filename, entry)
131 self._refresh_entry(filename, entry)
132 self.copymap.pop(filename, None)
132 self.copymap.pop(filename, None)
133
133
134 def set_tracked(self, filename):
134 def set_tracked(self, filename):
135 new = False
135 new = False
136 entry = self.get(filename)
136 entry = self.get(filename)
137 if entry is None:
137 if entry is None:
138 self._dirs_incr(filename)
138 self._dirs_incr(filename)
139 entry = DirstateItem(
139 entry = DirstateItem(
140 wc_tracked=True,
140 wc_tracked=True,
141 )
141 )
142
142
143 self._insert_entry(filename, entry)
143 self._insert_entry(filename, entry)
144 new = True
144 new = True
145 elif not entry.tracked:
145 elif not entry.tracked:
146 self._dirs_incr(filename, entry)
146 self._dirs_incr(filename, entry)
147 entry.set_tracked()
147 entry.set_tracked()
148 self._refresh_entry(filename, entry)
148 self._refresh_entry(filename, entry)
149 new = True
149 new = True
150 else:
150 else:
151 # XXX This is probably overkill for more case, but we need this to
151 # XXX This is probably overkill for more case, but we need this to
152 # fully replace the `normallookup` call with `set_tracked` one.
152 # fully replace the `normallookup` call with `set_tracked` one.
153 # Consider smoothing this in the future.
153 # Consider smoothing this in the future.
154 entry.set_possibly_dirty()
154 entry.set_possibly_dirty()
155 self._refresh_entry(filename, entry)
155 self._refresh_entry(filename, entry)
156 return new
156 return new
157
157
158 def set_untracked(self, f):
158 def set_untracked(self, f):
159 """Mark a file as no longer tracked in the dirstate map"""
159 """Mark a file as no longer tracked in the dirstate map"""
160 entry = self.get(f)
160 entry = self.get(f)
161 if entry is None:
161 if entry is None:
162 return False
162 return False
163 else:
163 else:
164 self._dirs_decr(f, old_entry=entry, remove_variant=not entry.added)
164 self._dirs_decr(f, old_entry=entry, remove_variant=not entry.added)
165 if not entry.merged:
165 if not entry.merged:
166 self.copymap.pop(f, None)
166 self.copymap.pop(f, None)
167 entry.set_untracked()
167 entry.set_untracked()
168 self._refresh_entry(f, entry)
168 self._refresh_entry(f, entry)
169 return True
169 return True
170
170
171 def reset_state(
171 def reset_state(
172 self,
172 self,
173 filename,
173 filename,
174 wc_tracked=False,
174 wc_tracked=False,
175 p1_tracked=False,
175 p1_tracked=False,
176 p2_info=False,
176 p2_info=False,
177 has_meaningful_mtime=True,
177 has_meaningful_mtime=True,
178 has_meaningful_data=True,
178 has_meaningful_data=True,
179 parentfiledata=None,
179 parentfiledata=None,
180 ):
180 ):
181 """Set a entry to a given state, diregarding all previous state
181 """Set a entry to a given state, diregarding all previous state
182
182
183 This is to be used by the part of the dirstate API dedicated to
183 This is to be used by the part of the dirstate API dedicated to
184 adjusting the dirstate after a update/merge.
184 adjusting the dirstate after a update/merge.
185
185
186 note: calling this might result to no entry existing at all if the
186 note: calling this might result to no entry existing at all if the
187 dirstate map does not see any point at having one for this file
187 dirstate map does not see any point at having one for this file
188 anymore.
188 anymore.
189 """
189 """
190 # copy information are now outdated
190 # copy information are now outdated
191 # (maybe new information should be in directly passed to this function)
191 # (maybe new information should be in directly passed to this function)
192 self.copymap.pop(filename, None)
192 self.copymap.pop(filename, None)
193
193
194 if not (p1_tracked or p2_info or wc_tracked):
194 if not (p1_tracked or p2_info or wc_tracked):
195 old_entry = self._map.get(filename)
195 old_entry = self._map.get(filename)
196 self._drop_entry(filename)
196 self._drop_entry(filename)
197 self._dirs_decr(filename, old_entry=old_entry)
197 self._dirs_decr(filename, old_entry=old_entry)
198 return
198 return
199
199
200 old_entry = self._map.get(filename)
200 old_entry = self._map.get(filename)
201 self._dirs_incr(filename, old_entry)
201 self._dirs_incr(filename, old_entry)
202 entry = DirstateItem(
202 entry = DirstateItem(
203 wc_tracked=wc_tracked,
203 wc_tracked=wc_tracked,
204 p1_tracked=p1_tracked,
204 p1_tracked=p1_tracked,
205 p2_info=p2_info,
205 p2_info=p2_info,
206 has_meaningful_mtime=has_meaningful_mtime,
206 has_meaningful_mtime=has_meaningful_mtime,
207 parentfiledata=parentfiledata,
207 parentfiledata=parentfiledata,
208 )
208 )
209 self._insert_entry(filename, entry)
209 self._insert_entry(filename, entry)
210
210
211
211
class dirstatemap(_dirstatemapcommon):
    """Map encapsulating the dirstate's contents.

    The dirstate contains the following state:

    - `identity` is the identity of the dirstate file, which can be used to
      detect when changes have occurred to the dirstate file.

    - `parents` is a pair containing the parents of the working copy. The
      parents are updated by calling `setparents`.

    - the state map maps filenames to tuples of (state, mode, size, mtime),
      where state is a single character representing 'normal', 'added',
      'removed', or 'merged'. It is read by treating the dirstate as a
      dict. File state is updated by calling various methods (see each
      documentation for details):

      - `reset_state`,
      - `set_tracked`
      - `set_untracked`
      - `set_clean`
      - `set_possibly_dirty`

    - `copymap` maps destination filenames to their source filename.

    The dirstate also provides the following views onto the state:

    - `filefoldmap` is a dict mapping normalized filenames to the denormalized
      form that they appear as in the dirstate.

    - `dirfoldmap` is a dict mapping normalized directory names to the
      denormalized form that they appear as in the dirstate.
    """

    def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
        super(dirstatemap, self).__init__(
            ui, opener, root, nodeconstants, use_dirstate_v2
        )
        if self._use_dirstate_v2:
            # bug fix: the message used to render as
            # "Dirstate V2 not supportedi(should have ...)" because of a
            # trailing-"i" typo and a missing separator space.
            msg = "Dirstate V2 not supported "
            msg += "(should have detected unsupported requirement)"
            raise error.ProgrammingError(msg)

    ### Core data storage and access

    @propertycache
    def _map(self):
        # pre-fill the cached slot so that `read` can populate it in place
        self._map = {}
        self.read()
        return self._map

    @propertycache
    def copymap(self):
        self.copymap = {}
        # force a full read; parsing fills `copymap` as a side effect
        self._map
        return self.copymap

    def clear(self):
        """reset the map to an empty state parented to the null revision"""
        self._map.clear()
        self.copymap.clear()
        self.setparents(self._nodeconstants.nullid, self._nodeconstants.nullid)
        util.clearcachedproperty(self, b"_dirs")
        util.clearcachedproperty(self, b"_alldirs")
        util.clearcachedproperty(self, b"filefoldmap")
        util.clearcachedproperty(self, b"dirfoldmap")

    def items(self):
        return pycompat.iteritems(self._map)

    # forward for python2,3 compat
    iteritems = items

    def debug_iter(self, all):
        """
        Return an iterator of (filename, state, mode, size, mtime) tuples

        `all` is unused when Rust is not enabled
        """
        for (filename, item) in self.items():
            yield (filename, item.state, item.mode, item.size, item.mtime)

    def keys(self):
        return self._map.keys()

    ### reading/setting parents

    def parents(self):
        """return (and lazily read from disk) the working copy parents"""
        if not self._parents:
            try:
                fp = self._opendirstatefile()
                st = fp.read(2 * self._nodelen)
                fp.close()
            except IOError as err:
                if err.errno != errno.ENOENT:
                    raise
                # File doesn't exist, so the current state is empty
                st = b''

            l = len(st)
            if l == self._nodelen * 2:
                self._parents = (
                    st[: self._nodelen],
                    st[self._nodelen : 2 * self._nodelen],
                )
            elif l == 0:
                self._parents = (
                    self._nodeconstants.nullid,
                    self._nodeconstants.nullid,
                )
            else:
                # any other length means a truncated/corrupted file
                raise error.Abort(
                    _(b'working directory state appears damaged!')
                )

        return self._parents

    def setparents(self, p1, p2, fold_p2=False):
        """record the working copy parents, optionally folding p2 away

        Returns a dict of copy sources dropped while discarding merge
        state (only possibly non-empty when `fold_p2` is set).
        """
        self._parents = (p1, p2)
        self._dirtyparents = True
        copies = {}
        if fold_p2:
            for f, s in pycompat.iteritems(self._map):
                # Discard "merged" markers when moving away from a merge state
                if s.p2_info:
                    source = self.copymap.pop(f, None)
                    if source:
                        copies[f] = source
                    s.drop_merge_data()
        return copies

    ### disk interaction

    def read(self):
        """parse the on-disk dirstate file into `_map` and `copymap`"""
        # ignore HG_PENDING because identity is used only for writing
        self.identity = util.filestat.frompath(
            self._opener.join(self._filename)
        )

        try:
            fp = self._opendirstatefile()
            try:
                st = fp.read()
            finally:
                fp.close()
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            return
        if not st:
            return

        if util.safehasattr(parsers, b'dict_new_presized'):
            # Make an estimate of the number of files in the dirstate based on
            # its size. This trades wasting some memory for avoiding costly
            # resizes. Each entry have a prefix of 17 bytes followed by one or
            # two path names. Studies on various large-scale real-world repositories
            # found 54 bytes a reasonable upper limit for the average path names.
            # Copy entries are ignored for the sake of this estimate.
            self._map = parsers.dict_new_presized(len(st) // 71)

        # Python's garbage collector triggers a GC each time a certain number
        # of container objects (the number being defined by
        # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
        # for each file in the dirstate. The C version then immediately marks
        # them as not to be tracked by the collector. However, this has no
        # effect on when GCs are triggered, only on what objects the GC looks
        # into. This means that O(number of files) GCs are unavoidable.
        # Depending on when in the process's lifetime the dirstate is parsed,
        # this can get very expensive. As a workaround, disable GC while
        # parsing the dirstate.
        #
        # (we cannot decorate the function directly since it is in a C module)
        parse_dirstate = util.nogc(parsers.parse_dirstate)
        p = parse_dirstate(self._map, self.copymap, st)
        if not self._dirtyparents:
            self.setparents(*p)

        # Avoid excess attribute lookups by fast pathing certain checks
        self.__contains__ = self._map.__contains__
        self.__getitem__ = self._map.__getitem__
        self.get = self._map.get

    def write(self, _tr, st, now):
        """serialize the dirstate (v1 format) into the open file `st`"""
        d = parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
        st.write(d)
        st.close()
        self._dirtyparents = False

    def _opendirstatefile(self):
        fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
        if self._pendingmode is not None and self._pendingmode != mode:
            fp.close()
            raise error.Abort(
                _(b'working directory state may be changed parallelly')
            )
        self._pendingmode = mode
        return fp

    @propertycache
    def identity(self):
        # populated as a side effect of reading `_map`
        self._map
        return self.identity

    ### code related to maintaining and accessing "extra" property
    # (e.g. "has_dir")

    def _dirs_incr(self, filename, old_entry=None):
        """increment the dirstate counter if applicable"""
        if (
            old_entry is None or old_entry.removed
        ) and "_dirs" in self.__dict__:
            self._dirs.addpath(filename)
        if old_entry is None and "_alldirs" in self.__dict__:
            self._alldirs.addpath(filename)

    def _dirs_decr(self, filename, old_entry=None, remove_variant=False):
        """decrement the dirstate counter if applicable"""
        if old_entry is not None:
            if "_dirs" in self.__dict__ and not old_entry.removed:
                self._dirs.delpath(filename)
            if "_alldirs" in self.__dict__ and not remove_variant:
                self._alldirs.delpath(filename)
        elif remove_variant and "_alldirs" in self.__dict__:
            self._alldirs.addpath(filename)
        if "filefoldmap" in self.__dict__:
            normed = util.normcase(filename)
            self.filefoldmap.pop(normed, None)

    @propertycache
    def filefoldmap(self):
        """Returns a dictionary mapping normalized case paths to their
        non-normalized versions.
        """
        try:
            makefilefoldmap = parsers.make_file_foldmap
        except AttributeError:
            pass
        else:
            return makefilefoldmap(
                self._map, util.normcasespec, util.normcasefallback
            )

        f = {}
        normcase = util.normcase
        for name, s in pycompat.iteritems(self._map):
            if not s.removed:
                f[normcase(name)] = name
        f[b'.'] = b'.'  # prevents useless util.fspath() invocation
        return f

    @propertycache
    def dirfoldmap(self):
        f = {}
        normcase = util.normcase
        for name in self._dirs:
            f[normcase(name)] = name
        return f

    def hastrackeddir(self, d):
        """
        Returns True if the dirstate contains a tracked (not removed) file
        in this directory.
        """
        return d in self._dirs

    def hasdir(self, d):
        """
        Returns True if the dirstate contains a file (tracked or removed)
        in this directory.
        """
        return d in self._alldirs

    @propertycache
    def _dirs(self):
        return pathutil.dirs(self._map, only_tracked=True)

    @propertycache
    def _alldirs(self):
        return pathutil.dirs(self._map)

    ### code related to manipulation of entries and copy-sources

    def _refresh_entry(self, f, entry):
        if not entry.any_tracked:
            # an entry tracked nowhere has no reason to stay in the map
            self._map.pop(f, None)

    def _insert_entry(self, f, entry):
        self._map[f] = entry

    def _drop_entry(self, f):
        self._map.pop(f, None)
        self.copymap.pop(f, None)
504
504
505
505
506 if rustmod is not None:
506 if rustmod is not None:
507
507
508 class dirstatemap(_dirstatemapcommon):
508 class dirstatemap(_dirstatemapcommon):
        def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
            """Initialize the Rust-backed dirstate map.

            The actual map content is loaded lazily by the `_map` property.
            """
            super(dirstatemap, self).__init__(
                ui, opener, root, nodeconstants, use_dirstate_v2
            )
            # parsed lazily by the `docket` property (dirstate-v2 only)
            self._docket = None
514
514
515 ### Core data storage and access
515 ### Core data storage and access
516
516
        @property
        def docket(self):
            """The parsed dirstate-v2 docket, read lazily from disk.

            Raises ProgrammingError when the dirstate is not in v2 format,
            since only the v2 format has a docket file.
            """
            if not self._docket:
                if not self._use_dirstate_v2:
                    raise error.ProgrammingError(
                        b'dirstate only has a docket in v2 format'
                    )
                self._docket = docketmod.DirstateDocket.parse(
                    self._readdirstatefile(), self._nodeconstants
                )
            return self._docket
528
528
        @propertycache
        def _map(self):
            """
            Fills the Dirstatemap when called.
            """
            # ignore HG_PENDING because identity is used only for writing
            self.identity = util.filestat.frompath(
                self._opener.join(self._filename)
            )

            if self._use_dirstate_v2:
                if self.docket.uuid:
                    # TODO: use mmap when possible
                    data = self._opener.read(self.docket.data_filename())
                else:
                    # no uuid: the docket has no associated data file yet
                    data = b''
                self._map = rustmod.DirstateMap.new_v2(
                    data, self.docket.data_size, self.docket.tree_metadata
                )
                parents = self.docket.parents
            else:
                self._map, parents = rustmod.DirstateMap.new_v1(
                    self._readdirstatefile()
                )

            if parents and not self._dirtyparents:
                self.setparents(*parents)

            # Avoid excess attribute lookups by fast pathing certain checks
            self.__contains__ = self._map.__contains__
            self.__getitem__ = self._map.__getitem__
            self.get = self._map.get
            return self._map
561
561
        @property
        def copymap(self):
            """map of copy destinations to sources, backed by the Rust map"""
            return self._map.copymap()
565
565
        def debug_iter(self, all):
            """
            Return an iterator of (filename, state, mode, size, mtime) tuples

            `all`: also include with `state == b' '` dirstate tree nodes that
            don't have an associated `DirstateItem`.

            """
            # iteration is implemented on the Rust side
            return self._map.debug_iter(all)
575
575
576 def clear(self):
576 def clear(self):
577 self._map.clear()
577 self._map.clear()
578 self.setparents(
578 self.setparents(
579 self._nodeconstants.nullid, self._nodeconstants.nullid
579 self._nodeconstants.nullid, self._nodeconstants.nullid
580 )
580 )
581 util.clearcachedproperty(self, b"_dirs")
581 util.clearcachedproperty(self, b"_dirs")
582 util.clearcachedproperty(self, b"_alldirs")
582 util.clearcachedproperty(self, b"_alldirs")
583 util.clearcachedproperty(self, b"dirfoldmap")
583 util.clearcachedproperty(self, b"dirfoldmap")
584
584
        def items(self):
            """iterate over (filename, DirstateItem) pairs"""
            return self._map.items()

        # forward for python2,3 compat
        iteritems = items
590
590
        def keys(self):
            """iterate over the tracked filenames"""
            return iter(self._map)
593
593
594 ### reading/setting parents
594 ### reading/setting parents
595
595
        def setparents(self, p1, p2, fold_p2=False):
            """record the working copy parents, optionally folding p2 away

            Returns a dict of copy sources dropped while discarding merge
            state (only possibly non-empty when `fold_p2` is set).
            """
            self._parents = (p1, p2)
            self._dirtyparents = True
            copies = {}
            if fold_p2:
                # Collect into an intermediate list to avoid a `RuntimeError`
                # exception due to mutation during iteration.
                # TODO: move this the whole loop to Rust where `iter_mut`
                # enables in-place mutation of elements of a collection while
                # iterating it, without mutating the collection itself.
                files_with_p2_info = [
                    f for f, s in self._map.items() if s.p2_info
                ]
                rust_map = self._map
                for f in files_with_p2_info:
                    e = rust_map.get(f)
                    source = self.copymap.pop(f, None)
                    if source:
                        copies[f] = source
                    e.drop_merge_data()
                    # write the mutated item back into the Rust map
                    rust_map.set_dirstate_item(f, e)
            return copies
618
618
        def parents(self):
            """return (and lazily read from disk) the working copy parents"""
            if not self._parents:
                if self._use_dirstate_v2:
                    # v2 stores the parents in the docket
                    self._parents = self.docket.parents
                else:
                    read_len = self._nodelen * 2
                    st = self._readdirstatefile(read_len)
                    l = len(st)
                    if l == read_len:
                        self._parents = (
                            st[: self._nodelen],
                            st[self._nodelen : 2 * self._nodelen],
                        )
                    elif l == 0:
                        # empty/absent file: both parents are null
                        self._parents = (
                            self._nodeconstants.nullid,
                            self._nodeconstants.nullid,
                        )
                    else:
                        # any other length means a truncated/corrupted file
                        raise error.Abort(
                            _(b'working directory state appears damaged!')
                        )

            return self._parents
643
643
644 ### disk interaction
644 ### disk interaction
645
645
        @propertycache
        def identity(self):
            """the file identity of the dirstate at read time

            Populated as a side effect of loading `_map`.
            """
            self._map
            return self.identity
650
650
    def write(self, tr, st, now):
        """Serialize the dirstate map to disk.

        ``tr`` is the active transaction (may be None), ``st`` is the
        already-open ``.hg/dirstate`` file object (opened with
        ``atomictemp=True``, so the real file only changes on close), and
        ``now`` is the timestamp used to invalidate racy mtimes.
        """
        if not self._use_dirstate_v2:
            # v1: a single flat file containing the two parents followed by
            # the packed entries.
            p1, p2 = self.parents()
            packed = self._map.write_v1(p1, p2, now)
            st.write(packed)
            st.close()
            self._dirtyparents = False
            return

        # We can only append to an existing data file if there is one
        can_append = self.docket.uuid is not None
        packed, meta, append = self._map.write_v2(now, can_append)
        if append:
            # Incremental write: extend the existing data file in place and
            # rewrite only the small docket that points into it.
            docket = self.docket
            data_filename = docket.data_filename()
            if tr:
                # Register the pre-append size so a transaction abort can
                # truncate the data file back to its previous length.
                tr.add(data_filename, docket.data_size)
            with self._opener(data_filename, b'r+b') as fp:
                fp.seek(docket.data_size)
                assert fp.tell() == docket.data_size
                written = fp.write(packed)
                if written is not None:  # py2 may return None
                    assert written == len(packed), (written, len(packed))
            docket.data_size += len(packed)
            docket.parents = self.parents()
            docket.tree_metadata = meta
            st.write(docket.serialize())
            st.close()
        else:
            # Full rewrite: emit a brand-new data file (new uuid), then a
            # docket referencing it, then clean up the old data file.
            old_docket = self.docket
            new_docket = docketmod.DirstateDocket.with_new_uuid(
                self.parents(), len(packed), meta
            )
            data_filename = new_docket.data_filename()
            if tr:
                tr.add(data_filename, 0)
            self._opener.write(data_filename, packed)
            # Write the new docket after the new data file has been
            # written. Because `st` was opened with `atomictemp=True`,
            # the actual `.hg/dirstate` file is only affected on close.
            st.write(new_docket.serialize())
            st.close()
            # Remove the old data file after the new docket pointing to
            # the new data file was written.
            if old_docket.uuid:
                data_filename = old_docket.data_filename()
                unlink = lambda _tr=None: self._opener.unlink(data_filename)
                if tr:
                    # Defer deletion until the transaction commits, so an
                    # abort can still roll back to the old data file.
                    category = b"dirstate-v2-clean-" + old_docket.uuid
                    tr.addpostclose(category, unlink)
                else:
                    unlink()
            self._docket = new_docket
        # Reload from the newly-written file
        util.clearcachedproperty(self, b"_map")
        self._dirtyparents = False
707
707
708 def _opendirstatefile(self):
708 def _opendirstatefile(self):
709 fp, mode = txnutil.trypending(
709 fp, mode = txnutil.trypending(
710 self._root, self._opener, self._filename
710 self._root, self._opener, self._filename
711 )
711 )
712 if self._pendingmode is not None and self._pendingmode != mode:
712 if self._pendingmode is not None and self._pendingmode != mode:
713 fp.close()
713 fp.close()
714 raise error.Abort(
714 raise error.Abort(
715 _(b'working directory state may be changed parallelly')
715 _(b'working directory state may be changed parallelly')
716 )
716 )
717 self._pendingmode = mode
717 self._pendingmode = mode
718 return fp
718 return fp
719
719
720 def _readdirstatefile(self, size=-1):
720 def _readdirstatefile(self, size=-1):
721 try:
721 try:
722 with self._opendirstatefile() as fp:
722 with self._opendirstatefile() as fp:
723 return fp.read(size)
723 return fp.read(size)
724 except IOError as err:
724 except IOError as err:
725 if err.errno != errno.ENOENT:
725 if err.errno != errno.ENOENT:
726 raise
726 raise
727 # File doesn't exist, so the current state is empty
727 # File doesn't exist, so the current state is empty
728 return b''
728 return b''
729
729
730 ### code related to maintaining and accessing "extra" property
730 ### code related to maintaining and accessing "extra" property
731 # (e.g. "has_dir")
731 # (e.g. "has_dir")
732
732
    @propertycache
    def filefoldmap(self):
        """Returns a dictionary mapping normalized case paths to their
        non-normalized versions.
        """
        # Delegates to the (possibly Rust-backed) map implementation.
        return self._map.filefoldmapasdict()
739
739
    def hastrackeddir(self, d):
        """Return True if directory ``d`` contains at least one tracked file."""
        return self._map.hastrackeddir(d)
742
742
    def hasdir(self, d):
        """Return True if directory ``d`` contains any dirstate entry,
        tracked or not."""
        return self._map.hasdir(d)
745
745
746 @propertycache
746 @propertycache
747 def dirfoldmap(self):
747 def dirfoldmap(self):
748 f = {}
748 f = {}
749 normcase = util.normcase
749 normcase = util.normcase
750 for name in self._map.tracked_dirs():
750 for name in self._map.tracked_dirs():
751 f[normcase(name)] = name
751 f[normcase(name)] = name
752 return f
752 return f
753
753
754 ### code related to manipulation of entries and copy-sources
754 ### code related to manipulation of entries and copy-sources
755
755
756 def _refresh_entry(self, f, entry):
756 def _refresh_entry(self, f, entry):
757 if not entry.any_tracked:
757 if not entry.any_tracked:
758 self._map.drop_item_and_copy_source(f)
758 self._map.drop_item_and_copy_source(f)
759 else:
759 else:
760 self._map.addfile(f, entry)
760 self._map.addfile(f, entry)
761
761
    def _insert_entry(self, f, entry):
        """Add ``entry`` for path ``f`` to the underlying map."""
        self._map.addfile(f, entry)
764
764
    def _drop_entry(self, f):
        """Remove path ``f``'s entry and any recorded copy source."""
        self._map.drop_item_and_copy_source(f)
767
767
    def __setitem__(self, key, value):
        """Store ``value`` (a DirstateItem) for path ``key``.

        The assert documents the expected type; it is stripped under
        ``python -O``, so it is a debugging aid, not input validation.
        """
        assert isinstance(value, DirstateItem)
        self._map.set_dirstate_item(key, value)
General Comments 0
You need to be logged in to leave comments. Login now