dirstate: re-blacken file...
Augie Fackler
r44010:dc9c570a default
# dirstate.py - working directory tracking for mercurial
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import collections
import contextlib
import errno
import os
import stat

from .i18n import _
from .node import nullid
from .pycompat import delattr

from hgdemandimport import tracing

from . import (
    encoding,
    error,
    match as matchmod,
    pathutil,
    policy,
    pycompat,
    scmutil,
    txnutil,
    util,
)

from .interfaces import (
    dirstate as intdirstate,
    util as interfaceutil,
)

parsers = policy.importmod('parsers')
rustmod = policy.importrust('dirstate')

propertycache = util.propertycache
filecache = scmutil.filecache
_rangemask = 0x7FFFFFFF

dirstatetuple = parsers.dirstatetuple


class repocache(filecache):
    """filecache for files in .hg/"""

    def join(self, obj, fname):
        return obj._opener.join(fname)


class rootcache(filecache):
    """filecache for files in the repository root"""

    def join(self, obj, fname):
        return obj._join(fname)


def _getfsnow(vfs):
    '''Get "now" timestamp on filesystem'''
    tmpfd, tmpname = vfs.mkstemp()
    try:
        return os.fstat(tmpfd)[stat.ST_MTIME]
    finally:
        os.close(tmpfd)
        vfs.unlink(tmpname)


@interfaceutil.implementer(intdirstate.idirstate)
class dirstate(object):
    def __init__(self, opener, ui, root, validate, sparsematchfn):
        '''Create a new dirstate object.

        opener is an open()-like callable that can be used to open the
        dirstate file; root is the root of the directory tracked by
        the dirstate.
        '''
        self._opener = opener
        self._validate = validate
        self._root = root
        self._sparsematchfn = sparsematchfn
        # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
        # UNC path pointing to root share (issue4557)
        self._rootdir = pathutil.normasprefix(root)
        self._dirty = False
        self._lastnormaltime = 0
        self._ui = ui
        self._filecache = {}
        self._parentwriters = 0
        self._filename = b'dirstate'
        self._pendingfilename = b'%s.pending' % self._filename
        self._plchangecallbacks = {}
        self._origpl = None
        self._updatedfiles = set()
        self._mapcls = dirstatemap
        # Access and cache cwd early, so we don't access it for the first time
        # after a working-copy update caused it to not exist (accessing it then
        # raises an exception).
        self._cwd

    @contextlib.contextmanager
    def parentchange(self):
        '''Context manager for handling dirstate parents.

        If an exception occurs in the scope of the context manager,
        the incoherent dirstate won't be written when wlock is
        released.
        '''
        self._parentwriters += 1
        yield
        # Typically we want the "undo" step of a context manager in a
        # finally block so it happens even when an exception
        # occurs. In this case, however, we only want to decrement
        # parentwriters if the code in the with statement exits
        # normally, so we don't have a try/finally here on purpose.
        self._parentwriters -= 1

    def pendingparentchange(self):
        '''Returns true if the dirstate is in the middle of a set of changes
        that modify the dirstate parent.
        '''
        return self._parentwriters > 0

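    # Usage sketch (assumes a `repo` object exposing this instance as
    # `repo.dirstate`): parent moves are wrapped in parentchange() so that
    # setparents() further below does not raise ValueError:
    #
    #     with repo.dirstate.parentchange():
    #         repo.dirstate.setparents(newnode)
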
    @propertycache
    def _map(self):
        """Return the dirstate contents (see documentation for dirstatemap)."""
        self._map = self._mapcls(self._ui, self._opener, self._root)
        return self._map

    @property
    def _sparsematcher(self):
        """The matcher for the sparse checkout.

        The working directory may not include every file from a manifest. The
        matcher obtained by this property will match a path if it is to be
        included in the working directory.
        """
        # TODO there is potential to cache this property. For now, the matcher
        # is resolved on every access. (But the called function does use a
        # cache to keep the lookup fast.)
        return self._sparsematchfn()

    @repocache(b'branch')
    def _branch(self):
        try:
            return self._opener.read(b"branch").strip() or b"default"
        except IOError as inst:
            if inst.errno != errno.ENOENT:
                raise
            return b"default"

    @property
    def _pl(self):
        return self._map.parents()

    def hasdir(self, d):
        return self._map.hastrackeddir(d)

    @rootcache(b'.hgignore')
    def _ignore(self):
        files = self._ignorefiles()
        if not files:
            return matchmod.never()

        pats = [b'include:%s' % f for f in files]
        return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)

    @propertycache
    def _slash(self):
        return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'

    @propertycache
    def _checklink(self):
        return util.checklink(self._root)

    @propertycache
    def _checkexec(self):
        return util.checkexec(self._root)

    @propertycache
    def _checkcase(self):
        return not util.fscasesensitive(self._join(b'.hg'))

    def _join(self, f):
        # much faster than os.path.join()
        # it's safe because f is always a relative path
        return self._rootdir + f

    def flagfunc(self, buildfallback):
        if self._checklink and self._checkexec:

            def f(x):
                try:
                    st = os.lstat(self._join(x))
                    if util.statislink(st):
                        return b'l'
                    if util.statisexec(st):
                        return b'x'
                except OSError:
                    pass
                return b''

            return f

        fallback = buildfallback()
        if self._checklink:

            def f(x):
                if os.path.islink(self._join(x)):
                    return b'l'
                if b'x' in fallback(x):
                    return b'x'
                return b''

            return f
        if self._checkexec:

            def f(x):
                if b'l' in fallback(x):
                    return b'l'
                if util.isexec(self._join(x)):
                    return b'x'
                return b''

            return f
        else:
            return fallback

    @propertycache
    def _cwd(self):
        # internal config: ui.forcecwd
        forcecwd = self._ui.config(b'ui', b'forcecwd')
        if forcecwd:
            return forcecwd
        return encoding.getcwd()

    def getcwd(self):
        '''Return the path from which a canonical path is calculated.

        This path should be used to resolve file patterns or to convert
        canonical paths back to file paths for display. It shouldn't be
        used to get real file paths. Use vfs functions instead.
        '''
        cwd = self._cwd
        if cwd == self._root:
            return b''
        # self._root ends with a path separator if self._root is '/' or 'C:\'
        rootsep = self._root
        if not util.endswithsep(rootsep):
            rootsep += pycompat.ossep
        if cwd.startswith(rootsep):
            return cwd[len(rootsep) :]
        else:
            # we're outside the repo. return an absolute path.
            return cwd

    def pathto(self, f, cwd=None):
        if cwd is None:
            cwd = self.getcwd()
        path = util.pathto(self._root, cwd, f)
        if self._slash:
            return util.pconvert(path)
        return path

    def __getitem__(self, key):
        '''Return the current state of key (a filename) in the dirstate.

        States are:
        n  normal
        m  needs merging
        r  marked for removal
        a  marked for addition
        ?  not tracked
        '''
        return self._map.get(key, (b"?",))[0]

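    # Usage sketch (hypothetical filenames): indexing returns the one-letter
    # state documented above, e.g.
    #
    #     dirstate[b'tracked.txt']   # -> b'n'
    #     dirstate[b'not-tracked']   # -> b'?'
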
    def __contains__(self, key):
        return key in self._map

    def __iter__(self):
        return iter(sorted(self._map))

    def items(self):
        return pycompat.iteritems(self._map)

    iteritems = items

    def parents(self):
        return [self._validate(p) for p in self._pl]

    def p1(self):
        return self._validate(self._pl[0])

    def p2(self):
        return self._validate(self._pl[1])

    def branch(self):
        return encoding.tolocal(self._branch)

    def setparents(self, p1, p2=nullid):
        """Set dirstate parents to p1 and p2.

        When moving from two parents to one, 'm' merged entries are
        adjusted to normal, and previous copy records are discarded and
        returned by the call.

        See localrepo.setparents()
312 """
312 """
313 if self._parentwriters == 0:
313 if self._parentwriters == 0:
314 raise ValueError(
314 raise ValueError(
315 b"cannot set dirstate parent outside of "
315 b"cannot set dirstate parent outside of "
316 b"dirstate.parentchange context manager"
316 b"dirstate.parentchange context manager"
317 )
317 )
318
318
319 self._dirty = True
319 self._dirty = True
320 oldp2 = self._pl[1]
320 oldp2 = self._pl[1]
321 if self._origpl is None:
321 if self._origpl is None:
322 self._origpl = self._pl
322 self._origpl = self._pl
323 self._map.setparents(p1, p2)
323 self._map.setparents(p1, p2)
324 copies = {}
324 copies = {}
325 if oldp2 != nullid and p2 == nullid:
325 if oldp2 != nullid and p2 == nullid:
326 candidatefiles = self._map.nonnormalset.union(
326 candidatefiles = self._map.nonnormalset.union(
327 self._map.otherparentset
327 self._map.otherparentset
328 )
328 )
329 for f in candidatefiles:
329 for f in candidatefiles:
330 s = self._map.get(f)
330 s = self._map.get(f)
331 if s is None:
331 if s is None:
332 continue
332 continue
333
333
334 # Discard 'm' markers when moving away from a merge state
334 # Discard 'm' markers when moving away from a merge state
335 if s[0] == b'm':
335 if s[0] == b'm':
336 source = self._map.copymap.get(f)
336 source = self._map.copymap.get(f)
337 if source:
337 if source:
338 copies[f] = source
338 copies[f] = source
339 self.normallookup(f)
339 self.normallookup(f)
340 # Also fix up otherparent markers
340 # Also fix up otherparent markers
341 elif s[0] == b'n' and s[2] == -2:
341 elif s[0] == b'n' and s[2] == -2:
342 source = self._map.copymap.get(f)
342 source = self._map.copymap.get(f)
343 if source:
343 if source:
344 copies[f] = source
344 copies[f] = source
345 self.add(f)
345 self.add(f)
346 return copies
346 return copies
347
347
348 def setbranch(self, branch):
348 def setbranch(self, branch):
349 self.__class__._branch.set(self, encoding.fromlocal(branch))
349 self.__class__._branch.set(self, encoding.fromlocal(branch))
350 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
350 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
351 try:
351 try:
352 f.write(self._branch + b'\n')
352 f.write(self._branch + b'\n')
353 f.close()
353 f.close()
354
354
355 # make sure filecache has the correct stat info for _branch after
355 # make sure filecache has the correct stat info for _branch after
356 # replacing the underlying file
356 # replacing the underlying file
357 ce = self._filecache[b'_branch']
357 ce = self._filecache[b'_branch']
358 if ce:
358 if ce:
359 ce.refresh()
359 ce.refresh()
360 except: # re-raises
360 except: # re-raises
361 f.discard()
361 f.discard()
362 raise
362 raise
363
363
364 def invalidate(self):
364 def invalidate(self):
365 '''Causes the next access to reread the dirstate.
365 '''Causes the next access to reread the dirstate.
366
366
367 This is different from localrepo.invalidatedirstate() because it always
367 This is different from localrepo.invalidatedirstate() because it always
368 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
368 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
369 check whether the dirstate has changed before rereading it.'''
369 check whether the dirstate has changed before rereading it.'''
370
370
371 for a in ("_map", "_branch", "_ignore"):
371 for a in ("_map", "_branch", "_ignore"):
372 if a in self.__dict__:
372 if a in self.__dict__:
373 delattr(self, a)
373 delattr(self, a)
374 self._lastnormaltime = 0
374 self._lastnormaltime = 0
375 self._dirty = False
375 self._dirty = False
376 self._updatedfiles.clear()
376 self._updatedfiles.clear()
377 self._parentwriters = 0
377 self._parentwriters = 0
378 self._origpl = None
378 self._origpl = None
379
379
380 def copy(self, source, dest):
380 def copy(self, source, dest):
381 """Mark dest as a copy of source. Unmark dest if source is None."""
381 """Mark dest as a copy of source. Unmark dest if source is None."""
382 if source == dest:
382 if source == dest:
383 return
383 return
384 self._dirty = True
384 self._dirty = True
385 if source is not None:
385 if source is not None:
386 self._map.copymap[dest] = source
386 self._map.copymap[dest] = source
387 self._updatedfiles.add(source)
387 self._updatedfiles.add(source)
388 self._updatedfiles.add(dest)
388 self._updatedfiles.add(dest)
389 elif self._map.copymap.pop(dest, None):
389 elif self._map.copymap.pop(dest, None):
390 self._updatedfiles.add(dest)
390 self._updatedfiles.add(dest)
391
391
392 def copied(self, file):
392 def copied(self, file):
393 return self._map.copymap.get(file, None)
393 return self._map.copymap.get(file, None)
394
394
395 def copies(self):
395 def copies(self):
396 return self._map.copymap
396 return self._map.copymap
397
397
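    # Usage sketch (hypothetical paths): after a rename or copy, the copy
    # source is remembered until the entry is dropped or unmarked, e.g.
    #
    #     dirstate.copy(b'old.txt', b'new.txt')
    #     dirstate.copied(b'new.txt')       # -> b'old.txt'
    #     dirstate.copy(None, b'new.txt')   # unmark the copy again
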
    def _addpath(self, f, state, mode, size, mtime):
        oldstate = self[f]
        if state == b'a' or oldstate == b'r':
            scmutil.checkfilename(f)
            if self._map.hastrackeddir(f):
                raise error.Abort(
                    _(b'directory %r already in dirstate') % pycompat.bytestr(f)
                )
            # shadows
            for d in util.finddirs(f):
                if self._map.hastrackeddir(d):
                    break
                entry = self._map.get(d)
                if entry is not None and entry[0] != b'r':
                    raise error.Abort(
                        _(b'file %r in dirstate clashes with %r')
                        % (pycompat.bytestr(d), pycompat.bytestr(f))
                    )
        self._dirty = True
        self._updatedfiles.add(f)
        self._map.addfile(f, oldstate, state, mode, size, mtime)

    def normal(self, f, parentfiledata=None):
        '''Mark a file normal and clean.

        parentfiledata: (mode, size, mtime) of the clean file

        parentfiledata should be computed from memory (for mode and
        size), as close as possible to the point where we determined
        the file was clean, to limit the risk of the file having been
        changed by an external process between the moment where the
        file was determined to be clean and now.'''
        if parentfiledata:
            (mode, size, mtime) = parentfiledata
        else:
            s = os.lstat(self._join(f))
            mode = s.st_mode
            size = s.st_size
            mtime = s[stat.ST_MTIME]
        self._addpath(f, b'n', mode, size & _rangemask, mtime & _rangemask)
        self._map.copymap.pop(f, None)
        if f in self._map.nonnormalset:
            self._map.nonnormalset.remove(f)
        if mtime > self._lastnormaltime:
            # Remember the most recent modification timeslot for status(),
            # to make sure we won't miss future size-preserving file content
            # modifications that happen within the same timeslot.
            self._lastnormaltime = mtime

    def normallookup(self, f):
        '''Mark a file normal, but possibly dirty.'''
        if self._pl[1] != nullid:
            # if there is a merge going on and the file was either
            # in state 'm' (-1) or coming from other parent (-2) before
            # being removed, restore that state.
            entry = self._map.get(f)
            if entry is not None:
                if entry[0] == b'r' and entry[2] in (-1, -2):
                    source = self._map.copymap.get(f)
                    if entry[2] == -1:
                        self.merge(f)
                    elif entry[2] == -2:
                        self.otherparent(f)
                    if source:
                        self.copy(source, f)
                    return
                if entry[0] == b'm' or entry[0] == b'n' and entry[2] == -2:
                    return
        self._addpath(f, b'n', 0, -1, -1)
        self._map.copymap.pop(f, None)

    def otherparent(self, f):
        '''Mark as coming from the other parent, always dirty.'''
        if self._pl[1] == nullid:
            raise error.Abort(
                _(b"setting %r to other parent only allowed in merges") % f
            )
        if f in self and self[f] == b'n':
            # merge-like
            self._addpath(f, b'm', 0, -2, -1)
        else:
            # add-like
            self._addpath(f, b'n', 0, -2, -1)
        self._map.copymap.pop(f, None)

    def add(self, f):
        '''Mark a file added.'''
        self._addpath(f, b'a', 0, -1, -1)
        self._map.copymap.pop(f, None)

    def remove(self, f):
        '''Mark a file removed.'''
        self._dirty = True
        oldstate = self[f]
        size = 0
        if self._pl[1] != nullid:
            entry = self._map.get(f)
            if entry is not None:
                # backup the previous state
                if entry[0] == b'm':  # merge
                    size = -1
                elif entry[0] == b'n' and entry[2] == -2:  # other parent
                    size = -2
                    self._map.otherparentset.add(f)
        self._updatedfiles.add(f)
        self._map.removefile(f, oldstate, size)
        if size == 0:
            self._map.copymap.pop(f, None)

    def merge(self, f):
        '''Mark a file merged.'''
        if self._pl[1] == nullid:
            return self.normallookup(f)
        return self.otherparent(f)

    def drop(self, f):
        '''Drop a file from the dirstate'''
        oldstate = self[f]
        if self._map.dropfile(f, oldstate):
            self._dirty = True
            self._updatedfiles.add(f)
            self._map.copymap.pop(f, None)

    def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
        if exists is None:
            exists = os.path.lexists(os.path.join(self._root, path))
        if not exists:
            # Maybe a path component exists
            if not ignoremissing and b'/' in path:
                d, f = path.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, None)
                folded = d + b"/" + f
            else:
                # No path components, preserve original case
                folded = path
        else:
            # recursively normalize leading directory components
            # against dirstate
            if b'/' in normed:
                d, f = normed.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, True)
                r = self._root + b"/" + d
                folded = d + b"/" + util.fspath(f, r)
            else:
                folded = util.fspath(normed, self._root)
            storemap[normed] = folded

        return folded

    def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
        normed = util.normcase(path)
        folded = self._map.filefoldmap.get(normed, None)
        if folded is None:
            if isknown:
                folded = path
            else:
                folded = self._discoverpath(
                    path, normed, ignoremissing, exists, self._map.filefoldmap
                )
        return folded

    def _normalize(self, path, isknown, ignoremissing=False, exists=None):
        normed = util.normcase(path)
        folded = self._map.filefoldmap.get(normed, None)
        if folded is None:
            folded = self._map.dirfoldmap.get(normed, None)
        if folded is None:
            if isknown:
                folded = path
            else:
                # store discovered result in dirfoldmap so that future
                # normalizefile calls don't start matching directories
                folded = self._discoverpath(
                    path, normed, ignoremissing, exists, self._map.dirfoldmap
                )
        return folded

    def normalize(self, path, isknown=False, ignoremissing=False):
        '''
        normalize the case of a pathname when on a casefolding filesystem

        isknown specifies whether the filename came from walking the
        disk, to avoid extra filesystem access.

        If ignoremissing is True, missing paths are returned
        unchanged. Otherwise, we try harder to normalize possibly
        existing path components.

        The normalized case is determined based on the following precedence:

        - version of name already stored in the dirstate
        - version of name stored on disk
        - version provided via command arguments
        '''

        if self._checkcase:
            return self._normalize(path, isknown, ignoremissing)
        return path

    def clear(self):
        self._map.clear()
        self._lastnormaltime = 0
        self._updatedfiles.clear()
        self._dirty = True

    def rebuild(self, parent, allfiles, changedfiles=None):
        if changedfiles is None:
            # Rebuild entire dirstate
            changedfiles = allfiles
            lastnormaltime = self._lastnormaltime
            self.clear()
            self._lastnormaltime = lastnormaltime

        if self._origpl is None:
            self._origpl = self._pl
        self._map.setparents(parent, nullid)
        for f in changedfiles:
            if f in allfiles:
                self.normallookup(f)
            else:
                self.drop(f)

        self._dirty = True

    def identity(self):
        '''Return identity of dirstate itself to detect changing in storage

        If identity of previous dirstate is equal to this, writing
        changes based on the former dirstate out can keep consistency.
        '''
        return self._map.identity

    def write(self, tr):
        if not self._dirty:
            return

        filename = self._filename
        if tr:
            # 'dirstate.write()' is not only for writing in-memory
            # changes out, but also for dropping ambiguous timestamp.
            # delayed writing re-raise "ambiguous timestamp issue".
            # See also the wiki page below for detail:
            # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan

            # emulate dropping timestamp in 'parsers.pack_dirstate'
            now = _getfsnow(self._opener)
            self._map.clearambiguoustimes(self._updatedfiles, now)

            # emulate that all 'dirstate.normal' results are written out
            self._lastnormaltime = 0
            self._updatedfiles.clear()

            # delay writing in-memory changes out
            tr.addfilegenerator(
                b'dirstate',
                (self._filename,),
                self._writedirstate,
                location=b'plain',
            )
            return

        st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
        self._writedirstate(st)

    def addparentchangecallback(self, category, callback):
        """add a callback to be called when the wd parents are changed

        Callback will be called with the following arguments:
            dirstate, (oldp1, oldp2), (newp1, newp2)

        Category is a unique identifier to allow overwriting an old callback
        with a newer callback.
        """
        self._plchangecallbacks[category] = callback

    def _writedirstate(self, st):
        # notify callbacks about parents change
        if self._origpl is not None and self._origpl != self._pl:
            for c, callback in sorted(
                pycompat.iteritems(self._plchangecallbacks)
            ):
                callback(self, self._origpl, self._pl)
            self._origpl = None
        # use the modification time of the newly created temporary file as the
        # filesystem's notion of 'now'
        now = util.fstat(st)[stat.ST_MTIME] & _rangemask

        # a large enough 'delaywrite' prevents 'pack_dirstate' from dropping
        # the timestamp of each entry in the dirstate, because of 'now > mtime'
687 delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
687 delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
688 if delaywrite > 0:
688 if delaywrite > 0:
689 # do we have any files to delay for?
689 # do we have any files to delay for?
690 for f, e in pycompat.iteritems(self._map):
690 for f, e in pycompat.iteritems(self._map):
691 if e[0] == b'n' and e[3] == now:
691 if e[0] == b'n' and e[3] == now:
692 import time # to avoid useless import
692 import time # to avoid useless import
693
693
694 # rather than sleep n seconds, sleep until the next
694 # rather than sleep n seconds, sleep until the next
695 # multiple of n seconds
695 # multiple of n seconds
696 clock = time.time()
696 clock = time.time()
697 start = int(clock) - (int(clock) % delaywrite)
697 start = int(clock) - (int(clock) % delaywrite)
698 end = start + delaywrite
698 end = start + delaywrite
699 time.sleep(end - clock)
699 time.sleep(end - clock)
700 now = end # trust our estimate that the end is near now
700 now = end # trust our estimate that the end is near now
701 break
701 break
702
702
703 self._map.write(st, now)
703 self._map.write(st, now)
704 self._lastnormaltime = 0
704 self._lastnormaltime = 0
705 self._dirty = False
705 self._dirty = False
706
706
707 def _dirignore(self, f):
707 def _dirignore(self, f):
708 if self._ignore(f):
708 if self._ignore(f):
709 return True
709 return True
710 for p in util.finddirs(f):
710 for p in util.finddirs(f):
711 if self._ignore(p):
711 if self._ignore(p):
712 return True
712 return True
713 return False
713 return False
714
714
715 def _ignorefiles(self):
715 def _ignorefiles(self):
716 files = []
716 files = []
717 if os.path.exists(self._join(b'.hgignore')):
717 if os.path.exists(self._join(b'.hgignore')):
718 files.append(self._join(b'.hgignore'))
718 files.append(self._join(b'.hgignore'))
719 for name, path in self._ui.configitems(b"ui"):
719 for name, path in self._ui.configitems(b"ui"):
720 if name == b'ignore' or name.startswith(b'ignore.'):
720 if name == b'ignore' or name.startswith(b'ignore.'):
721 # we need to use os.path.join here rather than self._join
721 # we need to use os.path.join here rather than self._join
722 # because path is arbitrary and user-specified
722 # because path is arbitrary and user-specified
723 files.append(os.path.join(self._rootdir, util.expandpath(path)))
723 files.append(os.path.join(self._rootdir, util.expandpath(path)))
724 return files
724 return files
725
725
726 def _ignorefileandline(self, f):
726 def _ignorefileandline(self, f):
727 files = collections.deque(self._ignorefiles())
727 files = collections.deque(self._ignorefiles())
728 visited = set()
728 visited = set()
729 while files:
729 while files:
730 i = files.popleft()
730 i = files.popleft()
731 patterns = matchmod.readpatternfile(
731 patterns = matchmod.readpatternfile(
732 i, self._ui.warn, sourceinfo=True
732 i, self._ui.warn, sourceinfo=True
733 )
733 )
734 for pattern, lineno, line in patterns:
734 for pattern, lineno, line in patterns:
735 kind, p = matchmod._patsplit(pattern, b'glob')
735 kind, p = matchmod._patsplit(pattern, b'glob')
736 if kind == b"subinclude":
736 if kind == b"subinclude":
737 if p not in visited:
737 if p not in visited:
738 files.append(p)
738 files.append(p)
739 continue
739 continue
740 m = matchmod.match(
740 m = matchmod.match(
741 self._root, b'', [], [pattern], warn=self._ui.warn
741 self._root, b'', [], [pattern], warn=self._ui.warn
742 )
742 )
743 if m(f):
743 if m(f):
744 return (i, lineno, line)
744 return (i, lineno, line)
745 visited.add(i)
745 visited.add(i)
746 return (None, -1, b"")
746 return (None, -1, b"")
747
747
748 def _walkexplicit(self, match, subrepos):
748 def _walkexplicit(self, match, subrepos):
749 '''Get stat data about the files explicitly specified by match.
749 '''Get stat data about the files explicitly specified by match.
750
750
751 Return a triple (results, dirsfound, dirsnotfound).
751 Return a triple (results, dirsfound, dirsnotfound).
752 - results is a mapping from filename to stat result. It also contains
752 - results is a mapping from filename to stat result. It also contains
753 listings mapping subrepos and .hg to None.
753 listings mapping subrepos and .hg to None.
754 - dirsfound is a list of files found to be directories.
754 - dirsfound is a list of files found to be directories.
755 - dirsnotfound is a list of files that the dirstate thinks are
755 - dirsnotfound is a list of files that the dirstate thinks are
756 directories and that were not found.'''
756 directories and that were not found.'''
757
757
758 def badtype(mode):
758 def badtype(mode):
759 kind = _(b'unknown')
759 kind = _(b'unknown')
760 if stat.S_ISCHR(mode):
760 if stat.S_ISCHR(mode):
761 kind = _(b'character device')
761 kind = _(b'character device')
762 elif stat.S_ISBLK(mode):
762 elif stat.S_ISBLK(mode):
763 kind = _(b'block device')
763 kind = _(b'block device')
764 elif stat.S_ISFIFO(mode):
764 elif stat.S_ISFIFO(mode):
765 kind = _(b'fifo')
765 kind = _(b'fifo')
766 elif stat.S_ISSOCK(mode):
766 elif stat.S_ISSOCK(mode):
767 kind = _(b'socket')
767 kind = _(b'socket')
768 elif stat.S_ISDIR(mode):
768 elif stat.S_ISDIR(mode):
769 kind = _(b'directory')
769 kind = _(b'directory')
770 return _(b'unsupported file type (type is %s)') % kind
770 return _(b'unsupported file type (type is %s)') % kind
771
771
772 matchedir = match.explicitdir
772 matchedir = match.explicitdir
773 badfn = match.bad
773 badfn = match.bad
774 dmap = self._map
774 dmap = self._map
775 lstat = os.lstat
775 lstat = os.lstat
776 getkind = stat.S_IFMT
776 getkind = stat.S_IFMT
777 dirkind = stat.S_IFDIR
777 dirkind = stat.S_IFDIR
778 regkind = stat.S_IFREG
778 regkind = stat.S_IFREG
779 lnkkind = stat.S_IFLNK
779 lnkkind = stat.S_IFLNK
780 join = self._join
780 join = self._join
781 dirsfound = []
781 dirsfound = []
782 foundadd = dirsfound.append
782 foundadd = dirsfound.append
783 dirsnotfound = []
783 dirsnotfound = []
784 notfoundadd = dirsnotfound.append
784 notfoundadd = dirsnotfound.append
785
785
786 if not match.isexact() and self._checkcase:
786 if not match.isexact() and self._checkcase:
787 normalize = self._normalize
787 normalize = self._normalize
788 else:
788 else:
789 normalize = None
789 normalize = None
790
790
791 files = sorted(match.files())
791 files = sorted(match.files())
792 subrepos.sort()
792 subrepos.sort()
793 i, j = 0, 0
793 i, j = 0, 0
794 while i < len(files) and j < len(subrepos):
794 while i < len(files) and j < len(subrepos):
795 subpath = subrepos[j] + b"/"
795 subpath = subrepos[j] + b"/"
796 if files[i] < subpath:
796 if files[i] < subpath:
797 i += 1
797 i += 1
798 continue
798 continue
799 while i < len(files) and files[i].startswith(subpath):
799 while i < len(files) and files[i].startswith(subpath):
800 del files[i]
800 del files[i]
801 j += 1
801 j += 1
802
802
803 if not files or b'' in files:
803 if not files or b'' in files:
804 files = [b'']
804 files = [b'']
805 # constructing the foldmap is expensive, so don't do it for the
805 # constructing the foldmap is expensive, so don't do it for the
806 # common case where files is ['']
806 # common case where files is ['']
807 normalize = None
807 normalize = None
808 results = dict.fromkeys(subrepos)
808 results = dict.fromkeys(subrepos)
809 results[b'.hg'] = None
809 results[b'.hg'] = None
810
810
811 for ff in files:
811 for ff in files:
812 if normalize:
812 if normalize:
813 nf = normalize(ff, False, True)
813 nf = normalize(ff, False, True)
814 else:
814 else:
815 nf = ff
815 nf = ff
816 if nf in results:
816 if nf in results:
817 continue
817 continue
818
818
819 try:
819 try:
820 st = lstat(join(nf))
820 st = lstat(join(nf))
821 kind = getkind(st.st_mode)
821 kind = getkind(st.st_mode)
822 if kind == dirkind:
822 if kind == dirkind:
823 if nf in dmap:
823 if nf in dmap:
824 # file replaced by dir on disk but still in dirstate
824 # file replaced by dir on disk but still in dirstate
825 results[nf] = None
825 results[nf] = None
826 if matchedir:
826 if matchedir:
827 matchedir(nf)
827 matchedir(nf)
828 foundadd((nf, ff))
828 foundadd((nf, ff))
829 elif kind == regkind or kind == lnkkind:
829 elif kind == regkind or kind == lnkkind:
830 results[nf] = st
830 results[nf] = st
831 else:
831 else:
832 badfn(ff, badtype(kind))
832 badfn(ff, badtype(kind))
833 if nf in dmap:
833 if nf in dmap:
834 results[nf] = None
834 results[nf] = None
835 except OSError as inst: # nf not found on disk - it is dirstate only
835 except OSError as inst: # nf not found on disk - it is dirstate only
836 if nf in dmap: # does it exactly match a missing file?
836 if nf in dmap: # does it exactly match a missing file?
837 results[nf] = None
837 results[nf] = None
838 else: # does it match a missing directory?
838 else: # does it match a missing directory?
839 if self._map.hasdir(nf):
839 if self._map.hasdir(nf):
840 if matchedir:
840 if matchedir:
841 matchedir(nf)
841 matchedir(nf)
842 notfoundadd(nf)
842 notfoundadd(nf)
843 else:
843 else:
844 badfn(ff, encoding.strtolocal(inst.strerror))
844 badfn(ff, encoding.strtolocal(inst.strerror))
845
845
846 # match.files() may contain explicitly-specified paths that shouldn't
846 # match.files() may contain explicitly-specified paths that shouldn't
847 # be taken; drop them from the list of files found. dirsfound/notfound
847 # be taken; drop them from the list of files found. dirsfound/notfound
848 # aren't filtered here because they will be tested later.
848 # aren't filtered here because they will be tested later.
849 if match.anypats():
849 if match.anypats():
850 for f in list(results):
850 for f in list(results):
851 if f == b'.hg' or f in subrepos:
851 if f == b'.hg' or f in subrepos:
852 # keep sentinel to disable further out-of-repo walks
852 # keep sentinel to disable further out-of-repo walks
853 continue
853 continue
854 if not match(f):
854 if not match(f):
855 del results[f]
855 del results[f]
856
856
857 # Case insensitive filesystems cannot rely on lstat() failing to detect
857 # Case insensitive filesystems cannot rely on lstat() failing to detect
858 # a case-only rename. Prune the stat object for any file that does not
858 # a case-only rename. Prune the stat object for any file that does not
859 # match the case in the filesystem, if there are multiple files that
859 # match the case in the filesystem, if there are multiple files that
860 # normalize to the same path.
860 # normalize to the same path.
861 if match.isexact() and self._checkcase:
861 if match.isexact() and self._checkcase:
862 normed = {}
862 normed = {}
863
863
864 for f, st in pycompat.iteritems(results):
864 for f, st in pycompat.iteritems(results):
865 if st is None:
865 if st is None:
866 continue
866 continue
867
867
868 nc = util.normcase(f)
868 nc = util.normcase(f)
869 paths = normed.get(nc)
869 paths = normed.get(nc)
870
870
871 if paths is None:
871 if paths is None:
872 paths = set()
872 paths = set()
873 normed[nc] = paths
873 normed[nc] = paths
874
874
875 paths.add(f)
875 paths.add(f)
876
876
877 for norm, paths in pycompat.iteritems(normed):
877 for norm, paths in pycompat.iteritems(normed):
878 if len(paths) > 1:
878 if len(paths) > 1:
879 for path in paths:
879 for path in paths:
880 folded = self._discoverpath(
880 folded = self._discoverpath(
881 path, norm, True, None, self._map.dirfoldmap
881 path, norm, True, None, self._map.dirfoldmap
882 )
882 )
883 if path != folded:
883 if path != folded:
884 results[path] = None
884 results[path] = None
885
885
886 return results, dirsfound, dirsnotfound
886 return results, dirsfound, dirsnotfound
887
887
888 def walk(self, match, subrepos, unknown, ignored, full=True):
888 def walk(self, match, subrepos, unknown, ignored, full=True):
889 '''
889 '''
890 Walk recursively through the directory tree, finding all files
890 Walk recursively through the directory tree, finding all files
891 matched by match.
891 matched by match.
892
892
893 If full is False, maybe skip some known-clean files.
893 If full is False, maybe skip some known-clean files.
894
894
895 Return a dict mapping filename to stat-like object (either
895 Return a dict mapping filename to stat-like object (either
896 mercurial.osutil.stat instance or return value of os.stat()).
896 mercurial.osutil.stat instance or return value of os.stat()).
897
897
898 '''
898 '''
899 # full is a flag that extensions that hook into walk can use -- this
899 # full is a flag that extensions that hook into walk can use -- this
900 # implementation doesn't use it at all. This satisfies the contract
900 # implementation doesn't use it at all. This satisfies the contract
901 # because we only guarantee a "maybe".
901 # because we only guarantee a "maybe".
902
902
903 if ignored:
903 if ignored:
904 ignore = util.never
904 ignore = util.never
905 dirignore = util.never
905 dirignore = util.never
906 elif unknown:
906 elif unknown:
907 ignore = self._ignore
907 ignore = self._ignore
908 dirignore = self._dirignore
908 dirignore = self._dirignore
909 else:
909 else:
910 # if not unknown and not ignored, drop dir recursion and step 2
910 # if not unknown and not ignored, drop dir recursion and step 2
911 ignore = util.always
911 ignore = util.always
912 dirignore = util.always
912 dirignore = util.always
913
913
914 matchfn = match.matchfn
914 matchfn = match.matchfn
915 matchalways = match.always()
915 matchalways = match.always()
916 matchtdir = match.traversedir
916 matchtdir = match.traversedir
917 dmap = self._map
917 dmap = self._map
918 listdir = util.listdir
918 listdir = util.listdir
919 lstat = os.lstat
919 lstat = os.lstat
920 dirkind = stat.S_IFDIR
920 dirkind = stat.S_IFDIR
921 regkind = stat.S_IFREG
921 regkind = stat.S_IFREG
922 lnkkind = stat.S_IFLNK
922 lnkkind = stat.S_IFLNK
923 join = self._join
923 join = self._join
924
924
925 exact = skipstep3 = False
925 exact = skipstep3 = False
926 if match.isexact(): # match.exact
926 if match.isexact(): # match.exact
927 exact = True
927 exact = True
928 dirignore = util.always # skip step 2
928 dirignore = util.always # skip step 2
929 elif match.prefix(): # match.match, no patterns
929 elif match.prefix(): # match.match, no patterns
930 skipstep3 = True
930 skipstep3 = True
931
931
932 if not exact and self._checkcase:
932 if not exact and self._checkcase:
933 normalize = self._normalize
933 normalize = self._normalize
934 normalizefile = self._normalizefile
934 normalizefile = self._normalizefile
935 skipstep3 = False
935 skipstep3 = False
936 else:
936 else:
937 normalize = self._normalize
937 normalize = self._normalize
938 normalizefile = None
938 normalizefile = None
939
939
940 # step 1: find all explicit files
940 # step 1: find all explicit files
941 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
941 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
942
942
943 skipstep3 = skipstep3 and not (work or dirsnotfound)
943 skipstep3 = skipstep3 and not (work or dirsnotfound)
944 work = [d for d in work if not dirignore(d[0])]
944 work = [d for d in work if not dirignore(d[0])]
945
945
946 # step 2: visit subdirectories
946 # step 2: visit subdirectories
947 def traverse(work, alreadynormed):
947 def traverse(work, alreadynormed):
948 wadd = work.append
948 wadd = work.append
949 while work:
949 while work:
950 tracing.counter('dirstate.walk work', len(work))
950 tracing.counter('dirstate.walk work', len(work))
951 nd = work.pop()
951 nd = work.pop()
952 visitentries = match.visitchildrenset(nd)
952 visitentries = match.visitchildrenset(nd)
953 if not visitentries:
953 if not visitentries:
954 continue
954 continue
955 if visitentries == b'this' or visitentries == b'all':
955 if visitentries == b'this' or visitentries == b'all':
956 visitentries = None
956 visitentries = None
957 skip = None
957 skip = None
958 if nd != b'':
958 if nd != b'':
959 skip = b'.hg'
959 skip = b'.hg'
960 try:
960 try:
961 with tracing.log('dirstate.walk.traverse listdir %s', nd):
961 with tracing.log('dirstate.walk.traverse listdir %s', nd):
962 entries = listdir(join(nd), stat=True, skip=skip)
962 entries = listdir(join(nd), stat=True, skip=skip)
963 except OSError as inst:
963 except OSError as inst:
964 if inst.errno in (errno.EACCES, errno.ENOENT):
964 if inst.errno in (errno.EACCES, errno.ENOENT):
965 match.bad(
965 match.bad(
966 self.pathto(nd), encoding.strtolocal(inst.strerror)
966 self.pathto(nd), encoding.strtolocal(inst.strerror)
967 )
967 )
968 continue
968 continue
969 raise
969 raise
970 for f, kind, st in entries:
970 for f, kind, st in entries:
971 # Some matchers may return files in the visitentries set,
971 # Some matchers may return files in the visitentries set,
972 # instead of 'this', if the matcher explicitly mentions them
972 # instead of 'this', if the matcher explicitly mentions them
973 # and is not an exactmatcher. This is acceptable; we do not
973 # and is not an exactmatcher. This is acceptable; we do not
974 # make any hard assumptions about file-or-directory below
974 # make any hard assumptions about file-or-directory below
975 # based on the presence of `f` in visitentries. If
975 # based on the presence of `f` in visitentries. If
976 # visitchildrenset returned a set, we can always skip the
976 # visitchildrenset returned a set, we can always skip the
977 # entries *not* in the set it provided regardless of whether
977 # entries *not* in the set it provided regardless of whether
978 # they're actually a file or a directory.
978 # they're actually a file or a directory.
979 if visitentries and f not in visitentries:
979 if visitentries and f not in visitentries:
980 continue
980 continue
981 if normalizefile:
981 if normalizefile:
982 # even though f might be a directory, we're only
982 # even though f might be a directory, we're only
983 # interested in comparing it to files currently in the
983 # interested in comparing it to files currently in the
984 # dmap -- therefore normalizefile is enough
984 # dmap -- therefore normalizefile is enough
985 nf = normalizefile(
985 nf = normalizefile(
986 nd and (nd + b"/" + f) or f, True, True
986 nd and (nd + b"/" + f) or f, True, True
987 )
987 )
988 else:
988 else:
989 nf = nd and (nd + b"/" + f) or f
989 nf = nd and (nd + b"/" + f) or f
990 if nf not in results:
990 if nf not in results:
991 if kind == dirkind:
991 if kind == dirkind:
992 if not ignore(nf):
992 if not ignore(nf):
993 if matchtdir:
993 if matchtdir:
994 matchtdir(nf)
994 matchtdir(nf)
995 wadd(nf)
995 wadd(nf)
996 if nf in dmap and (matchalways or matchfn(nf)):
996 if nf in dmap and (matchalways or matchfn(nf)):
997 results[nf] = None
997 results[nf] = None
998 elif kind == regkind or kind == lnkkind:
998 elif kind == regkind or kind == lnkkind:
999 if nf in dmap:
999 if nf in dmap:
1000 if matchalways or matchfn(nf):
1000 if matchalways or matchfn(nf):
1001 results[nf] = st
1001 results[nf] = st
1002 elif (matchalways or matchfn(nf)) and not ignore(
1002 elif (matchalways or matchfn(nf)) and not ignore(
1003 nf
1003 nf
1004 ):
1004 ):
1005 # unknown file -- normalize if necessary
1005 # unknown file -- normalize if necessary
1006 if not alreadynormed:
1006 if not alreadynormed:
1007 nf = normalize(nf, False, True)
1007 nf = normalize(nf, False, True)
1008 results[nf] = st
1008 results[nf] = st
1009 elif nf in dmap and (matchalways or matchfn(nf)):
1009 elif nf in dmap and (matchalways or matchfn(nf)):
1010 results[nf] = None
1010 results[nf] = None
1011
1011
1012 for nd, d in work:
1012 for nd, d in work:
1013 # alreadynormed means that processwork doesn't have to do any
1013 # alreadynormed means that processwork doesn't have to do any
1014 # expensive directory normalization
1014 # expensive directory normalization
1015 alreadynormed = not normalize or nd == d
1015 alreadynormed = not normalize or nd == d
1016 traverse([d], alreadynormed)
1016 traverse([d], alreadynormed)
1017
1017
1018 for s in subrepos:
1018 for s in subrepos:
1019 del results[s]
1019 del results[s]
1020 del results[b'.hg']
1020 del results[b'.hg']
1021
1021
1022 # step 3: visit remaining files from dmap
1022 # step 3: visit remaining files from dmap
1023 if not skipstep3 and not exact:
1023 if not skipstep3 and not exact:
1024 # If a dmap file is not in results yet, it was either
1024 # If a dmap file is not in results yet, it was either
1025 # a) not matching matchfn, b) ignored, c) missing, or d) under a
1025 # a) not matching matchfn, b) ignored, c) missing, or d) under a
1026 # symlink directory.
1026 # symlink directory.
1027 if not results and matchalways:
1027 if not results and matchalways:
1028 visit = [f for f in dmap]
1028 visit = [f for f in dmap]
1029 else:
1029 else:
1030 visit = [f for f in dmap if f not in results and matchfn(f)]
1030 visit = [f for f in dmap if f not in results and matchfn(f)]
1031 visit.sort()
1031 visit.sort()
1032
1032
1033 if unknown:
1033 if unknown:
1034 # unknown == True means we walked all dirs under the roots
1034 # unknown == True means we walked all dirs under the roots
1035 # that weren't ignored, and everything that matched was stat'ed
1035 # that weren't ignored, and everything that matched was stat'ed
1036 # and is already in results.
1036 # and is already in results.
1037 # The rest must thus be ignored or under a symlink.
1037 # The rest must thus be ignored or under a symlink.
1038 audit_path = pathutil.pathauditor(self._root, cached=True)
1038 audit_path = pathutil.pathauditor(self._root, cached=True)
1039
1039
1040 for nf in iter(visit):
1040 for nf in iter(visit):
1041 # If a stat for the same file was already added with a
1041 # If a stat for the same file was already added with a
1042 # different case, don't add one for this, since that would
1042 # different case, don't add one for this, since that would
1043 # make it appear as if the file exists under both names
1043 # make it appear as if the file exists under both names
1044 # on disk.
1044 # on disk.
1045 if (
1045 if (
1046 normalizefile
1046 normalizefile
1047 and normalizefile(nf, True, True) in results
1047 and normalizefile(nf, True, True) in results
1048 ):
1048 ):
1049 results[nf] = None
1049 results[nf] = None
1050 # Report ignored items in the dmap as long as they are not
1050 # Report ignored items in the dmap as long as they are not
1051 # under a symlink directory.
1051 # under a symlink directory.
1052 elif audit_path.check(nf):
1052 elif audit_path.check(nf):
1053 try:
1053 try:
1054 results[nf] = lstat(join(nf))
1054 results[nf] = lstat(join(nf))
1055 # file was just ignored, no links, and exists
1055 # file was just ignored, no links, and exists
1056 except OSError:
1056 except OSError:
1057 # file doesn't exist
1057 # file doesn't exist
1058 results[nf] = None
1058 results[nf] = None
1059 else:
1059 else:
1060 # It's either missing or under a symlink directory
1060 # It's either missing or under a symlink directory
1061 # which we report as missing in this case
1061 # which we report as missing in this case
1062 results[nf] = None
1062 results[nf] = None
1063 else:
1063 else:
1064 # We may not have walked the full directory tree above,
1064 # We may not have walked the full directory tree above,
1065 # so stat and check everything we missed.
1065 # so stat and check everything we missed.
1066 iv = iter(visit)
1066 iv = iter(visit)
1067 for st in util.statfiles([join(i) for i in visit]):
1067 for st in util.statfiles([join(i) for i in visit]):
1068 results[next(iv)] = st
1068 results[next(iv)] = st
1069 return results
1069 return results
1070
1070
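# A minimal, standalone sketch of the traversal-then-leftovers shape of
# steps 2 and 3 in walk() above, assuming plain Python containers; `tracked`
# and `toy_walk` are hypothetical stand-ins for dmap and the real walk, not
# Mercurial APIs.
import os
import stat as stat_mod

def toy_walk(root, tracked):
    """Return {relative path: stat result or None} for files on disk or in `tracked`."""
    results = {}
    work = ['']
    while work:  # step 2: visit subdirectories depth-first via a stack
        nd = work.pop()
        base = os.path.join(root, nd) if nd else root
        for entry in os.scandir(base):
            nf = os.path.join(nd, entry.name) if nd else entry.name
            st = entry.stat(follow_symlinks=False)
            if stat_mod.S_ISDIR(st.st_mode):
                work.append(nf)
            else:
                results[nf] = st
    for f in tracked:  # step 3: tracked files that were not seen on disk
        results.setdefault(f, None)
    return results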
1071 def status(self, match, subrepos, ignored, clean, unknown):
1071 def status(self, match, subrepos, ignored, clean, unknown):
1072 '''Determine the status of the working copy relative to the
1072 '''Determine the status of the working copy relative to the
1073 dirstate and return a pair of (unsure, status), where status is of type
1073 dirstate and return a pair of (unsure, status), where status is of type
1074 scmutil.status and:
1074 scmutil.status and:
1075
1075
1076 unsure:
1076 unsure:
1077 files that might have been modified since the dirstate was
1077 files that might have been modified since the dirstate was
1078 written, but need to be read to be sure (size is the same
1078 written, but need to be read to be sure (size is the same
1079 but mtime differs)
1079 but mtime differs)
1080 status.modified:
1080 status.modified:
1081 files that have definitely been modified since the dirstate
1081 files that have definitely been modified since the dirstate
1082 was written (different size or mode)
1082 was written (different size or mode)
1083 status.clean:
1083 status.clean:
1084 files that have definitely not been modified since the
1084 files that have definitely not been modified since the
1085 dirstate was written
1085 dirstate was written
1086 '''
1086 '''
1087 listignored, listclean, listunknown = ignored, clean, unknown
1087 listignored, listclean, listunknown = ignored, clean, unknown
1088 lookup, modified, added, unknown, ignored = [], [], [], [], []
1088 lookup, modified, added, unknown, ignored = [], [], [], [], []
1089 removed, deleted, clean = [], [], []
1089 removed, deleted, clean = [], [], []
1090
1090
1091 dmap = self._map
1091 dmap = self._map
1092 dmap.preload()
1092 dmap.preload()
1093
1093
1094 use_rust = True
1094 use_rust = True
1095 if rustmod is None:
1095 if rustmod is None:
1096 use_rust = False
1096 use_rust = False
1097 elif subrepos:
1097 elif subrepos:
1098 use_rust = False
1098 use_rust = False
1099 if bool(listunknown):
1099 if bool(listunknown):
1100 # Pathauditor does not exist yet in Rust, unknown files
1100 # Pathauditor does not exist yet in Rust, unknown files
1101 # can't be trusted.
1101 # can't be trusted.
1102 use_rust = False
1102 use_rust = False
1103 elif self._ignorefiles() and listignored:
1103 elif self._ignorefiles() and listignored:
1104 # Rust has no ignore mechanism yet, so don't use Rust for
1104 # Rust has no ignore mechanism yet, so don't use Rust for
1105 # commands that need ignore.
1105 # commands that need ignore.
1106 use_rust = False
1106 use_rust = False
1107 elif not match.always():
1107 elif not match.always():
1108 # Matchers have yet to be implemented
1108 # Matchers have yet to be implemented
1109 use_rust = False
1109 use_rust = False
1110
1110
1111 if use_rust:
1111 if use_rust:
1112 # Force Rayon (Rust parallelism library) to respect the number of
1112 # Force Rayon (Rust parallelism library) to respect the number of
1113 # workers. This is a temporary workaround until Rust code knows
1113 # workers. This is a temporary workaround until Rust code knows
1114 # how to read the config file.
1114 # how to read the config file.
1115 numcpus = self._ui.configint("worker", "numcpus")
1115 numcpus = self._ui.configint("worker", "numcpus")
1116 if numcpus is not None:
1116 if numcpus is not None:
1117 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1117 encoding.environ.setdefault(
1118 b'RAYON_NUM_THREADS', b'%d' % numcpus
1119 )
1118
1120
1119 workers_enabled = self._ui.configbool("worker", "enabled", True)
1121 workers_enabled = self._ui.configbool("worker", "enabled", True)
1120 if not workers_enabled:
1122 if not workers_enabled:
1121 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1123 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1122
1124
1123 (
1125 (
1124 lookup,
1126 lookup,
1125 modified,
1127 modified,
1126 added,
1128 added,
1127 removed,
1129 removed,
1128 deleted,
1130 deleted,
1129 unknown,
1131 unknown,
1130 clean,
1132 clean,
1131 ) = rustmod.status(
1133 ) = rustmod.status(
1132 dmap._rustmap,
1134 dmap._rustmap,
1133 self._rootdir,
1135 self._rootdir,
1134 bool(listclean),
1136 bool(listclean),
1135 self._lastnormaltime,
1137 self._lastnormaltime,
1136 self._checkexec,
1138 self._checkexec,
1137 )
1139 )
1138
1140
1139 status = scmutil.status(
1141 status = scmutil.status(
1140 modified=modified,
1142 modified=modified,
1141 added=added,
1143 added=added,
1142 removed=removed,
1144 removed=removed,
1143 deleted=deleted,
1145 deleted=deleted,
1144 unknown=unknown,
1146 unknown=unknown,
1145 ignored=ignored,
1147 ignored=ignored,
1146 clean=clean,
1148 clean=clean,
1147 )
1149 )
1148 return (lookup, status)
1150 return (lookup, status)
1149
1151
1150 dcontains = dmap.__contains__
1152 dcontains = dmap.__contains__
1151 dget = dmap.__getitem__
1153 dget = dmap.__getitem__
1152 ladd = lookup.append # aka "unsure"
1154 ladd = lookup.append # aka "unsure"
1153 madd = modified.append
1155 madd = modified.append
1154 aadd = added.append
1156 aadd = added.append
1155 uadd = unknown.append
1157 uadd = unknown.append
1156 iadd = ignored.append
1158 iadd = ignored.append
1157 radd = removed.append
1159 radd = removed.append
1158 dadd = deleted.append
1160 dadd = deleted.append
1159 cadd = clean.append
1161 cadd = clean.append
1160 mexact = match.exact
1162 mexact = match.exact
1161 dirignore = self._dirignore
1163 dirignore = self._dirignore
1162 checkexec = self._checkexec
1164 checkexec = self._checkexec
1163 copymap = self._map.copymap
1165 copymap = self._map.copymap
1164 lastnormaltime = self._lastnormaltime
1166 lastnormaltime = self._lastnormaltime
1165
1167
1166 # We need to do full walks when either
1168 # We need to do full walks when either
1167 # - we're listing all clean files, or
1169 # - we're listing all clean files, or
1168 # - match.traversedir does something, because match.traversedir should
1170 # - match.traversedir does something, because match.traversedir should
1169 # be called for every dir in the working dir
1171 # be called for every dir in the working dir
1170 full = listclean or match.traversedir is not None
1172 full = listclean or match.traversedir is not None
1171 for fn, st in pycompat.iteritems(
1173 for fn, st in pycompat.iteritems(
1172 self.walk(match, subrepos, listunknown, listignored, full=full)
1174 self.walk(match, subrepos, listunknown, listignored, full=full)
1173 ):
1175 ):
1174 if not dcontains(fn):
1176 if not dcontains(fn):
1175 if (listignored or mexact(fn)) and dirignore(fn):
1177 if (listignored or mexact(fn)) and dirignore(fn):
1176 if listignored:
1178 if listignored:
1177 iadd(fn)
1179 iadd(fn)
1178 else:
1180 else:
1179 uadd(fn)
1181 uadd(fn)
1180 continue
1182 continue
1181
1183
1182 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1184 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1183 # written like that for performance reasons. dmap[fn] is not a
1185 # written like that for performance reasons. dmap[fn] is not a
1184 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1186 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1185 # opcode has fast paths when the value to be unpacked is a tuple or
1187 # opcode has fast paths when the value to be unpacked is a tuple or
1186 # a list, but falls back to creating a full-fledged iterator in
1188 # a list, but falls back to creating a full-fledged iterator in
1187 # general. That is much slower than simply accessing and storing the
1189 # general. That is much slower than simply accessing and storing the
1188 # tuple members one by one.
1190 # tuple members one by one.
1189 t = dget(fn)
1191 t = dget(fn)
1190 state = t[0]
1192 state = t[0]
1191 mode = t[1]
1193 mode = t[1]
1192 size = t[2]
1194 size = t[2]
1193 time = t[3]
1195 time = t[3]
1194
1196
1195 if not st and state in b"nma":
1197 if not st and state in b"nma":
1196 dadd(fn)
1198 dadd(fn)
1197 elif state == b'n':
1199 elif state == b'n':
1198 if (
1200 if (
1199 size >= 0
1201 size >= 0
1200 and (
1202 and (
1201 (size != st.st_size and size != st.st_size & _rangemask)
1203 (size != st.st_size and size != st.st_size & _rangemask)
1202 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1204 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1203 )
1205 )
1204 or size == -2 # other parent
1206 or size == -2 # other parent
1205 or fn in copymap
1207 or fn in copymap
1206 ):
1208 ):
1207 madd(fn)
1209 madd(fn)
1208 elif (
1210 elif (
1209 time != st[stat.ST_MTIME]
1211 time != st[stat.ST_MTIME]
1210 and time != st[stat.ST_MTIME] & _rangemask
1212 and time != st[stat.ST_MTIME] & _rangemask
1211 ):
1213 ):
1212 ladd(fn)
1214 ladd(fn)
1213 elif st[stat.ST_MTIME] == lastnormaltime:
1215 elif st[stat.ST_MTIME] == lastnormaltime:
1214 # fn may have just been marked as normal and it may have
1216 # fn may have just been marked as normal and it may have
1215 # changed in the same second without changing its size.
1217 # changed in the same second without changing its size.
1216 # This can happen if we quickly do multiple commits.
1218 # This can happen if we quickly do multiple commits.
1217 # Force lookup, so we don't miss such a racy file change.
1219 # Force lookup, so we don't miss such a racy file change.
1218 ladd(fn)
1220 ladd(fn)
1219 elif listclean:
1221 elif listclean:
1220 cadd(fn)
1222 cadd(fn)
1221 elif state == b'm':
1223 elif state == b'm':
1222 madd(fn)
1224 madd(fn)
1223 elif state == b'a':
1225 elif state == b'a':
1224 aadd(fn)
1226 aadd(fn)
1225 elif state == b'r':
1227 elif state == b'r':
1226 radd(fn)
1228 radd(fn)
1227
1229
1228 return (
1230 return (
1229 lookup,
1231 lookup,
1230 scmutil.status(
1232 scmutil.status(
1231 modified, added, removed, deleted, unknown, ignored, clean
1233 modified, added, removed, deleted, unknown, ignored, clean
1232 ),
1234 ),
1233 )
1235 )
1234
1236
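# A hedged arithmetic note on the size/mtime comparisons in status() above:
# stored sizes and times are truncated to 31 bits (_rangemask = 0x7FFFFFFF),
# so an entry only counts as modified when neither the raw stat value nor its
# masked form matches the recorded one. The numbers below are made up.
mask = 0x7FFFFFFF
st_size = 5_000_000_000          # what lstat() reports for a >2 GiB file
stored_size = st_size & mask     # what a 31-bit dirstate field can hold
looks_modified = stored_size != st_size and stored_size != (st_size & mask)
assert not looks_modified        # same file; the masked comparison rescues it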
1235 def matches(self, match):
1237 def matches(self, match):
1236 '''
1238 '''
1237 return files in the dirstate (in whatever state) filtered by match
1239 return files in the dirstate (in whatever state) filtered by match
1238 '''
1240 '''
1239 dmap = self._map
1241 dmap = self._map
1240 if match.always():
1242 if match.always():
1241 return dmap.keys()
1243 return dmap.keys()
1242 files = match.files()
1244 files = match.files()
1243 if match.isexact():
1245 if match.isexact():
1244 # fast path -- filter the other way around, since typically files is
1246 # fast path -- filter the other way around, since typically files is
1245 # much smaller than dmap
1247 # much smaller than dmap
1246 return [f for f in files if f in dmap]
1248 return [f for f in files if f in dmap]
1247 if match.prefix() and all(fn in dmap for fn in files):
1249 if match.prefix() and all(fn in dmap for fn in files):
1248 # fast path -- all the values are known to be files, so just return
1250 # fast path -- all the values are known to be files, so just return
1249 # that
1251 # that
1250 return list(files)
1252 return list(files)
1251 return [f for f in dmap if match(f)]
1253 return [f for f in dmap if match(f)]
1252
1254
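# A small sketch of the two fast paths in matches() above, using plain sets;
# `dmap_demo` and `patterns` are hypothetical stand-ins for dmap and
# match.files().
dmap_demo = {b'a.txt', b'src/b.py', b'src/c.py'}
patterns = [b'a.txt', b'missing.txt']
# exact matcher: filter the (small) pattern list against the (large) dmap
assert [f for f in patterns if f in dmap_demo] == [b'a.txt']
# prefix matcher whose files are all known: the file list can be returned as-is
prefix_files = [b'a.txt', b'src/b.py']
assert all(f in dmap_demo for f in prefix_files)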
1253 def _actualfilename(self, tr):
1255 def _actualfilename(self, tr):
1254 if tr:
1256 if tr:
1255 return self._pendingfilename
1257 return self._pendingfilename
1256 else:
1258 else:
1257 return self._filename
1259 return self._filename
1258
1260
1259 def savebackup(self, tr, backupname):
1261 def savebackup(self, tr, backupname):
1260 '''Save current dirstate into backup file'''
1262 '''Save current dirstate into backup file'''
1261 filename = self._actualfilename(tr)
1263 filename = self._actualfilename(tr)
1262 assert backupname != filename
1264 assert backupname != filename
1263
1265
1264 # use '_writedirstate' instead of 'write' to write changes certainly,
1266 # use '_writedirstate' instead of 'write' to write changes certainly,
1265 # because the latter omits writing out if transaction is running.
1267 # because the latter omits writing out if transaction is running.
1266 # output file will be used to create backup of dirstate at this point.
1268 # output file will be used to create backup of dirstate at this point.
1267 if self._dirty or not self._opener.exists(filename):
1269 if self._dirty or not self._opener.exists(filename):
1268 self._writedirstate(
1270 self._writedirstate(
1269 self._opener(filename, b"w", atomictemp=True, checkambig=True)
1271 self._opener(filename, b"w", atomictemp=True, checkambig=True)
1270 )
1272 )
1271
1273
1272 if tr:
1274 if tr:
1273 # ensure that subsequent tr.writepending returns True for
1275 # ensure that subsequent tr.writepending returns True for
1274 # changes written out above, even if dirstate is never
1276 # changes written out above, even if dirstate is never
1275 # changed after this
1277 # changed after this
1276 tr.addfilegenerator(
1278 tr.addfilegenerator(
1277 b'dirstate',
1279 b'dirstate',
1278 (self._filename,),
1280 (self._filename,),
1279 self._writedirstate,
1281 self._writedirstate,
1280 location=b'plain',
1282 location=b'plain',
1281 )
1283 )
1282
1284
1283 # ensure that pending file written above is unlinked at
1285 # ensure that pending file written above is unlinked at
1284 # failure, even if tr.writepending isn't invoked until the
1286 # failure, even if tr.writepending isn't invoked until the
1285 # end of this transaction
1287 # end of this transaction
1286 tr.registertmp(filename, location=b'plain')
1288 tr.registertmp(filename, location=b'plain')
1287
1289
1288 self._opener.tryunlink(backupname)
1290 self._opener.tryunlink(backupname)
1289 # hardlink backup is okay because _writedirstate is always called
1291 # hardlink backup is okay because _writedirstate is always called
1290 # with an "atomictemp=True" file.
1292 # with an "atomictemp=True" file.
1291 util.copyfile(
1293 util.copyfile(
1292 self._opener.join(filename),
1294 self._opener.join(filename),
1293 self._opener.join(backupname),
1295 self._opener.join(backupname),
1294 hardlink=True,
1296 hardlink=True,
1295 )
1297 )
1296
1298
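# A sketch of why the hardlink backup above is safe, assuming ordinary
# filesystem semantics: the dirstate is always replaced by an atomic rename
# (atomictemp), never rewritten in place, so a hardlinked backup keeps
# pointing at the old content. Paths below are temporary and illustrative.
import os
import tempfile

d = tempfile.mkdtemp()
orig = os.path.join(d, 'dirstate')
backup = os.path.join(d, 'dirstate.backup')
with open(orig, 'wb') as fh:
    fh.write(b'old')
os.link(orig, backup)                      # hardlink backup of the old file
tmp = os.path.join(d, 'dirstate.tmp')
with open(tmp, 'wb') as fh:
    fh.write(b'new')
os.replace(tmp, orig)                      # atomictemp-style replacement
with open(backup, 'rb') as fh:
    assert fh.read() == b'old'             # backup untouched by the rename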
1297 def restorebackup(self, tr, backupname):
1299 def restorebackup(self, tr, backupname):
1298 '''Restore dirstate by backup file'''
1300 '''Restore dirstate by backup file'''
1299 # this "invalidate()" prevents "wlock.release()" from writing
1301 # this "invalidate()" prevents "wlock.release()" from writing
1300 # changes of dirstate out after restoring from backup file
1302 # changes of dirstate out after restoring from backup file
1301 self.invalidate()
1303 self.invalidate()
1302 filename = self._actualfilename(tr)
1304 filename = self._actualfilename(tr)
1303 o = self._opener
1305 o = self._opener
1304 if util.samefile(o.join(backupname), o.join(filename)):
1306 if util.samefile(o.join(backupname), o.join(filename)):
1305 o.unlink(backupname)
1307 o.unlink(backupname)
1306 else:
1308 else:
1307 o.rename(backupname, filename, checkambig=True)
1309 o.rename(backupname, filename, checkambig=True)
1308
1310
1309 def clearbackup(self, tr, backupname):
1311 def clearbackup(self, tr, backupname):
1310 '''Clear backup file'''
1312 '''Clear backup file'''
1311 self._opener.unlink(backupname)
1313 self._opener.unlink(backupname)
1312
1314
1313
1315
1314 class dirstatemap(object):
1316 class dirstatemap(object):
1315 """Map encapsulating the dirstate's contents.
1317 """Map encapsulating the dirstate's contents.
1316
1318
1317 The dirstate contains the following state:
1319 The dirstate contains the following state:
1318
1320
1319 - `identity` is the identity of the dirstate file, which can be used to
1321 - `identity` is the identity of the dirstate file, which can be used to
1320 detect when changes have occurred to the dirstate file.
1322 detect when changes have occurred to the dirstate file.
1321
1323
1322 - `parents` is a pair containing the parents of the working copy. The
1324 - `parents` is a pair containing the parents of the working copy. The
1323 parents are updated by calling `setparents`.
1325 parents are updated by calling `setparents`.
1324
1326
1325 - the state map maps filenames to tuples of (state, mode, size, mtime),
1327 - the state map maps filenames to tuples of (state, mode, size, mtime),
1326 where state is a single character representing 'normal', 'added',
1328 where state is a single character representing 'normal', 'added',
1327 'removed', or 'merged'. It is read by treating the dirstate as a
1329 'removed', or 'merged'. It is read by treating the dirstate as a
1328 dict. File state is updated by calling the `addfile`, `removefile` and
1330 dict. File state is updated by calling the `addfile`, `removefile` and
1329 `dropfile` methods.
1331 `dropfile` methods.
1330
1332
1331 - `copymap` maps destination filenames to their source filename.
1333 - `copymap` maps destination filenames to their source filename.
1332
1334
1333 The dirstate also provides the following views onto the state:
1335 The dirstate also provides the following views onto the state:
1334
1336
1335 - `nonnormalset` is a set of the filenames that have state other
1337 - `nonnormalset` is a set of the filenames that have state other
1336 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1338 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1337
1339
1338 - `otherparentset` is a set of the filenames that are marked as coming
1340 - `otherparentset` is a set of the filenames that are marked as coming
1339 from the second parent when the dirstate is currently being merged.
1341 from the second parent when the dirstate is currently being merged.
1340
1342
1341 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1343 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1342 form that they appear as in the dirstate.
1344 form that they appear as in the dirstate.
1343
1345
1344 - `dirfoldmap` is a dict mapping normalized directory names to the
1346 - `dirfoldmap` is a dict mapping normalized directory names to the
1345 denormalized form that they appear as in the dirstate.
1347 denormalized form that they appear as in the dirstate.
1346 """
1348 """
1347
1349
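# A standalone sketch of the conventions documented in the docstring above,
# using a plain dict; the file names and values are made up.
state_map = {
    b'foo.txt': (b'n', 0o644, 12, 1580000000),  # normal and clean
    b'bar.txt': (b'n', 0o644, 34, -1),          # normal but needs a lookup
    b'baz.txt': (b'a', 0, -1, -1),              # added
}
copymap_demo = {b'baz.txt': b'foo.txt'}          # destination -> source
nonnormal = {f for f, e in state_map.items() if e[0] != b'n' or e[3] == -1}
assert nonnormal == {b'bar.txt', b'baz.txt'}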
1348 def __init__(self, ui, opener, root):
1350 def __init__(self, ui, opener, root):
1349 self._ui = ui
1351 self._ui = ui
1350 self._opener = opener
1352 self._opener = opener
1351 self._root = root
1353 self._root = root
1352 self._filename = b'dirstate'
1354 self._filename = b'dirstate'
1353
1355
1354 self._parents = None
1356 self._parents = None
1355 self._dirtyparents = False
1357 self._dirtyparents = False
1356
1358
1357 # for consistent view between _pl() and _read() invocations
1359 # for consistent view between _pl() and _read() invocations
1358 self._pendingmode = None
1360 self._pendingmode = None
1359
1361
1360 @propertycache
1362 @propertycache
1361 def _map(self):
1363 def _map(self):
1362 self._map = {}
1364 self._map = {}
1363 self.read()
1365 self.read()
1364 return self._map
1366 return self._map
1365
1367
1366 @propertycache
1368 @propertycache
1367 def copymap(self):
1369 def copymap(self):
1368 self.copymap = {}
1370 self.copymap = {}
1369 self._map
1371 self._map
1370 return self.copymap
1372 return self.copymap
1371
1373
1372 def clear(self):
1374 def clear(self):
1373 self._map.clear()
1375 self._map.clear()
1374 self.copymap.clear()
1376 self.copymap.clear()
1375 self.setparents(nullid, nullid)
1377 self.setparents(nullid, nullid)
1376 util.clearcachedproperty(self, b"_dirs")
1378 util.clearcachedproperty(self, b"_dirs")
1377 util.clearcachedproperty(self, b"_alldirs")
1379 util.clearcachedproperty(self, b"_alldirs")
1378 util.clearcachedproperty(self, b"filefoldmap")
1380 util.clearcachedproperty(self, b"filefoldmap")
1379 util.clearcachedproperty(self, b"dirfoldmap")
1381 util.clearcachedproperty(self, b"dirfoldmap")
1380 util.clearcachedproperty(self, b"nonnormalset")
1382 util.clearcachedproperty(self, b"nonnormalset")
1381 util.clearcachedproperty(self, b"otherparentset")
1383 util.clearcachedproperty(self, b"otherparentset")
1382
1384
1383 def items(self):
1385 def items(self):
1384 return pycompat.iteritems(self._map)
1386 return pycompat.iteritems(self._map)
1385
1387
1386 # forward for python2,3 compat
1388 # forward for python2,3 compat
1387 iteritems = items
1389 iteritems = items
1388
1390
1389 def __len__(self):
1391 def __len__(self):
1390 return len(self._map)
1392 return len(self._map)
1391
1393
1392 def __iter__(self):
1394 def __iter__(self):
1393 return iter(self._map)
1395 return iter(self._map)
1394
1396
1395 def get(self, key, default=None):
1397 def get(self, key, default=None):
1396 return self._map.get(key, default)
1398 return self._map.get(key, default)
1397
1399
1398 def __contains__(self, key):
1400 def __contains__(self, key):
1399 return key in self._map
1401 return key in self._map
1400
1402
1401 def __getitem__(self, key):
1403 def __getitem__(self, key):
1402 return self._map[key]
1404 return self._map[key]
1403
1405
1404 def keys(self):
1406 def keys(self):
1405 return self._map.keys()
1407 return self._map.keys()
1406
1408
1407 def preload(self):
1409 def preload(self):
1408 """Loads the underlying data, if it's not already loaded"""
1410 """Loads the underlying data, if it's not already loaded"""
1409 self._map
1411 self._map
1410
1412
1411 def addfile(self, f, oldstate, state, mode, size, mtime):
1413 def addfile(self, f, oldstate, state, mode, size, mtime):
1412 """Add a tracked file to the dirstate."""
1414 """Add a tracked file to the dirstate."""
1413 if oldstate in b"?r" and "_dirs" in self.__dict__:
1415 if oldstate in b"?r" and "_dirs" in self.__dict__:
1414 self._dirs.addpath(f)
1416 self._dirs.addpath(f)
1415 if oldstate == b"?" and "_alldirs" in self.__dict__:
1417 if oldstate == b"?" and "_alldirs" in self.__dict__:
1416 self._alldirs.addpath(f)
1418 self._alldirs.addpath(f)
1417 self._map[f] = dirstatetuple(state, mode, size, mtime)
1419 self._map[f] = dirstatetuple(state, mode, size, mtime)
1418 if state != b'n' or mtime == -1:
1420 if state != b'n' or mtime == -1:
1419 self.nonnormalset.add(f)
1421 self.nonnormalset.add(f)
1420 if size == -2:
1422 if size == -2:
1421 self.otherparentset.add(f)
1423 self.otherparentset.add(f)
1422
1424
1423 def removefile(self, f, oldstate, size):
1425 def removefile(self, f, oldstate, size):
1424 """
1426 """
1425 Mark a file as removed in the dirstate.
1427 Mark a file as removed in the dirstate.
1426
1428
1427 The `size` parameter is used to store sentinel values that indicate
1429 The `size` parameter is used to store sentinel values that indicate
1428 the file's previous state. In the future, we should refactor this
1430 the file's previous state. In the future, we should refactor this
1429 to be more explicit about what that state is.
1431 to be more explicit about what that state is.
1430 """
1432 """
1431 if oldstate not in b"?r" and "_dirs" in self.__dict__:
1433 if oldstate not in b"?r" and "_dirs" in self.__dict__:
1432 self._dirs.delpath(f)
1434 self._dirs.delpath(f)
1433 if oldstate == b"?" and "_alldirs" in self.__dict__:
1435 if oldstate == b"?" and "_alldirs" in self.__dict__:
1434 self._alldirs.addpath(f)
1436 self._alldirs.addpath(f)
1435 if "filefoldmap" in self.__dict__:
1437 if "filefoldmap" in self.__dict__:
1436 normed = util.normcase(f)
1438 normed = util.normcase(f)
1437 self.filefoldmap.pop(normed, None)
1439 self.filefoldmap.pop(normed, None)
1438 self._map[f] = dirstatetuple(b'r', 0, size, 0)
1440 self._map[f] = dirstatetuple(b'r', 0, size, 0)
1439 self.nonnormalset.add(f)
1441 self.nonnormalset.add(f)
1440
1442
1441 def dropfile(self, f, oldstate):
1443 def dropfile(self, f, oldstate):
1442 """
1444 """
1443 Remove a file from the dirstate. Returns True if the file was
1445 Remove a file from the dirstate. Returns True if the file was
1444 previously recorded.
1446 previously recorded.
1445 """
1447 """
1446 exists = self._map.pop(f, None) is not None
1448 exists = self._map.pop(f, None) is not None
1447 if exists:
1449 if exists:
1448 if oldstate != b"r" and "_dirs" in self.__dict__:
1450 if oldstate != b"r" and "_dirs" in self.__dict__:
1449 self._dirs.delpath(f)
1451 self._dirs.delpath(f)
1450 if "_alldirs" in self.__dict__:
1452 if "_alldirs" in self.__dict__:
1451 self._alldirs.delpath(f)
1453 self._alldirs.delpath(f)
1452 if "filefoldmap" in self.__dict__:
1454 if "filefoldmap" in self.__dict__:
1453 normed = util.normcase(f)
1455 normed = util.normcase(f)
1454 self.filefoldmap.pop(normed, None)
1456 self.filefoldmap.pop(normed, None)
1455 self.nonnormalset.discard(f)
1457 self.nonnormalset.discard(f)
1456 return exists
1458 return exists
1457
1459
1458 def clearambiguoustimes(self, files, now):
1460 def clearambiguoustimes(self, files, now):
1459 for f in files:
1461 for f in files:
1460 e = self.get(f)
1462 e = self.get(f)
1461 if e is not None and e[0] == b'n' and e[3] == now:
1463 if e is not None and e[0] == b'n' and e[3] == now:
1462 self._map[f] = dirstatetuple(e[0], e[1], e[2], -1)
1464 self._map[f] = dirstatetuple(e[0], e[1], e[2], -1)
1463 self.nonnormalset.add(f)
1465 self.nonnormalset.add(f)
1464
1466
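# A hedged illustration of the ambiguity handled by clearambiguoustimes()
# above: two writes within the same second leave the same mtime, so an entry
# whose recorded mtime equals "now" is demoted to -1 to force a real
# comparison on the next status. The entry below is synthetic.
now = 1580000000
entry = (b'n', 0o644, 42, now)                  # (state, mode, size, mtime)
if entry[0] == b'n' and entry[3] == now:
    entry = (entry[0], entry[1], entry[2], -1)  # mark as "needs lookup"
assert entry[3] == -1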
1465 def nonnormalentries(self):
1467 def nonnormalentries(self):
1466 '''Compute the nonnormal dirstate entries from the dmap'''
1468 '''Compute the nonnormal dirstate entries from the dmap'''
1467 try:
1469 try:
1468 return parsers.nonnormalotherparententries(self._map)
1470 return parsers.nonnormalotherparententries(self._map)
1469 except AttributeError:
1471 except AttributeError:
1470 nonnorm = set()
1472 nonnorm = set()
1471 otherparent = set()
1473 otherparent = set()
1472 for fname, e in pycompat.iteritems(self._map):
1474 for fname, e in pycompat.iteritems(self._map):
1473 if e[0] != b'n' or e[3] == -1:
1475 if e[0] != b'n' or e[3] == -1:
1474 nonnorm.add(fname)
1476 nonnorm.add(fname)
1475 if e[0] == b'n' and e[2] == -2:
1477 if e[0] == b'n' and e[2] == -2:
1476 otherparent.add(fname)
1478 otherparent.add(fname)
1477 return nonnorm, otherparent
1479 return nonnorm, otherparent
1478
1480
1479 @propertycache
1481 @propertycache
1480 def filefoldmap(self):
1482 def filefoldmap(self):
1481 """Returns a dictionary mapping normalized case paths to their
1483 """Returns a dictionary mapping normalized case paths to their
1482 non-normalized versions.
1484 non-normalized versions.
1483 """
1485 """
1484 try:
1486 try:
1485 makefilefoldmap = parsers.make_file_foldmap
1487 makefilefoldmap = parsers.make_file_foldmap
1486 except AttributeError:
1488 except AttributeError:
1487 pass
1489 pass
1488 else:
1490 else:
1489 return makefilefoldmap(
1491 return makefilefoldmap(
1490 self._map, util.normcasespec, util.normcasefallback
1492 self._map, util.normcasespec, util.normcasefallback
1491 )
1493 )
1492
1494
1493 f = {}
1495 f = {}
1494 normcase = util.normcase
1496 normcase = util.normcase
1495 for name, s in pycompat.iteritems(self._map):
1497 for name, s in pycompat.iteritems(self._map):
1496 if s[0] != b'r':
1498 if s[0] != b'r':
1497 f[normcase(name)] = name
1499 f[normcase(name)] = name
1498 f[b'.'] = b'.' # prevents useless util.fspath() invocation
1500 f[b'.'] = b'.' # prevents useless util.fspath() invocation
1499 return f
1501 return f
1500
1502
1501 def hastrackeddir(self, d):
1503 def hastrackeddir(self, d):
1502 """
1504 """
1503 Returns True if the dirstate contains a tracked (not removed) file
1505 Returns True if the dirstate contains a tracked (not removed) file
1504 in this directory.
1506 in this directory.
1505 """
1507 """
1506 return d in self._dirs
1508 return d in self._dirs
1507
1509
1508 def hasdir(self, d):
1510 def hasdir(self, d):
1509 """
1511 """
1510 Returns True if the dirstate contains a file (tracked or removed)
1512 Returns True if the dirstate contains a file (tracked or removed)
1511 in this directory.
1513 in this directory.
1512 """
1514 """
1513 return d in self._alldirs
1515 return d in self._alldirs
1514
1516
1515 @propertycache
1517 @propertycache
1516 def _dirs(self):
1518 def _dirs(self):
1517 return pathutil.dirs(self._map, b'r')
1519 return pathutil.dirs(self._map, b'r')
1518
1520
1519 @propertycache
1521 @propertycache
1520 def _alldirs(self):
1522 def _alldirs(self):
1521 return pathutil.dirs(self._map)
1523 return pathutil.dirs(self._map)
1522
1524
1523 def _opendirstatefile(self):
1525 def _opendirstatefile(self):
1524 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1526 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1525 if self._pendingmode is not None and self._pendingmode != mode:
1527 if self._pendingmode is not None and self._pendingmode != mode:
1526 fp.close()
1528 fp.close()
1527 raise error.Abort(
1529 raise error.Abort(
1528 _(b'working directory state may be changed in parallel')
1530 _(b'working directory state may be changed in parallel')
1529 )
1531 )
1530 self._pendingmode = mode
1532 self._pendingmode = mode
1531 return fp
1533 return fp
1532
1534
1533 def parents(self):
1535 def parents(self):
1534 if not self._parents:
1536 if not self._parents:
1535 try:
1537 try:
1536 fp = self._opendirstatefile()
1538 fp = self._opendirstatefile()
1537 st = fp.read(40)
1539 st = fp.read(40)
1538 fp.close()
1540 fp.close()
1539 except IOError as err:
1541 except IOError as err:
1540 if err.errno != errno.ENOENT:
1542 if err.errno != errno.ENOENT:
1541 raise
1543 raise
1542 # File doesn't exist, so the current state is empty
1544 # File doesn't exist, so the current state is empty
1543 st = b''
1545 st = b''
1544
1546
1545 l = len(st)
1547 l = len(st)
1546 if l == 40:
1548 if l == 40:
1547 self._parents = (st[:20], st[20:40])
1549 self._parents = (st[:20], st[20:40])
1548 elif l == 0:
1550 elif l == 0:
1549 self._parents = (nullid, nullid)
1551 self._parents = (nullid, nullid)
1550 else:
1552 else:
1551 raise error.Abort(
1553 raise error.Abort(
1552 _(b'working directory state appears damaged!')
1554 _(b'working directory state appears damaged!')
1553 )
1555 )
1554
1556
1555 return self._parents
1557 return self._parents
1556
1558
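# A sketch of the 40-byte header parse in parents() above: the dirstate file
# starts with the two parent nodes, 20 raw bytes each, and an empty file
# means both parents are null. The byte values below are arbitrary.
null_demo = b'\x00' * 20                        # stand-in for node.nullid
header = b'\x11' * 20 + b'\x22' * 20
p1, p2 = header[:20], header[20:40]
assert len(p1) == len(p2) == 20
empty_header = b''
parents = (null_demo, null_demo) if not empty_header else (empty_header[:20], empty_header[20:40])
assert parents == (null_demo, null_demo)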
1557 def setparents(self, p1, p2):
1559 def setparents(self, p1, p2):
1558 self._parents = (p1, p2)
1560 self._parents = (p1, p2)
1559 self._dirtyparents = True
1561 self._dirtyparents = True
1560
1562
1561 def read(self):
1563 def read(self):
1562 # ignore HG_PENDING because identity is used only for writing
1564 # ignore HG_PENDING because identity is used only for writing
1563 self.identity = util.filestat.frompath(
1565 self.identity = util.filestat.frompath(
1564 self._opener.join(self._filename)
1566 self._opener.join(self._filename)
1565 )
1567 )
1566
1568
1567 try:
1569 try:
1568 fp = self._opendirstatefile()
1570 fp = self._opendirstatefile()
1569 try:
1571 try:
1570 st = fp.read()
1572 st = fp.read()
1571 finally:
1573 finally:
1572 fp.close()
1574 fp.close()
1573 except IOError as err:
1575 except IOError as err:
1574 if err.errno != errno.ENOENT:
1576 if err.errno != errno.ENOENT:
1575 raise
1577 raise
1576 return
1578 return
1577 if not st:
1579 if not st:
1578 return
1580 return
1579
1581
1580 if util.safehasattr(parsers, b'dict_new_presized'):
1582 if util.safehasattr(parsers, b'dict_new_presized'):
1581 # Make an estimate of the number of files in the dirstate based on
1583 # Make an estimate of the number of files in the dirstate based on
1582 # its size. From a linear regression on a set of real-world repos,
1584 # its size. From a linear regression on a set of real-world repos,
1583 # all over 10,000 files, the size of a dirstate entry is 85
1585 # all over 10,000 files, the size of a dirstate entry is 85
1584 # bytes. The cost of resizing is significantly higher than the cost
1586 # bytes. The cost of resizing is significantly higher than the cost
1585 # of filling in a larger presized dict, so subtract 20% from the
1587 # of filling in a larger presized dict, so subtract 20% from the
1586 # size.
1588 # size.
1587 #
1589 #
1588 # This heuristic is imperfect in many ways, so in a future dirstate
1590 # This heuristic is imperfect in many ways, so in a future dirstate
1589 # format update it makes sense to just record the number of entries
1591 # format update it makes sense to just record the number of entries
1590 # on write.
1592 # on write.
1591 self._map = parsers.dict_new_presized(len(st) // 71)
1593 self._map = parsers.dict_new_presized(len(st) // 71)
1592
1594
1593 # Python's garbage collector triggers a GC each time a certain number
1595 # Python's garbage collector triggers a GC each time a certain number
1594 # of container objects (the number being defined by
1596 # of container objects (the number being defined by
1595 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1597 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1596 # for each file in the dirstate. The C version then immediately marks
1598 # for each file in the dirstate. The C version then immediately marks
1597 # them as not to be tracked by the collector. However, this has no
1599 # them as not to be tracked by the collector. However, this has no
1598 # effect on when GCs are triggered, only on what objects the GC looks
1600 # effect on when GCs are triggered, only on what objects the GC looks
1599 # into. This means that O(number of files) GCs are unavoidable.
1601 # into. This means that O(number of files) GCs are unavoidable.
1600 # Depending on when in the process's lifetime the dirstate is parsed,
1602 # Depending on when in the process's lifetime the dirstate is parsed,
1601 # this can get very expensive. As a workaround, disable GC while
1603 # this can get very expensive. As a workaround, disable GC while
1602 # parsing the dirstate.
1604 # parsing the dirstate.
1603 #
1605 #
1604 # (we cannot decorate the function directly since it is in a C module)
1606 # (we cannot decorate the function directly since it is in a C module)
1605 parse_dirstate = util.nogc(parsers.parse_dirstate)
1607 parse_dirstate = util.nogc(parsers.parse_dirstate)
1606 p = parse_dirstate(self._map, self.copymap, st)
1608 p = parse_dirstate(self._map, self.copymap, st)
1607 if not self._dirtyparents:
1609 if not self._dirtyparents:
1608 self.setparents(*p)
1610 self.setparents(*p)
1609
1611
1610 # Avoid excess attribute lookups by fast pathing certain checks
1612 # Avoid excess attribute lookups by fast pathing certain checks
1611 self.__contains__ = self._map.__contains__
1613 self.__contains__ = self._map.__contains__
1612 self.__getitem__ = self._map.__getitem__
1614 self.__getitem__ = self._map.__getitem__
1613 self.get = self._map.get
1615 self.get = self._map.get
1614
1616
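# A back-of-the-envelope check of the presizing heuristic in read() above,
# assuming roughly 85 bytes per entry with ~20% headroom (hence the divisor
# of 71): a 1 MiB dirstate yields a dict presized a bit above the expected
# number of files, which avoids rehashing while filling it.
dirstate_bytes = 1_048_576
estimated_entries = dirstate_bytes // 85   # about 12,336 files
presized_slots = dirstate_bytes // 71      # about 14,768 slots requested
assert presized_slots > estimated_entries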
1615 def write(self, st, now):
1617 def write(self, st, now):
1616 st.write(
1618 st.write(
1617 parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
1619 parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
1618 )
1620 )
1619 st.close()
1621 st.close()
1620 self._dirtyparents = False
1622 self._dirtyparents = False
1621 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1623 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1622
1624
1623 @propertycache
1625 @propertycache
1624 def nonnormalset(self):
1626 def nonnormalset(self):
1625 nonnorm, otherparents = self.nonnormalentries()
1627 nonnorm, otherparents = self.nonnormalentries()
1626 self.otherparentset = otherparents
1628 self.otherparentset = otherparents
1627 return nonnorm
1629 return nonnorm
1628
1630
1629 @propertycache
1631 @propertycache
1630 def otherparentset(self):
1632 def otherparentset(self):
1631 nonnorm, otherparents = self.nonnormalentries()
1633 nonnorm, otherparents = self.nonnormalentries()
1632 self.nonnormalset = nonnorm
1634 self.nonnormalset = nonnorm
1633 return otherparents
1635 return otherparents
1634
1636
1635 @propertycache
1637 @propertycache
1636 def identity(self):
1638 def identity(self):
1637 self._map
1639 self._map
1638 return self.identity
1640 return self.identity
1639
1641
1640 @propertycache
1642 @propertycache
1641 def dirfoldmap(self):
1643 def dirfoldmap(self):
1642 f = {}
1644 f = {}
1643 normcase = util.normcase
1645 normcase = util.normcase
1644 for name in self._dirs:
1646 for name in self._dirs:
1645 f[normcase(name)] = name
1647 f[normcase(name)] = name
1646 return f
1648 return f
1647
1649
1648
1650
1649 if rustmod is not None:
1651 if rustmod is not None:
1650
1652
1651 class dirstatemap(object):
1653 class dirstatemap(object):
1652 def __init__(self, ui, opener, root):
1654 def __init__(self, ui, opener, root):
1653 self._ui = ui
1655 self._ui = ui
1654 self._opener = opener
1656 self._opener = opener
1655 self._root = root
1657 self._root = root
1656 self._filename = b'dirstate'
1658 self._filename = b'dirstate'
1657 self._parents = None
1659 self._parents = None
1658 self._dirtyparents = False
1660 self._dirtyparents = False
1659
1661
1660 # for consistent view between _pl() and _read() invocations
1662 # for consistent view between _pl() and _read() invocations
1661 self._pendingmode = None
1663 self._pendingmode = None
1662
1664
1663 def addfile(self, *args, **kwargs):
1665 def addfile(self, *args, **kwargs):
1664 return self._rustmap.addfile(*args, **kwargs)
1666 return self._rustmap.addfile(*args, **kwargs)
1665
1667
1666 def removefile(self, *args, **kwargs):
1668 def removefile(self, *args, **kwargs):
1667 return self._rustmap.removefile(*args, **kwargs)
1669 return self._rustmap.removefile(*args, **kwargs)
1668
1670
1669 def dropfile(self, *args, **kwargs):
1671 def dropfile(self, *args, **kwargs):
1670 return self._rustmap.dropfile(*args, **kwargs)
1672 return self._rustmap.dropfile(*args, **kwargs)
1671
1673
1672 def clearambiguoustimes(self, *args, **kwargs):
1674 def clearambiguoustimes(self, *args, **kwargs):
1673 return self._rustmap.clearambiguoustimes(*args, **kwargs)
1675 return self._rustmap.clearambiguoustimes(*args, **kwargs)
1674
1676
1675 def nonnormalentries(self):
1677 def nonnormalentries(self):
1676 return self._rustmap.nonnormalentries()
1678 return self._rustmap.nonnormalentries()
1677
1679
1678 def get(self, *args, **kwargs):
1680 def get(self, *args, **kwargs):
1679 return self._rustmap.get(*args, **kwargs)
1681 return self._rustmap.get(*args, **kwargs)
1680
1682
1681 @propertycache
1683 @propertycache
1682 def _rustmap(self):
1684 def _rustmap(self):
1683 self._rustmap = rustmod.DirstateMap(self._root)
1685 self._rustmap = rustmod.DirstateMap(self._root)
1684 self.read()
1686 self.read()
1685 return self._rustmap
1687 return self._rustmap
1686
1688
1687 @property
1689 @property
1688 def copymap(self):
1690 def copymap(self):
1689 return self._rustmap.copymap()
1691 return self._rustmap.copymap()
1690
1692
1691 def preload(self):
1693 def preload(self):
1692 self._rustmap
1694 self._rustmap
1693
1695
1694 def clear(self):
1696 def clear(self):
1695 self._rustmap.clear()
1697 self._rustmap.clear()
1696 self.setparents(nullid, nullid)
1698 self.setparents(nullid, nullid)
1697 util.clearcachedproperty(self, b"_dirs")
1699 util.clearcachedproperty(self, b"_dirs")
1698 util.clearcachedproperty(self, b"_alldirs")
1700 util.clearcachedproperty(self, b"_alldirs")
1699 util.clearcachedproperty(self, b"dirfoldmap")
1701 util.clearcachedproperty(self, b"dirfoldmap")
1700
1702
1701 def items(self):
1703 def items(self):
1702 return self._rustmap.items()
1704 return self._rustmap.items()
1703
1705
1704 def keys(self):
1706 def keys(self):
1705 return iter(self._rustmap)
1707 return iter(self._rustmap)
1706
1708
1707 def __contains__(self, key):
1709 def __contains__(self, key):
1708 return key in self._rustmap
1710 return key in self._rustmap
1709
1711
1710 def __getitem__(self, item):
1712 def __getitem__(self, item):
1711 return self._rustmap[item]
1713 return self._rustmap[item]
1712
1714
1713 def __len__(self):
1715 def __len__(self):
1714 return len(self._rustmap)
1716 return len(self._rustmap)
1715
1717
1716 def __iter__(self):
1718 def __iter__(self):
1717 return iter(self._rustmap)
1719 return iter(self._rustmap)
1718
1720
1719 # forward for python2,3 compat
1721 # forward for python2,3 compat
1720 iteritems = items
1722 iteritems = items
1721
1723
1722 def _opendirstatefile(self):
1724 def _opendirstatefile(self):
1723 fp, mode = txnutil.trypending(
1725 fp, mode = txnutil.trypending(
1724 self._root, self._opener, self._filename
1726 self._root, self._opener, self._filename
1725 )
1727 )
1726 if self._pendingmode is not None and self._pendingmode != mode:
1728 if self._pendingmode is not None and self._pendingmode != mode:
1727 fp.close()
1729 fp.close()
1728 raise error.Abort(
1730 raise error.Abort(
1729 _(b'working directory state may be changed in parallel')
1731 _(b'working directory state may be changed in parallel')
1730 )
1732 )
1731 self._pendingmode = mode
1733 self._pendingmode = mode
1732 return fp
1734 return fp
1733
1735
1734 def setparents(self, p1, p2):
1736 def setparents(self, p1, p2):
1735 self._rustmap.setparents(p1, p2)
1737 self._rustmap.setparents(p1, p2)
1736 self._parents = (p1, p2)
1738 self._parents = (p1, p2)
1737 self._dirtyparents = True
1739 self._dirtyparents = True
1738
1740
1739 def parents(self):
1741 def parents(self):
1740 if not self._parents:
1742 if not self._parents:
1741 try:
1743 try:
1742 fp = self._opendirstatefile()
1744 fp = self._opendirstatefile()
1743 st = fp.read(40)
1745 st = fp.read(40)
1744 fp.close()
1746 fp.close()
1745 except IOError as err:
1747 except IOError as err:
1746 if err.errno != errno.ENOENT:
1748 if err.errno != errno.ENOENT:
1747 raise
1749 raise
1748 # File doesn't exist, so the current state is empty
1750 # File doesn't exist, so the current state is empty
1749 st = b''
1751 st = b''
1750
1752
1751 try:
1753 try:
1752 self._parents = self._rustmap.parents(st)
1754 self._parents = self._rustmap.parents(st)
1753 except ValueError:
1755 except ValueError:
1754 raise error.Abort(
1756 raise error.Abort(
1755 _(b'working directory state appears damaged!')
1757 _(b'working directory state appears damaged!')
1756 )
1758 )
1757
1759
1758 return self._parents
1760 return self._parents
1759
1761
1760 def read(self):
1762 def read(self):
1761 # ignore HG_PENDING because identity is used only for writing
1763 # ignore HG_PENDING because identity is used only for writing
1762 self.identity = util.filestat.frompath(
1764 self.identity = util.filestat.frompath(
1763 self._opener.join(self._filename)
1765 self._opener.join(self._filename)
1764 )
1766 )
1765
1767
1766 try:
1768 try:
1767 fp = self._opendirstatefile()
1769 fp = self._opendirstatefile()
1768 try:
1770 try:
1769 st = fp.read()
1771 st = fp.read()
1770 finally:
1772 finally:
1771 fp.close()
1773 fp.close()
1772 except IOError as err:
1774 except IOError as err:
1773 if err.errno != errno.ENOENT:
1775 if err.errno != errno.ENOENT:
1774 raise
1776 raise
1775 return
1777 return
1776 if not st:
1778 if not st:
1777 return
1779 return
1778
1780
1779 parse_dirstate = util.nogc(self._rustmap.read)
1781 parse_dirstate = util.nogc(self._rustmap.read)
1780 parents = parse_dirstate(st)
1782 parents = parse_dirstate(st)
1781 if parents and not self._dirtyparents:
1783 if parents and not self._dirtyparents:
1782 self.setparents(*parents)
1784 self.setparents(*parents)
1783
1785
1784 self.__contains__ = self._rustmap.__contains__
1786 self.__contains__ = self._rustmap.__contains__
1785 self.__getitem__ = self._rustmap.__getitem__
1787 self.__getitem__ = self._rustmap.__getitem__
1786 self.get = self._rustmap.get
1788 self.get = self._rustmap.get
1787
1789
1788 def write(self, st, now):
1790 def write(self, st, now):
1789 parents = self.parents()
1791 parents = self.parents()
1790 st.write(self._rustmap.write(parents[0], parents[1], now))
1792 st.write(self._rustmap.write(parents[0], parents[1], now))
1791 st.close()
1793 st.close()
1792 self._dirtyparents = False
1794 self._dirtyparents = False
1793
1795
1794 @propertycache
1796 @propertycache
1795 def filefoldmap(self):
1797 def filefoldmap(self):
1796 """Returns a dictionary mapping normalized case paths to their
1798 """Returns a dictionary mapping normalized case paths to their
1797 non-normalized versions.
1799 non-normalized versions.
1798 """
1800 """
1799 return self._rustmap.filefoldmapasdict()
1801 return self._rustmap.filefoldmapasdict()
1800
1802
1801 def hastrackeddir(self, d):
1803 def hastrackeddir(self, d):
1802 self._dirs # Trigger Python's propertycache
1804 self._dirs # Trigger Python's propertycache
1803 return self._rustmap.hastrackeddir(d)
1805 return self._rustmap.hastrackeddir(d)
1804
1806
1805 def hasdir(self, d):
1807 def hasdir(self, d):
1806 self._dirs # Trigger Python's propertycache
1808 self._dirs # Trigger Python's propertycache
1807 return self._rustmap.hasdir(d)
1809 return self._rustmap.hasdir(d)
1808
1810
1809 @propertycache
1811 @propertycache
1810 def _dirs(self):
1812 def _dirs(self):
1811 return self._rustmap.getdirs()
1813 return self._rustmap.getdirs()
1812
1814
1813 @propertycache
1815 @propertycache
1814 def _alldirs(self):
1816 def _alldirs(self):
1815 return self._rustmap.getalldirs()
1817 return self._rustmap.getalldirs()
1816
1818
1817 @propertycache
1819 @propertycache
1818 def identity(self):
1820 def identity(self):
1819 self._rustmap
1821 self._rustmap
1820 return self.identity
1822 return self.identity
1821
1823
1822 @property
1824 @property
1823 def nonnormalset(self):
1825 def nonnormalset(self):
1824 nonnorm, otherparents = self._rustmap.nonnormalentries()
1826 nonnorm, otherparents = self._rustmap.nonnormalentries()
1825 return nonnorm
1827 return nonnorm
1826
1828
1827 @property
1829 @property
1828 def otherparentset(self):
1830 def otherparentset(self):
1829 nonnorm, otherparents = self._rustmap.nonnormalentries()
1831 nonnorm, otherparents = self._rustmap.nonnormalentries()
1830 return otherparents
1832 return otherparents
1831
1833
1832 @propertycache
1834 @propertycache
1833 def dirfoldmap(self):
1835 def dirfoldmap(self):
1834 f = {}
1836 f = {}
1835 normcase = util.normcase
1837 normcase = util.normcase
1836 for name in self._dirs:
1838 for name in self._dirs:
1837 f[normcase(name)] = name
1839 f[normcase(name)] = name
1838 return f
1840 return f