##// END OF EJS Templates
rust-threads: force Rayon to respect the worker count in config...
Raphaël Gomès -
r43905:47fac169 default
parent child Browse files
Show More
@@ -1,1827 +1,1838 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import nullid
17 from .node import nullid
18 from .pycompat import delattr
18 from .pycompat import delattr
19
19
20 from hgdemandimport import tracing
20 from hgdemandimport import tracing
21
21
22 from . import (
22 from . import (
23 encoding,
23 encoding,
24 error,
24 error,
25 match as matchmod,
25 match as matchmod,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 txnutil,
30 txnutil,
31 util,
31 util,
32 )
32 )
33
33
34 from .interfaces import (
34 from .interfaces import (
35 dirstate as intdirstate,
35 dirstate as intdirstate,
36 util as interfaceutil,
36 util as interfaceutil,
37 )
37 )
38
38
# C/Rust accelerated implementations, resolved at import time via the
# policy module (may fall back to pure Python).
parsers = policy.importmod(r'parsers')
rustmod = policy.importrust(r'dirstate')

propertycache = util.propertycache
filecache = scmutil.filecache
# 31-bit mask applied to size/mtime before storing them in the dirstate
# (see normal() below), keeping values within the on-disk field width.
_rangemask = 0x7FFFFFFF

dirstatetuple = parsers.dirstatetuple
47
47
48
48
class repocache(filecache):
    """filecache for files in .hg/"""

    def join(self, obj, fname):
        # Resolve fname relative to the repository's .hg directory.
        return obj._opener.join(fname)
54
54
55
55
class rootcache(filecache):
    """filecache for files in the repository root"""

    def join(self, obj, fname):
        # Resolve fname relative to the working-directory root.
        return obj._join(fname)
61
61
62
62
def _getfsnow(vfs):
    '''Get "now" timestamp on filesystem'''
    # Create a temporary file and read its mtime: this reflects the
    # filesystem's own timestamp granularity rather than the system clock.
    tmpfd, tmpname = vfs.mkstemp()
    try:
        return os.fstat(tmpfd)[stat.ST_MTIME]
    finally:
        os.close(tmpfd)
        vfs.unlink(tmpname)
71
71
72
72
73 @interfaceutil.implementer(intdirstate.idirstate)
73 @interfaceutil.implementer(intdirstate.idirstate)
74 class dirstate(object):
74 class dirstate(object):
    def __init__(self, opener, ui, root, validate, sparsematchfn):
        '''Create a new dirstate object.

        opener is an open()-like callable that can be used to open the
        dirstate file; root is the root of the directory tracked by
        the dirstate.
        '''
        self._opener = opener
        self._validate = validate
        self._root = root
        self._sparsematchfn = sparsematchfn
        # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
        # UNC path pointing to root share (issue4557)
        self._rootdir = pathutil.normasprefix(root)
        self._dirty = False
        self._lastnormaltime = 0
        self._ui = ui
        self._filecache = {}
        # number of nested parentchange() contexts currently open
        self._parentwriters = 0
        self._filename = b'dirstate'
        self._pendingfilename = b'%s.pending' % self._filename
        self._plchangecallbacks = {}
        self._origpl = None
        self._updatedfiles = set()
        self._mapcls = dirstatemap
        # Access and cache cwd early, so we don't access it for the first time
        # after a working-copy update caused it to not exist (accessing it then
        # raises an exception).
        self._cwd
104
104
    @contextlib.contextmanager
    def parentchange(self):
        '''Context manager for handling dirstate parents.

        If an exception occurs in the scope of the context manager,
        the incoherent dirstate won't be written when wlock is
        released.
        '''
        self._parentwriters += 1
        yield
        # Typically we want the "undo" step of a context manager in a
        # finally block so it happens even when an exception
        # occurs. In this case, however, we only want to decrement
        # parentwriters if the code in the with statement exits
        # normally, so we don't have a try/finally here on purpose.
        self._parentwriters -= 1
121
121
    def pendingparentchange(self):
        '''Returns true if the dirstate is in the middle of a set of changes
        that modify the dirstate parent.
        '''
        return self._parentwriters > 0
127
127
    @propertycache
    def _map(self):
        """Return the dirstate contents (see documentation for dirstatemap)."""
        # Assigning to self._map replaces this propertycache entry, so the
        # map is only built once per dirstate instance.
        self._map = self._mapcls(self._ui, self._opener, self._root)
        return self._map
133
133
    @property
    def _sparsematcher(self):
        """The matcher for the sparse checkout.

        The working directory may not include every file from a manifest. The
        matcher obtained by this property will match a path if it is to be
        included in the working directory.
        """
        # TODO there is potential to cache this property. For now, the matcher
        # is resolved on every access. (But the called function does use a
        # cache to keep the lookup fast.)
        return self._sparsematchfn()
146
146
    @repocache(b'branch')
    def _branch(self):
        # Read the current branch from .hg/branch; a missing file means
        # the default branch. Any other IOError is propagated.
        try:
            return self._opener.read(b"branch").strip() or b"default"
        except IOError as inst:
            if inst.errno != errno.ENOENT:
                raise
            return b"default"
155
155
    @property
    def _pl(self):
        # Parent pair (p1, p2) as stored in the dirstate map.
        return self._map.parents()
159
159
    def hasdir(self, d):
        # True if d is a directory containing at least one tracked file.
        return self._map.hastrackeddir(d)
162
162
    @rootcache(b'.hgignore')
    def _ignore(self):
        # Matcher combining all configured ignore files; matches nothing
        # when no ignore files exist.
        files = self._ignorefiles()
        if not files:
            return matchmod.never()

        pats = [b'include:%s' % f for f in files]
        return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
171
171
    @propertycache
    def _slash(self):
        # True when paths should be displayed with '/' even though the
        # native separator differs (ui.slash on e.g. Windows).
        return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
175
175
    @propertycache
    def _checklink(self):
        # Whether the working-directory filesystem supports symlinks.
        return util.checklink(self._root)
179
179
    @propertycache
    def _checkexec(self):
        # Whether the working-directory filesystem supports the exec bit.
        return util.checkexec(self._root)
183
183
    @propertycache
    def _checkcase(self):
        # True on case-insensitive filesystems (probed via the .hg dir).
        return not util.fscasesensitive(self._join(b'.hg'))
187
187
    def _join(self, f):
        # much faster than os.path.join()
        # it's safe because f is always a relative path
        return self._rootdir + f
192
192
    def flagfunc(self, buildfallback):
        # Return a function mapping a path to its flags: b'l' (symlink),
        # b'x' (executable) or b'' (neither). Prefer the filesystem for
        # each flag it can represent; use the fallback for the rest.
        if self._checklink and self._checkexec:

            def f(x):
                try:
                    st = os.lstat(self._join(x))
                    if util.statislink(st):
                        return b'l'
                    if util.statisexec(st):
                        return b'x'
                except OSError:
                    pass
                return b''

            return f

        fallback = buildfallback()
        if self._checklink:

            def f(x):
                if os.path.islink(self._join(x)):
                    return b'l'
                if b'x' in fallback(x):
                    return b'x'
                return b''

            return f
        if self._checkexec:

            def f(x):
                if b'l' in fallback(x):
                    return b'l'
                if util.isexec(self._join(x)):
                    return b'x'
                return b''

            return f
        else:
            return fallback
232
232
    @propertycache
    def _cwd(self):
        # internal config: ui.forcecwd
        forcecwd = self._ui.config(b'ui', b'forcecwd')
        if forcecwd:
            return forcecwd
        return encoding.getcwd()
240
240
241 def getcwd(self):
241 def getcwd(self):
242 '''Return the path from which a canonical path is calculated.
242 '''Return the path from which a canonical path is calculated.
243
243
244 This path should be used to resolve file patterns or to convert
244 This path should be used to resolve file patterns or to convert
245 canonical paths back to file paths for display. It shouldn't be
245 canonical paths back to file paths for display. It shouldn't be
246 used to get real file paths. Use vfs functions instead.
246 used to get real file paths. Use vfs functions instead.
247 '''
247 '''
248 cwd = self._cwd
248 cwd = self._cwd
249 if cwd == self._root:
249 if cwd == self._root:
250 return b''
250 return b''
251 # self._root ends with a path separator if self._root is '/' or 'C:\'
251 # self._root ends with a path separator if self._root is '/' or 'C:\'
252 rootsep = self._root
252 rootsep = self._root
253 if not util.endswithsep(rootsep):
253 if not util.endswithsep(rootsep):
254 rootsep += pycompat.ossep
254 rootsep += pycompat.ossep
255 if cwd.startswith(rootsep):
255 if cwd.startswith(rootsep):
256 return cwd[len(rootsep) :]
256 return cwd[len(rootsep) :]
257 else:
257 else:
258 # we're outside the repo. return an absolute path.
258 # we're outside the repo. return an absolute path.
259 return cwd
259 return cwd
260
260
261 def pathto(self, f, cwd=None):
261 def pathto(self, f, cwd=None):
262 if cwd is None:
262 if cwd is None:
263 cwd = self.getcwd()
263 cwd = self.getcwd()
264 path = util.pathto(self._root, cwd, f)
264 path = util.pathto(self._root, cwd, f)
265 if self._slash:
265 if self._slash:
266 return util.pconvert(path)
266 return util.pconvert(path)
267 return path
267 return path
268
268
269 def __getitem__(self, key):
269 def __getitem__(self, key):
270 '''Return the current state of key (a filename) in the dirstate.
270 '''Return the current state of key (a filename) in the dirstate.
271
271
272 States are:
272 States are:
273 n normal
273 n normal
274 m needs merging
274 m needs merging
275 r marked for removal
275 r marked for removal
276 a marked for addition
276 a marked for addition
277 ? not tracked
277 ? not tracked
278 '''
278 '''
279 return self._map.get(key, (b"?",))[0]
279 return self._map.get(key, (b"?",))[0]
280
280
    def __contains__(self, key):
        # True if key (a filename) is tracked in the dirstate map.
        return key in self._map
283
283
    def __iter__(self):
        # Iterate tracked filenames in sorted order.
        return iter(sorted(self._map))
286
286
    def items(self):
        # Yield (filename, dirstatetuple) pairs.
        return pycompat.iteritems(self._map)

    # Python 2 compatibility alias.
    iteritems = items
291
291
    def parents(self):
        # Both parents, each run through the node validator.
        return [self._validate(p) for p in self._pl]
294
294
    def p1(self):
        # First (working-copy) parent.
        return self._validate(self._pl[0])
297
297
    def p2(self):
        # Second (merge) parent; nullid when not merging.
        return self._validate(self._pl[1])
300
300
    def branch(self):
        # Current branch name in local encoding.
        return encoding.tolocal(self._branch)
303
303
    def setparents(self, p1, p2=nullid):
        """Set dirstate parents to p1 and p2.

        When moving from two parents to one, 'm' merged entries are
        adjusted to normal and previous copy records discarded and
        returned by the call.

        See localrepo.setparents()
        """
        if self._parentwriters == 0:
            raise ValueError(
                b"cannot set dirstate parent outside of "
                b"dirstate.parentchange context manager"
            )

        self._dirty = True
        oldp2 = self._pl[1]
        if self._origpl is None:
            self._origpl = self._pl
        self._map.setparents(p1, p2)
        copies = {}
        if oldp2 != nullid and p2 == nullid:
            # Leaving a merge state: clean up merge-specific markers.
            candidatefiles = self._map.nonnormalset.union(
                self._map.otherparentset
            )
            for f in candidatefiles:
                s = self._map.get(f)
                if s is None:
                    continue

                # Discard 'm' markers when moving away from a merge state
                if s[0] == b'm':
                    source = self._map.copymap.get(f)
                    if source:
                        copies[f] = source
                    self.normallookup(f)
                # Also fix up otherparent markers
                elif s[0] == b'n' and s[2] == -2:
                    source = self._map.copymap.get(f)
                    if source:
                        copies[f] = source
                    self.add(f)
        return copies
347
347
    def setbranch(self, branch):
        """Set the current branch, persisting it to .hg/branch."""
        self.__class__._branch.set(self, encoding.fromlocal(branch))
        f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
        try:
            f.write(self._branch + b'\n')
            f.close()

            # make sure filecache has the correct stat info for _branch after
            # replacing the underlying file
            ce = self._filecache[b'_branch']
            if ce:
                ce.refresh()
        except:  # re-raises
            f.discard()
            raise
363
363
    def invalidate(self):
        '''Causes the next access to reread the dirstate.

        This is different from localrepo.invalidatedirstate() because it always
        rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
        check whether the dirstate has changed before rereading it.'''

        # Drop the cached properties so they are recomputed on next access.
        for a in ("_map", "_branch", "_ignore"):
            if a in self.__dict__:
                delattr(self, a)
        self._lastnormaltime = 0
        self._dirty = False
        self._updatedfiles.clear()
        self._parentwriters = 0
        self._origpl = None
379
379
    def copy(self, source, dest):
        """Mark dest as a copy of source. Unmark dest if source is None."""
        if source == dest:
            return
        self._dirty = True
        if source is not None:
            self._map.copymap[dest] = source
            self._updatedfiles.add(source)
            self._updatedfiles.add(dest)
        elif self._map.copymap.pop(dest, None):
            # Only mark dest updated if there actually was a record to drop.
            self._updatedfiles.add(dest)
391
391
392 def copied(self, file):
392 def copied(self, file):
393 return self._map.copymap.get(file, None)
393 return self._map.copymap.get(file, None)
394
394
    def copies(self):
        # The full dest -> source copy map.
        return self._map.copymap
397
397
    def _addpath(self, f, state, mode, size, mtime):
        """Record f in the dirstate with the given state and stat data,
        validating the filename and rejecting file/directory clashes."""
        oldstate = self[f]
        if state == b'a' or oldstate == b'r':
            scmutil.checkfilename(f)
            # f must not shadow an existing tracked directory...
            if self._map.hastrackeddir(f):
                raise error.Abort(
                    _(b'directory %r already in dirstate') % pycompat.bytestr(f)
                )
            # shadows
            for d in util.finddirs(f):
                if self._map.hastrackeddir(d):
                    break
                entry = self._map.get(d)
                if entry is not None and entry[0] != b'r':
                    # ...nor may any ancestor of f be a tracked file.
                    raise error.Abort(
                        _(b'file %r in dirstate clashes with %r')
                        % (pycompat.bytestr(d), pycompat.bytestr(f))
                    )
        self._dirty = True
        self._updatedfiles.add(f)
        self._map.addfile(f, oldstate, state, mode, size, mtime)
419
419
    def normal(self, f, parentfiledata=None):
        '''Mark a file normal and clean.

        parentfiledata: (mode, size, mtime) of the clean file

        parentfiledata should be computed from memory (for mode,
        size), as or close as possible from the point where we
        determined the file was clean, to limit the risk of the
        file having been changed by an external process between the
        moment where the file was determined to be clean and now.'''
        if parentfiledata:
            (mode, size, mtime) = parentfiledata
        else:
            s = os.lstat(self._join(f))
            mode = s.st_mode
            size = s.st_size
            mtime = s[stat.ST_MTIME]
        # size/mtime are masked to fit the dirstate's 31-bit fields.
        self._addpath(f, b'n', mode, size & _rangemask, mtime & _rangemask)
        self._map.copymap.pop(f, None)
        if f in self._map.nonnormalset:
            self._map.nonnormalset.remove(f)
        if mtime > self._lastnormaltime:
            # Remember the most recent modification timeslot for status(),
            # to make sure we won't miss future size-preserving file content
            # modifications that happen within the same timeslot.
            self._lastnormaltime = mtime
446
446
    def normallookup(self, f):
        '''Mark a file normal, but possibly dirty.'''
        if self._pl[1] != nullid:
            # if there is a merge going on and the file was either
            # in state 'm' (-1) or coming from other parent (-2) before
            # being removed, restore that state.
            entry = self._map.get(f)
            if entry is not None:
                if entry[0] == b'r' and entry[2] in (-1, -2):
                    source = self._map.copymap.get(f)
                    if entry[2] == -1:
                        self.merge(f)
                    elif entry[2] == -2:
                        self.otherparent(f)
                    if source:
                        self.copy(source, f)
                    return
                if entry[0] == b'm' or entry[0] == b'n' and entry[2] == -2:
                    return
        # size=-1/mtime=-1 force a later content comparison in status().
        self._addpath(f, b'n', 0, -1, -1)
        self._map.copymap.pop(f, None)
468
468
    def otherparent(self, f):
        '''Mark as coming from the other parent, always dirty.'''
        if self._pl[1] == nullid:
            raise error.Abort(
                _(b"setting %r to other parent only allowed in merges") % f
            )
        if f in self and self[f] == b'n':
            # merge-like
            self._addpath(f, b'm', 0, -2, -1)
        else:
            # add-like
            self._addpath(f, b'n', 0, -2, -1)
        self._map.copymap.pop(f, None)
482
482
    def add(self, f):
        '''Mark a file added.'''
        self._addpath(f, b'a', 0, -1, -1)
        # An added file cannot be a copy target anymore.
        self._map.copymap.pop(f, None)
487
487
    def remove(self, f):
        '''Mark a file removed.'''
        self._dirty = True
        oldstate = self[f]
        size = 0
        if self._pl[1] != nullid:
            entry = self._map.get(f)
            if entry is not None:
                # backup the previous state in the size field:
                # -1 means merged, -2 means from the other parent.
                if entry[0] == b'm':  # merge
                    size = -1
                elif entry[0] == b'n' and entry[2] == -2:  # other parent
                    size = -2
                    self._map.otherparentset.add(f)
        self._updatedfiles.add(f)
        self._map.removefile(f, oldstate, size)
        if size == 0:
            # No merge state to preserve, so copy records can be dropped.
            self._map.copymap.pop(f, None)
506
506
507 def merge(self, f):
507 def merge(self, f):
508 '''Mark a file merged.'''
508 '''Mark a file merged.'''
509 if self._pl[1] == nullid:
509 if self._pl[1] == nullid:
510 return self.normallookup(f)
510 return self.normallookup(f)
511 return self.otherparent(f)
511 return self.otherparent(f)
512
512
    def drop(self, f):
        '''Drop a file from the dirstate'''
        oldstate = self[f]
        # dropfile reports whether anything was actually removed.
        if self._map.dropfile(f, oldstate):
            self._dirty = True
            self._updatedfiles.add(f)
            self._map.copymap.pop(f, None)
520
520
    def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
        """Determine the case-folded form of path by consulting the
        filesystem, caching the result in storemap (keyed by normed)."""
        if exists is None:
            exists = os.path.lexists(os.path.join(self._root, path))
        if not exists:
            # Maybe a path component exists
            if not ignoremissing and b'/' in path:
                d, f = path.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, None)
                folded = d + b"/" + f
            else:
                # No path components, preserve original case
                folded = path
        else:
            # recursively normalize leading directory components
            # against dirstate
            if b'/' in normed:
                d, f = normed.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, True)
                r = self._root + b"/" + d
                folded = d + b"/" + util.fspath(f, r)
            else:
                folded = util.fspath(normed, self._root)
            # Only cache results confirmed against the filesystem.
            storemap[normed] = folded

        return folded
546
546
    def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
        """Case-normalize a file path, using the file fold map as cache."""
        normed = util.normcase(path)
        folded = self._map.filefoldmap.get(normed, None)
        if folded is None:
            if isknown:
                # Came from a disk walk, so the given case is authoritative.
                folded = path
            else:
                folded = self._discoverpath(
                    path, normed, ignoremissing, exists, self._map.filefoldmap
                )
        return folded
558
558
    def _normalize(self, path, isknown, ignoremissing=False, exists=None):
        """Case-normalize a path that may name a file or a directory."""
        normed = util.normcase(path)
        folded = self._map.filefoldmap.get(normed, None)
        if folded is None:
            folded = self._map.dirfoldmap.get(normed, None)
        if folded is None:
            if isknown:
                folded = path
            else:
                # store discovered result in dirfoldmap so that future
                # normalizefile calls don't start matching directories
                folded = self._discoverpath(
                    path, normed, ignoremissing, exists, self._map.dirfoldmap
                )
        return folded
574
574
575 def normalize(self, path, isknown=False, ignoremissing=False):
575 def normalize(self, path, isknown=False, ignoremissing=False):
576 '''
576 '''
577 normalize the case of a pathname when on a casefolding filesystem
577 normalize the case of a pathname when on a casefolding filesystem
578
578
579 isknown specifies whether the filename came from walking the
579 isknown specifies whether the filename came from walking the
580 disk, to avoid extra filesystem access.
580 disk, to avoid extra filesystem access.
581
581
582 If ignoremissing is True, missing path are returned
582 If ignoremissing is True, missing path are returned
583 unchanged. Otherwise, we try harder to normalize possibly
583 unchanged. Otherwise, we try harder to normalize possibly
584 existing path components.
584 existing path components.
585
585
586 The normalized case is determined based on the following precedence:
586 The normalized case is determined based on the following precedence:
587
587
588 - version of name already stored in the dirstate
588 - version of name already stored in the dirstate
589 - version of name stored on disk
589 - version of name stored on disk
590 - version provided via command arguments
590 - version provided via command arguments
591 '''
591 '''
592
592
593 if self._checkcase:
593 if self._checkcase:
594 return self._normalize(path, isknown, ignoremissing)
594 return self._normalize(path, isknown, ignoremissing)
595 return path
595 return path
596
596
597 def clear(self):
597 def clear(self):
598 self._map.clear()
598 self._map.clear()
599 self._lastnormaltime = 0
599 self._lastnormaltime = 0
600 self._updatedfiles.clear()
600 self._updatedfiles.clear()
601 self._dirty = True
601 self._dirty = True
602
602
    def rebuild(self, parent, allfiles, changedfiles=None):
        """Reset the dirstate so it tracks ``parent``.

        ``allfiles`` is the full set of files that should end up tracked.
        ``changedfiles``, when given, limits the per-file update to that
        subset; files in it that are not in ``allfiles`` are dropped.
        """
        if changedfiles is None:
            # Rebuild entire dirstate
            changedfiles = allfiles
        # clear() resets _lastnormaltime; preserve it across the rebuild
        lastnormaltime = self._lastnormaltime
        self.clear()
        self._lastnormaltime = lastnormaltime

        # remember the pre-rebuild parents so _writedirstate can notify
        # parent-change callbacks
        if self._origpl is None:
            self._origpl = self._pl
        self._map.setparents(parent, nullid)
        for f in changedfiles:
            if f in allfiles:
                self.normallookup(f)
            else:
                self.drop(f)

        self._dirty = True
621
621
    def identity(self):
        '''Return identity of dirstate itself to detect changing in storage

        If identity of previous dirstate is equal to this, writing
        changes based on the former dirstate out can keep consistency.
        '''
        # delegated to the dirstate map; presumably captured when the
        # dirstate file was read -- confirm in the dirstatemap implementation
        return self._map.identity
629
629
    def write(self, tr):
        """Write in-memory dirstate changes to disk.

        No-op when nothing is dirty. With a transaction ``tr``, the write
        is delayed via a file generator; otherwise the dirstate file is
        written immediately (atomically, with timestamp-ambiguity checking).
        """
        if not self._dirty:
            return

        filename = self._filename
        if tr:
            # 'dirstate.write()' is not only for writing in-memory
            # changes out, but also for dropping ambiguous timestamp.
            # delayed writing re-raise "ambiguous timestamp issue".
            # See also the wiki page below for detail:
            # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan

            # emulate dropping timestamp in 'parsers.pack_dirstate'
            now = _getfsnow(self._opener)
            self._map.clearambiguoustimes(self._updatedfiles, now)

            # emulate that all 'dirstate.normal' results are written out
            self._lastnormaltime = 0
            self._updatedfiles.clear()

            # delay writing in-memory changes out
            tr.addfilegenerator(
                b'dirstate',
                (self._filename,),
                self._writedirstate,
                location=b'plain',
            )
            return

        # no transaction: write the file right away
        st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
        self._writedirstate(st)
661
661
    def addparentchangecallback(self, category, callback):
        """add a callback to be called when the wd parents are changed

        Callback will be called with the following arguments:
            dirstate, (oldp1, oldp2), (newp1, newp2)

        Category is a unique identifier to allow overwriting an old callback
        with a newer callback.
        """
        # later registrations under the same category replace earlier ones
        self._plchangecallbacks[category] = callback
672
672
    def _writedirstate(self, st):
        """Serialize the dirstate map to the already-open file ``st``.

        Also notifies registered parent-change callbacks and honors the
        ``debug.dirstate.delaywrite`` option, which stalls the write until
        'now' has moved past any entry's mtime.
        """
        # notify callbacks about parents change
        if self._origpl is not None and self._origpl != self._pl:
            # sorted() makes the callback invocation order deterministic
            for c, callback in sorted(
                pycompat.iteritems(self._plchangecallbacks)
            ):
                callback(self, self._origpl, self._pl)
            self._origpl = None
        # use the modification time of the newly created temporary file as the
        # filesystem's notion of 'now'
        now = util.fstat(st)[stat.ST_MTIME] & _rangemask

        # enough 'delaywrite' prevents 'pack_dirstate' from dropping
        # timestamp of each entries in dirstate, because of 'now > mtime'
        delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
        if delaywrite > 0:
            # do we have any files to delay for?
            for f, e in pycompat.iteritems(self._map):
                if e[0] == b'n' and e[3] == now:
                    import time  # to avoid useless import

                    # rather than sleep n seconds, sleep until the next
                    # multiple of n seconds
                    clock = time.time()
                    start = int(clock) - (int(clock) % delaywrite)
                    end = start + delaywrite
                    time.sleep(end - clock)
                    now = end  # trust our estimate that the end is near now
                    break

        self._map.write(st, now)
        self._lastnormaltime = 0
        self._dirty = False
706
706
707 def _dirignore(self, f):
707 def _dirignore(self, f):
708 if self._ignore(f):
708 if self._ignore(f):
709 return True
709 return True
710 for p in util.finddirs(f):
710 for p in util.finddirs(f):
711 if self._ignore(p):
711 if self._ignore(p):
712 return True
712 return True
713 return False
713 return False
714
714
715 def _ignorefiles(self):
715 def _ignorefiles(self):
716 files = []
716 files = []
717 if os.path.exists(self._join(b'.hgignore')):
717 if os.path.exists(self._join(b'.hgignore')):
718 files.append(self._join(b'.hgignore'))
718 files.append(self._join(b'.hgignore'))
719 for name, path in self._ui.configitems(b"ui"):
719 for name, path in self._ui.configitems(b"ui"):
720 if name == b'ignore' or name.startswith(b'ignore.'):
720 if name == b'ignore' or name.startswith(b'ignore.'):
721 # we need to use os.path.join here rather than self._join
721 # we need to use os.path.join here rather than self._join
722 # because path is arbitrary and user-specified
722 # because path is arbitrary and user-specified
723 files.append(os.path.join(self._rootdir, util.expandpath(path)))
723 files.append(os.path.join(self._rootdir, util.expandpath(path)))
724 return files
724 return files
725
725
    def _ignorefileandline(self, f):
        """Return (patternfile, lineno, line) for the first ignore pattern
        matching ``f``, or (None, -1, b"") when nothing matches.

        Ignore files are processed breadth-first; ``subinclude`` patterns
        queue additional files, with ``visited`` preventing re-processing.
        """
        files = collections.deque(self._ignorefiles())
        visited = set()
        while files:
            i = files.popleft()
            patterns = matchmod.readpatternfile(
                i, self._ui.warn, sourceinfo=True
            )
            for pattern, lineno, line in patterns:
                kind, p = matchmod._patsplit(pattern, b'glob')
                if kind == b"subinclude":
                    # queue the referenced file instead of matching directly
                    if p not in visited:
                        files.append(p)
                    continue
                m = matchmod.match(
                    self._root, b'', [], [pattern], warn=self._ui.warn
                )
                if m(f):
                    return (i, lineno, line)
            visited.add(i)
        return (None, -1, b"")
747
747
    def _walkexplicit(self, match, subrepos):
        '''Get stat data about the files explicitly specified by match.

        Return a triple (results, dirsfound, dirsnotfound).
        - results is a mapping from filename to stat result. It also contains
          listings mapping subrepos and .hg to None.
        - dirsfound is a list of files found to be directories.
        - dirsnotfound is a list of files that the dirstate thinks are
          directories and that were not found.'''

        def badtype(mode):
            # translate an unsupported st_mode into a human-readable message
            kind = _(b'unknown')
            if stat.S_ISCHR(mode):
                kind = _(b'character device')
            elif stat.S_ISBLK(mode):
                kind = _(b'block device')
            elif stat.S_ISFIFO(mode):
                kind = _(b'fifo')
            elif stat.S_ISSOCK(mode):
                kind = _(b'socket')
            elif stat.S_ISDIR(mode):
                kind = _(b'directory')
            return _(b'unsupported file type (type is %s)') % kind

        # local aliases to avoid repeated attribute lookups in the loop below
        matchedir = match.explicitdir
        badfn = match.bad
        dmap = self._map
        lstat = os.lstat
        getkind = stat.S_IFMT
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join
        dirsfound = []
        foundadd = dirsfound.append
        dirsnotfound = []
        notfoundadd = dirsnotfound.append

        if not match.isexact() and self._checkcase:
            normalize = self._normalize
        else:
            normalize = None

        # drop explicit files that live inside a subrepo; both lists are
        # sorted so a single merge-style pass suffices
        files = sorted(match.files())
        subrepos.sort()
        i, j = 0, 0
        while i < len(files) and j < len(subrepos):
            subpath = subrepos[j] + b"/"
            if files[i] < subpath:
                i += 1
                continue
            while i < len(files) and files[i].startswith(subpath):
                del files[i]
            j += 1

        if not files or b'' in files:
            files = [b'']
            # constructing the foldmap is expensive, so don't do it for the
            # common case where files is ['']
            normalize = None
        results = dict.fromkeys(subrepos)
        results[b'.hg'] = None

        for ff in files:
            if normalize:
                nf = normalize(ff, False, True)
            else:
                nf = ff
            if nf in results:
                continue

            try:
                st = lstat(join(nf))
                kind = getkind(st.st_mode)
                if kind == dirkind:
                    if nf in dmap:
                        # file replaced by dir on disk but still in dirstate
                        results[nf] = None
                    if matchedir:
                        matchedir(nf)
                    foundadd((nf, ff))
                elif kind == regkind or kind == lnkkind:
                    results[nf] = st
                else:
                    badfn(ff, badtype(kind))
                    if nf in dmap:
                        results[nf] = None
            except OSError as inst:  # nf not found on disk - it is dirstate only
                if nf in dmap:  # does it exactly match a missing file?
                    results[nf] = None
                else:  # does it match a missing directory?
                    if self._map.hasdir(nf):
                        if matchedir:
                            matchedir(nf)
                        notfoundadd(nf)
                    else:
                        badfn(ff, encoding.strtolocal(inst.strerror))

        # match.files() may contain explicitly-specified paths that shouldn't
        # be taken; drop them from the list of files found. dirsfound/notfound
        # aren't filtered here because they will be tested later.
        if match.anypats():
            for f in list(results):
                if f == b'.hg' or f in subrepos:
                    # keep sentinel to disable further out-of-repo walks
                    continue
                if not match(f):
                    del results[f]

        # Case insensitive filesystems cannot rely on lstat() failing to detect
        # a case-only rename. Prune the stat object for any file that does not
        # match the case in the filesystem, if there are multiple files that
        # normalize to the same path.
        if match.isexact() and self._checkcase:
            normed = {}

            for f, st in pycompat.iteritems(results):
                if st is None:
                    continue

                nc = util.normcase(f)
                paths = normed.get(nc)

                if paths is None:
                    paths = set()
                    normed[nc] = paths

                paths.add(f)

            for norm, paths in pycompat.iteritems(normed):
                if len(paths) > 1:
                    for path in paths:
                        folded = self._discoverpath(
                            path, norm, True, None, self._map.dirfoldmap
                        )
                        if path != folded:
                            results[path] = None

        return results, dirsfound, dirsnotfound
887
887
    def walk(self, match, subrepos, unknown, ignored, full=True):
        '''
        Walk recursively through the directory tree, finding all files
        matched by match.

        If full is False, maybe skip some known-clean files.

        Return a dict mapping filename to stat-like object (either
        mercurial.osutil.stat instance or return value of os.stat()).

        '''
        # full is a flag that extensions that hook into walk can use -- this
        # implementation doesn't use it at all. This satisfies the contract
        # because we only guarantee a "maybe".

        # pick the ignore predicates for the requested listing mode
        if ignored:
            ignore = util.never
            dirignore = util.never
        elif unknown:
            ignore = self._ignore
            dirignore = self._dirignore
        else:
            # if not unknown and not ignored, drop dir recursion and step 2
            ignore = util.always
            dirignore = util.always

        # local aliases to avoid repeated attribute lookups during traversal
        matchfn = match.matchfn
        matchalways = match.always()
        matchtdir = match.traversedir
        dmap = self._map
        listdir = util.listdir
        lstat = os.lstat
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join

        exact = skipstep3 = False
        if match.isexact():  # match.exact
            exact = True
            dirignore = util.always  # skip step 2
        elif match.prefix():  # match.match, no patterns
            skipstep3 = True

        if not exact and self._checkcase:
            normalize = self._normalize
            normalizefile = self._normalizefile
            skipstep3 = False
        else:
            normalize = self._normalize
            normalizefile = None

        # step 1: find all explicit files
        results, work, dirsnotfound = self._walkexplicit(match, subrepos)

        skipstep3 = skipstep3 and not (work or dirsnotfound)
        work = [d for d in work if not dirignore(d[0])]

        # step 2: visit subdirectories
        def traverse(work, alreadynormed):
            wadd = work.append
            while work:
                tracing.counter('dirstate.walk work', len(work))
                nd = work.pop()
                visitentries = match.visitchildrenset(nd)
                if not visitentries:
                    continue
                if visitentries == b'this' or visitentries == b'all':
                    visitentries = None
                skip = None
                if nd != b'':
                    skip = b'.hg'
                try:
                    with tracing.log('dirstate.walk.traverse listdir %s', nd):
                        entries = listdir(join(nd), stat=True, skip=skip)
                except OSError as inst:
                    if inst.errno in (errno.EACCES, errno.ENOENT):
                        match.bad(
                            self.pathto(nd), encoding.strtolocal(inst.strerror)
                        )
                        continue
                    raise
                for f, kind, st in entries:
                    # Some matchers may return files in the visitentries set,
                    # instead of 'this', if the matcher explicitly mentions them
                    # and is not an exactmatcher. This is acceptable; we do not
                    # make any hard assumptions about file-or-directory below
                    # based on the presence of `f` in visitentries. If
                    # visitchildrenset returned a set, we can always skip the
                    # entries *not* in the set it provided regardless of whether
                    # they're actually a file or a directory.
                    if visitentries and f not in visitentries:
                        continue
                    if normalizefile:
                        # even though f might be a directory, we're only
                        # interested in comparing it to files currently in the
                        # dmap -- therefore normalizefile is enough
                        nf = normalizefile(
                            nd and (nd + b"/" + f) or f, True, True
                        )
                    else:
                        nf = nd and (nd + b"/" + f) or f
                    if nf not in results:
                        if kind == dirkind:
                            if not ignore(nf):
                                if matchtdir:
                                    matchtdir(nf)
                                wadd(nf)
                            if nf in dmap and (matchalways or matchfn(nf)):
                                results[nf] = None
                        elif kind == regkind or kind == lnkkind:
                            if nf in dmap:
                                if matchalways or matchfn(nf):
                                    results[nf] = st
                            elif (matchalways or matchfn(nf)) and not ignore(
                                nf
                            ):
                                # unknown file -- normalize if necessary
                                if not alreadynormed:
                                    nf = normalize(nf, False, True)
                                results[nf] = st
                        elif nf in dmap and (matchalways or matchfn(nf)):
                            results[nf] = None

        for nd, d in work:
            # alreadynormed means that processwork doesn't have to do any
            # expensive directory normalization
            alreadynormed = not normalize or nd == d
            traverse([d], alreadynormed)

        # drop the sentinels inserted by _walkexplicit
        for s in subrepos:
            del results[s]
        del results[b'.hg']

        # step 3: visit remaining files from dmap
        if not skipstep3 and not exact:
            # If a dmap file is not in results yet, it was either
            # a) not matching matchfn b) ignored, c) missing, or d) under a
            # symlink directory.
            if not results and matchalways:
                visit = [f for f in dmap]
            else:
                visit = [f for f in dmap if f not in results and matchfn(f)]
            visit.sort()

            if unknown:
                # unknown == True means we walked all dirs under the roots
                # that wasn't ignored, and everything that matched was stat'ed
                # and is already in results.
                # The rest must thus be ignored or under a symlink.
                audit_path = pathutil.pathauditor(self._root, cached=True)

                for nf in iter(visit):
                    # If a stat for the same file was already added with a
                    # different case, don't add one for this, since that would
                    # make it appear as if the file exists under both names
                    # on disk.
                    if (
                        normalizefile
                        and normalizefile(nf, True, True) in results
                    ):
                        results[nf] = None
                    # Report ignored items in the dmap as long as they are not
                    # under a symlink directory.
                    elif audit_path.check(nf):
                        try:
                            results[nf] = lstat(join(nf))
                            # file was just ignored, no links, and exists
                        except OSError:
                            # file doesn't exist
                            results[nf] = None
                    else:
                        # It's either missing or under a symlink directory
                        # which we in this case report as missing
                        results[nf] = None
            else:
                # We may not have walked the full directory tree above,
                # so stat and check everything we missed.
                iv = iter(visit)
                for st in util.statfiles([join(i) for i in visit]):
                    results[next(iv)] = st
        return results
1070
1070
1071 def status(self, match, subrepos, ignored, clean, unknown):
1071 def status(self, match, subrepos, ignored, clean, unknown):
1072 '''Determine the status of the working copy relative to the
1072 '''Determine the status of the working copy relative to the
1073 dirstate and return a pair of (unsure, status), where status is of type
1073 dirstate and return a pair of (unsure, status), where status is of type
1074 scmutil.status and:
1074 scmutil.status and:
1075
1075
1076 unsure:
1076 unsure:
1077 files that might have been modified since the dirstate was
1077 files that might have been modified since the dirstate was
1078 written, but need to be read to be sure (size is the same
1078 written, but need to be read to be sure (size is the same
1079 but mtime differs)
1079 but mtime differs)
1080 status.modified:
1080 status.modified:
1081 files that have definitely been modified since the dirstate
1081 files that have definitely been modified since the dirstate
1082 was written (different size or mode)
1082 was written (different size or mode)
1083 status.clean:
1083 status.clean:
1084 files that have definitely not been modified since the
1084 files that have definitely not been modified since the
1085 dirstate was written
1085 dirstate was written
1086 '''
1086 '''
1087 listignored, listclean, listunknown = ignored, clean, unknown
1087 listignored, listclean, listunknown = ignored, clean, unknown
1088 lookup, modified, added, unknown, ignored = [], [], [], [], []
1088 lookup, modified, added, unknown, ignored = [], [], [], [], []
1089 removed, deleted, clean = [], [], []
1089 removed, deleted, clean = [], [], []
1090
1090
1091 dmap = self._map
1091 dmap = self._map
1092 dmap.preload()
1092 dmap.preload()
1093
1093
1094 use_rust = True
1094 use_rust = True
1095 if rustmod is None:
1095 if rustmod is None:
1096 use_rust = False
1096 use_rust = False
1097 elif subrepos:
1097 elif subrepos:
1098 use_rust = False
1098 use_rust = False
1099 if bool(listunknown):
1099 if bool(listunknown):
1100 # Pathauditor does not exist yet in Rust, unknown files
1100 # Pathauditor does not exist yet in Rust, unknown files
1101 # can't be trusted.
1101 # can't be trusted.
1102 use_rust = False
1102 use_rust = False
1103 elif self._ignorefiles() and listignored:
1103 elif self._ignorefiles() and listignored:
1104 # Rust has no ignore mechanism yet, so don't use Rust for
1104 # Rust has no ignore mechanism yet, so don't use Rust for
1105 # commands that need ignore.
1105 # commands that need ignore.
1106 use_rust = False
1106 use_rust = False
1107 elif not match.always():
1107 elif not match.always():
1108 # Matchers have yet to be implemented
1108 # Matchers have yet to be implemented
1109 use_rust = False
1109 use_rust = False
1110
1110
1111 if use_rust:
1111 if use_rust:
1112 # Force Rayon (Rust parallelism library) to respect the number of
1113 # workers. This is a temporary workaround until Rust code knows
1114 # how to read the config file.
1115 numcpus = self._ui.configint("worker", "numcpus")
1116 if numcpus is not None:
1117 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1118
1119 workers_enabled = self._ui.configbool("worker", "enabled", True)
1120 if not workers_enabled:
1121 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1122
1112 (
1123 (
1113 lookup,
1124 lookup,
1114 modified,
1125 modified,
1115 added,
1126 added,
1116 removed,
1127 removed,
1117 deleted,
1128 deleted,
1118 unknown,
1129 unknown,
1119 clean,
1130 clean,
1120 ) = rustmod.status(
1131 ) = rustmod.status(
1121 dmap._rustmap,
1132 dmap._rustmap,
1122 self._rootdir,
1133 self._rootdir,
1123 bool(listclean),
1134 bool(listclean),
1124 self._lastnormaltime,
1135 self._lastnormaltime,
1125 self._checkexec,
1136 self._checkexec,
1126 )
1137 )
1127
1138
1128 status = scmutil.status(
1139 status = scmutil.status(
1129 modified=modified,
1140 modified=modified,
1130 added=added,
1141 added=added,
1131 removed=removed,
1142 removed=removed,
1132 deleted=deleted,
1143 deleted=deleted,
1133 unknown=unknown,
1144 unknown=unknown,
1134 ignored=ignored,
1145 ignored=ignored,
1135 clean=clean,
1146 clean=clean,
1136 )
1147 )
1137 return (lookup, status)
1148 return (lookup, status)
1138
1149
1139 dcontains = dmap.__contains__
1150 dcontains = dmap.__contains__
1140 dget = dmap.__getitem__
1151 dget = dmap.__getitem__
1141 ladd = lookup.append # aka "unsure"
1152 ladd = lookup.append # aka "unsure"
1142 madd = modified.append
1153 madd = modified.append
1143 aadd = added.append
1154 aadd = added.append
1144 uadd = unknown.append
1155 uadd = unknown.append
1145 iadd = ignored.append
1156 iadd = ignored.append
1146 radd = removed.append
1157 radd = removed.append
1147 dadd = deleted.append
1158 dadd = deleted.append
1148 cadd = clean.append
1159 cadd = clean.append
1149 mexact = match.exact
1160 mexact = match.exact
1150 dirignore = self._dirignore
1161 dirignore = self._dirignore
1151 checkexec = self._checkexec
1162 checkexec = self._checkexec
1152 copymap = self._map.copymap
1163 copymap = self._map.copymap
1153 lastnormaltime = self._lastnormaltime
1164 lastnormaltime = self._lastnormaltime
1154
1165
1155 # We need to do full walks when either
1166 # We need to do full walks when either
1156 # - we're listing all clean files, or
1167 # - we're listing all clean files, or
1157 # - match.traversedir does something, because match.traversedir should
1168 # - match.traversedir does something, because match.traversedir should
1158 # be called for every dir in the working dir
1169 # be called for every dir in the working dir
1159 full = listclean or match.traversedir is not None
1170 full = listclean or match.traversedir is not None
1160 for fn, st in pycompat.iteritems(
1171 for fn, st in pycompat.iteritems(
1161 self.walk(match, subrepos, listunknown, listignored, full=full)
1172 self.walk(match, subrepos, listunknown, listignored, full=full)
1162 ):
1173 ):
1163 if not dcontains(fn):
1174 if not dcontains(fn):
1164 if (listignored or mexact(fn)) and dirignore(fn):
1175 if (listignored or mexact(fn)) and dirignore(fn):
1165 if listignored:
1176 if listignored:
1166 iadd(fn)
1177 iadd(fn)
1167 else:
1178 else:
1168 uadd(fn)
1179 uadd(fn)
1169 continue
1180 continue
1170
1181
1171 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1182 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1172 # written like that for performance reasons. dmap[fn] is not a
1183 # written like that for performance reasons. dmap[fn] is not a
1173 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1184 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1174 # opcode has fast paths when the value to be unpacked is a tuple or
1185 # opcode has fast paths when the value to be unpacked is a tuple or
1175 # a list, but falls back to creating a full-fledged iterator in
1186 # a list, but falls back to creating a full-fledged iterator in
1176 # general. That is much slower than simply accessing and storing the
1187 # general. That is much slower than simply accessing and storing the
1177 # tuple members one by one.
1188 # tuple members one by one.
1178 t = dget(fn)
1189 t = dget(fn)
1179 state = t[0]
1190 state = t[0]
1180 mode = t[1]
1191 mode = t[1]
1181 size = t[2]
1192 size = t[2]
1182 time = t[3]
1193 time = t[3]
1183
1194
1184 if not st and state in b"nma":
1195 if not st and state in b"nma":
1185 dadd(fn)
1196 dadd(fn)
1186 elif state == b'n':
1197 elif state == b'n':
1187 if (
1198 if (
1188 size >= 0
1199 size >= 0
1189 and (
1200 and (
1190 (size != st.st_size and size != st.st_size & _rangemask)
1201 (size != st.st_size and size != st.st_size & _rangemask)
1191 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1202 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1192 )
1203 )
1193 or size == -2 # other parent
1204 or size == -2 # other parent
1194 or fn in copymap
1205 or fn in copymap
1195 ):
1206 ):
1196 madd(fn)
1207 madd(fn)
1197 elif (
1208 elif (
1198 time != st[stat.ST_MTIME]
1209 time != st[stat.ST_MTIME]
1199 and time != st[stat.ST_MTIME] & _rangemask
1210 and time != st[stat.ST_MTIME] & _rangemask
1200 ):
1211 ):
1201 ladd(fn)
1212 ladd(fn)
1202 elif st[stat.ST_MTIME] == lastnormaltime:
1213 elif st[stat.ST_MTIME] == lastnormaltime:
1203 # fn may have just been marked as normal and it may have
1214 # fn may have just been marked as normal and it may have
1204 # changed in the same second without changing its size.
1215 # changed in the same second without changing its size.
1205 # This can happen if we quickly do multiple commits.
1216 # This can happen if we quickly do multiple commits.
1206 # Force lookup, so we don't miss such a racy file change.
1217 # Force lookup, so we don't miss such a racy file change.
1207 ladd(fn)
1218 ladd(fn)
1208 elif listclean:
1219 elif listclean:
1209 cadd(fn)
1220 cadd(fn)
1210 elif state == b'm':
1221 elif state == b'm':
1211 madd(fn)
1222 madd(fn)
1212 elif state == b'a':
1223 elif state == b'a':
1213 aadd(fn)
1224 aadd(fn)
1214 elif state == b'r':
1225 elif state == b'r':
1215 radd(fn)
1226 radd(fn)
1216
1227
1217 return (
1228 return (
1218 lookup,
1229 lookup,
1219 scmutil.status(
1230 scmutil.status(
1220 modified, added, removed, deleted, unknown, ignored, clean
1231 modified, added, removed, deleted, unknown, ignored, clean
1221 ),
1232 ),
1222 )
1233 )
1223
1234
1224 def matches(self, match):
1235 def matches(self, match):
1225 '''
1236 '''
1226 return files in the dirstate (in whatever state) filtered by match
1237 return files in the dirstate (in whatever state) filtered by match
1227 '''
1238 '''
1228 dmap = self._map
1239 dmap = self._map
1229 if match.always():
1240 if match.always():
1230 return dmap.keys()
1241 return dmap.keys()
1231 files = match.files()
1242 files = match.files()
1232 if match.isexact():
1243 if match.isexact():
1233 # fast path -- filter the other way around, since typically files is
1244 # fast path -- filter the other way around, since typically files is
1234 # much smaller than dmap
1245 # much smaller than dmap
1235 return [f for f in files if f in dmap]
1246 return [f for f in files if f in dmap]
1236 if match.prefix() and all(fn in dmap for fn in files):
1247 if match.prefix() and all(fn in dmap for fn in files):
1237 # fast path -- all the values are known to be files, so just return
1248 # fast path -- all the values are known to be files, so just return
1238 # that
1249 # that
1239 return list(files)
1250 return list(files)
1240 return [f for f in dmap if match(f)]
1251 return [f for f in dmap if match(f)]
1241
1252
1242 def _actualfilename(self, tr):
1253 def _actualfilename(self, tr):
1243 if tr:
1254 if tr:
1244 return self._pendingfilename
1255 return self._pendingfilename
1245 else:
1256 else:
1246 return self._filename
1257 return self._filename
1247
1258
1248 def savebackup(self, tr, backupname):
1259 def savebackup(self, tr, backupname):
1249 '''Save current dirstate into backup file'''
1260 '''Save current dirstate into backup file'''
1250 filename = self._actualfilename(tr)
1261 filename = self._actualfilename(tr)
1251 assert backupname != filename
1262 assert backupname != filename
1252
1263
1253 # use '_writedirstate' instead of 'write' to write changes certainly,
1264 # use '_writedirstate' instead of 'write' to write changes certainly,
1254 # because the latter omits writing out if transaction is running.
1265 # because the latter omits writing out if transaction is running.
1255 # output file will be used to create backup of dirstate at this point.
1266 # output file will be used to create backup of dirstate at this point.
1256 if self._dirty or not self._opener.exists(filename):
1267 if self._dirty or not self._opener.exists(filename):
1257 self._writedirstate(
1268 self._writedirstate(
1258 self._opener(filename, b"w", atomictemp=True, checkambig=True)
1269 self._opener(filename, b"w", atomictemp=True, checkambig=True)
1259 )
1270 )
1260
1271
1261 if tr:
1272 if tr:
1262 # ensure that subsequent tr.writepending returns True for
1273 # ensure that subsequent tr.writepending returns True for
1263 # changes written out above, even if dirstate is never
1274 # changes written out above, even if dirstate is never
1264 # changed after this
1275 # changed after this
1265 tr.addfilegenerator(
1276 tr.addfilegenerator(
1266 b'dirstate',
1277 b'dirstate',
1267 (self._filename,),
1278 (self._filename,),
1268 self._writedirstate,
1279 self._writedirstate,
1269 location=b'plain',
1280 location=b'plain',
1270 )
1281 )
1271
1282
1272 # ensure that pending file written above is unlinked at
1283 # ensure that pending file written above is unlinked at
1273 # failure, even if tr.writepending isn't invoked until the
1284 # failure, even if tr.writepending isn't invoked until the
1274 # end of this transaction
1285 # end of this transaction
1275 tr.registertmp(filename, location=b'plain')
1286 tr.registertmp(filename, location=b'plain')
1276
1287
1277 self._opener.tryunlink(backupname)
1288 self._opener.tryunlink(backupname)
1278 # hardlink backup is okay because _writedirstate is always called
1289 # hardlink backup is okay because _writedirstate is always called
1279 # with an "atomictemp=True" file.
1290 # with an "atomictemp=True" file.
1280 util.copyfile(
1291 util.copyfile(
1281 self._opener.join(filename),
1292 self._opener.join(filename),
1282 self._opener.join(backupname),
1293 self._opener.join(backupname),
1283 hardlink=True,
1294 hardlink=True,
1284 )
1295 )
1285
1296
1286 def restorebackup(self, tr, backupname):
1297 def restorebackup(self, tr, backupname):
1287 '''Restore dirstate by backup file'''
1298 '''Restore dirstate by backup file'''
1288 # this "invalidate()" prevents "wlock.release()" from writing
1299 # this "invalidate()" prevents "wlock.release()" from writing
1289 # changes of dirstate out after restoring from backup file
1300 # changes of dirstate out after restoring from backup file
1290 self.invalidate()
1301 self.invalidate()
1291 filename = self._actualfilename(tr)
1302 filename = self._actualfilename(tr)
1292 o = self._opener
1303 o = self._opener
1293 if util.samefile(o.join(backupname), o.join(filename)):
1304 if util.samefile(o.join(backupname), o.join(filename)):
1294 o.unlink(backupname)
1305 o.unlink(backupname)
1295 else:
1306 else:
1296 o.rename(backupname, filename, checkambig=True)
1307 o.rename(backupname, filename, checkambig=True)
1297
1308
1298 def clearbackup(self, tr, backupname):
1309 def clearbackup(self, tr, backupname):
1299 '''Clear backup file'''
1310 '''Clear backup file'''
1300 self._opener.unlink(backupname)
1311 self._opener.unlink(backupname)
1301
1312
1302
1313
1303 class dirstatemap(object):
1314 class dirstatemap(object):
1304 """Map encapsulating the dirstate's contents.
1315 """Map encapsulating the dirstate's contents.
1305
1316
1306 The dirstate contains the following state:
1317 The dirstate contains the following state:
1307
1318
1308 - `identity` is the identity of the dirstate file, which can be used to
1319 - `identity` is the identity of the dirstate file, which can be used to
1309 detect when changes have occurred to the dirstate file.
1320 detect when changes have occurred to the dirstate file.
1310
1321
1311 - `parents` is a pair containing the parents of the working copy. The
1322 - `parents` is a pair containing the parents of the working copy. The
1312 parents are updated by calling `setparents`.
1323 parents are updated by calling `setparents`.
1313
1324
1314 - the state map maps filenames to tuples of (state, mode, size, mtime),
1325 - the state map maps filenames to tuples of (state, mode, size, mtime),
1315 where state is a single character representing 'normal', 'added',
1326 where state is a single character representing 'normal', 'added',
1316 'removed', or 'merged'. It is read by treating the dirstate as a
1327 'removed', or 'merged'. It is read by treating the dirstate as a
1317 dict. File state is updated by calling the `addfile`, `removefile` and
1328 dict. File state is updated by calling the `addfile`, `removefile` and
1318 `dropfile` methods.
1329 `dropfile` methods.
1319
1330
1320 - `copymap` maps destination filenames to their source filename.
1331 - `copymap` maps destination filenames to their source filename.
1321
1332
1322 The dirstate also provides the following views onto the state:
1333 The dirstate also provides the following views onto the state:
1323
1334
1324 - `nonnormalset` is a set of the filenames that have state other
1335 - `nonnormalset` is a set of the filenames that have state other
1325 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1336 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1326
1337
1327 - `otherparentset` is a set of the filenames that are marked as coming
1338 - `otherparentset` is a set of the filenames that are marked as coming
1328 from the second parent when the dirstate is currently being merged.
1339 from the second parent when the dirstate is currently being merged.
1329
1340
1330 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1341 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1331 form that they appear as in the dirstate.
1342 form that they appear as in the dirstate.
1332
1343
1333 - `dirfoldmap` is a dict mapping normalized directory names to the
1344 - `dirfoldmap` is a dict mapping normalized directory names to the
1334 denormalized form that they appear as in the dirstate.
1345 denormalized form that they appear as in the dirstate.
1335 """
1346 """
1336
1347
1337 def __init__(self, ui, opener, root):
1348 def __init__(self, ui, opener, root):
1338 self._ui = ui
1349 self._ui = ui
1339 self._opener = opener
1350 self._opener = opener
1340 self._root = root
1351 self._root = root
1341 self._filename = b'dirstate'
1352 self._filename = b'dirstate'
1342
1353
1343 self._parents = None
1354 self._parents = None
1344 self._dirtyparents = False
1355 self._dirtyparents = False
1345
1356
1346 # for consistent view between _pl() and _read() invocations
1357 # for consistent view between _pl() and _read() invocations
1347 self._pendingmode = None
1358 self._pendingmode = None
1348
1359
1349 @propertycache
1360 @propertycache
1350 def _map(self):
1361 def _map(self):
1351 self._map = {}
1362 self._map = {}
1352 self.read()
1363 self.read()
1353 return self._map
1364 return self._map
1354
1365
1355 @propertycache
1366 @propertycache
1356 def copymap(self):
1367 def copymap(self):
1357 self.copymap = {}
1368 self.copymap = {}
1358 self._map
1369 self._map
1359 return self.copymap
1370 return self.copymap
1360
1371
1361 def clear(self):
1372 def clear(self):
1362 self._map.clear()
1373 self._map.clear()
1363 self.copymap.clear()
1374 self.copymap.clear()
1364 self.setparents(nullid, nullid)
1375 self.setparents(nullid, nullid)
1365 util.clearcachedproperty(self, b"_dirs")
1376 util.clearcachedproperty(self, b"_dirs")
1366 util.clearcachedproperty(self, b"_alldirs")
1377 util.clearcachedproperty(self, b"_alldirs")
1367 util.clearcachedproperty(self, b"filefoldmap")
1378 util.clearcachedproperty(self, b"filefoldmap")
1368 util.clearcachedproperty(self, b"dirfoldmap")
1379 util.clearcachedproperty(self, b"dirfoldmap")
1369 util.clearcachedproperty(self, b"nonnormalset")
1380 util.clearcachedproperty(self, b"nonnormalset")
1370 util.clearcachedproperty(self, b"otherparentset")
1381 util.clearcachedproperty(self, b"otherparentset")
1371
1382
1372 def items(self):
1383 def items(self):
1373 return pycompat.iteritems(self._map)
1384 return pycompat.iteritems(self._map)
1374
1385
1375 # forward for python2,3 compat
1386 # forward for python2,3 compat
1376 iteritems = items
1387 iteritems = items
1377
1388
1378 def __len__(self):
1389 def __len__(self):
1379 return len(self._map)
1390 return len(self._map)
1380
1391
1381 def __iter__(self):
1392 def __iter__(self):
1382 return iter(self._map)
1393 return iter(self._map)
1383
1394
1384 def get(self, key, default=None):
1395 def get(self, key, default=None):
1385 return self._map.get(key, default)
1396 return self._map.get(key, default)
1386
1397
1387 def __contains__(self, key):
1398 def __contains__(self, key):
1388 return key in self._map
1399 return key in self._map
1389
1400
1390 def __getitem__(self, key):
1401 def __getitem__(self, key):
1391 return self._map[key]
1402 return self._map[key]
1392
1403
1393 def keys(self):
1404 def keys(self):
1394 return self._map.keys()
1405 return self._map.keys()
1395
1406
1396 def preload(self):
1407 def preload(self):
1397 """Loads the underlying data, if it's not already loaded"""
1408 """Loads the underlying data, if it's not already loaded"""
1398 self._map
1409 self._map
1399
1410
1400 def addfile(self, f, oldstate, state, mode, size, mtime):
1411 def addfile(self, f, oldstate, state, mode, size, mtime):
1401 """Add a tracked file to the dirstate."""
1412 """Add a tracked file to the dirstate."""
1402 if oldstate in b"?r" and "_dirs" in self.__dict__:
1413 if oldstate in b"?r" and "_dirs" in self.__dict__:
1403 self._dirs.addpath(f)
1414 self._dirs.addpath(f)
1404 if oldstate == b"?" and "_alldirs" in self.__dict__:
1415 if oldstate == b"?" and "_alldirs" in self.__dict__:
1405 self._alldirs.addpath(f)
1416 self._alldirs.addpath(f)
1406 self._map[f] = dirstatetuple(state, mode, size, mtime)
1417 self._map[f] = dirstatetuple(state, mode, size, mtime)
1407 if state != b'n' or mtime == -1:
1418 if state != b'n' or mtime == -1:
1408 self.nonnormalset.add(f)
1419 self.nonnormalset.add(f)
1409 if size == -2:
1420 if size == -2:
1410 self.otherparentset.add(f)
1421 self.otherparentset.add(f)
1411
1422
1412 def removefile(self, f, oldstate, size):
1423 def removefile(self, f, oldstate, size):
1413 """
1424 """
1414 Mark a file as removed in the dirstate.
1425 Mark a file as removed in the dirstate.
1415
1426
1416 The `size` parameter is used to store sentinel values that indicate
1427 The `size` parameter is used to store sentinel values that indicate
1417 the file's previous state. In the future, we should refactor this
1428 the file's previous state. In the future, we should refactor this
1418 to be more explicit about what that state is.
1429 to be more explicit about what that state is.
1419 """
1430 """
1420 if oldstate not in b"?r" and "_dirs" in self.__dict__:
1431 if oldstate not in b"?r" and "_dirs" in self.__dict__:
1421 self._dirs.delpath(f)
1432 self._dirs.delpath(f)
1422 if oldstate == b"?" and "_alldirs" in self.__dict__:
1433 if oldstate == b"?" and "_alldirs" in self.__dict__:
1423 self._alldirs.addpath(f)
1434 self._alldirs.addpath(f)
1424 if "filefoldmap" in self.__dict__:
1435 if "filefoldmap" in self.__dict__:
1425 normed = util.normcase(f)
1436 normed = util.normcase(f)
1426 self.filefoldmap.pop(normed, None)
1437 self.filefoldmap.pop(normed, None)
1427 self._map[f] = dirstatetuple(b'r', 0, size, 0)
1438 self._map[f] = dirstatetuple(b'r', 0, size, 0)
1428 self.nonnormalset.add(f)
1439 self.nonnormalset.add(f)
1429
1440
1430 def dropfile(self, f, oldstate):
1441 def dropfile(self, f, oldstate):
1431 """
1442 """
1432 Remove a file from the dirstate. Returns True if the file was
1443 Remove a file from the dirstate. Returns True if the file was
1433 previously recorded.
1444 previously recorded.
1434 """
1445 """
1435 exists = self._map.pop(f, None) is not None
1446 exists = self._map.pop(f, None) is not None
1436 if exists:
1447 if exists:
1437 if oldstate != b"r" and "_dirs" in self.__dict__:
1448 if oldstate != b"r" and "_dirs" in self.__dict__:
1438 self._dirs.delpath(f)
1449 self._dirs.delpath(f)
1439 if "_alldirs" in self.__dict__:
1450 if "_alldirs" in self.__dict__:
1440 self._alldirs.delpath(f)
1451 self._alldirs.delpath(f)
1441 if "filefoldmap" in self.__dict__:
1452 if "filefoldmap" in self.__dict__:
1442 normed = util.normcase(f)
1453 normed = util.normcase(f)
1443 self.filefoldmap.pop(normed, None)
1454 self.filefoldmap.pop(normed, None)
1444 self.nonnormalset.discard(f)
1455 self.nonnormalset.discard(f)
1445 return exists
1456 return exists
1446
1457
1447 def clearambiguoustimes(self, files, now):
1458 def clearambiguoustimes(self, files, now):
1448 for f in files:
1459 for f in files:
1449 e = self.get(f)
1460 e = self.get(f)
1450 if e is not None and e[0] == b'n' and e[3] == now:
1461 if e is not None and e[0] == b'n' and e[3] == now:
1451 self._map[f] = dirstatetuple(e[0], e[1], e[2], -1)
1462 self._map[f] = dirstatetuple(e[0], e[1], e[2], -1)
1452 self.nonnormalset.add(f)
1463 self.nonnormalset.add(f)
1453
1464
1454 def nonnormalentries(self):
1465 def nonnormalentries(self):
1455 '''Compute the nonnormal dirstate entries from the dmap'''
1466 '''Compute the nonnormal dirstate entries from the dmap'''
1456 try:
1467 try:
1457 return parsers.nonnormalotherparententries(self._map)
1468 return parsers.nonnormalotherparententries(self._map)
1458 except AttributeError:
1469 except AttributeError:
1459 nonnorm = set()
1470 nonnorm = set()
1460 otherparent = set()
1471 otherparent = set()
1461 for fname, e in pycompat.iteritems(self._map):
1472 for fname, e in pycompat.iteritems(self._map):
1462 if e[0] != b'n' or e[3] == -1:
1473 if e[0] != b'n' or e[3] == -1:
1463 nonnorm.add(fname)
1474 nonnorm.add(fname)
1464 if e[0] == b'n' and e[2] == -2:
1475 if e[0] == b'n' and e[2] == -2:
1465 otherparent.add(fname)
1476 otherparent.add(fname)
1466 return nonnorm, otherparent
1477 return nonnorm, otherparent
1467
1478
1468 @propertycache
1479 @propertycache
1469 def filefoldmap(self):
1480 def filefoldmap(self):
1470 """Returns a dictionary mapping normalized case paths to their
1481 """Returns a dictionary mapping normalized case paths to their
1471 non-normalized versions.
1482 non-normalized versions.
1472 """
1483 """
1473 try:
1484 try:
1474 makefilefoldmap = parsers.make_file_foldmap
1485 makefilefoldmap = parsers.make_file_foldmap
1475 except AttributeError:
1486 except AttributeError:
1476 pass
1487 pass
1477 else:
1488 else:
1478 return makefilefoldmap(
1489 return makefilefoldmap(
1479 self._map, util.normcasespec, util.normcasefallback
1490 self._map, util.normcasespec, util.normcasefallback
1480 )
1491 )
1481
1492
1482 f = {}
1493 f = {}
1483 normcase = util.normcase
1494 normcase = util.normcase
1484 for name, s in pycompat.iteritems(self._map):
1495 for name, s in pycompat.iteritems(self._map):
1485 if s[0] != b'r':
1496 if s[0] != b'r':
1486 f[normcase(name)] = name
1497 f[normcase(name)] = name
1487 f[b'.'] = b'.' # prevents useless util.fspath() invocation
1498 f[b'.'] = b'.' # prevents useless util.fspath() invocation
1488 return f
1499 return f
1489
1500
1490 def hastrackeddir(self, d):
1501 def hastrackeddir(self, d):
1491 """
1502 """
1492 Returns True if the dirstate contains a tracked (not removed) file
1503 Returns True if the dirstate contains a tracked (not removed) file
1493 in this directory.
1504 in this directory.
1494 """
1505 """
1495 return d in self._dirs
1506 return d in self._dirs
1496
1507
1497 def hasdir(self, d):
1508 def hasdir(self, d):
1498 """
1509 """
1499 Returns True if the dirstate contains a file (tracked or removed)
1510 Returns True if the dirstate contains a file (tracked or removed)
1500 in this directory.
1511 in this directory.
1501 """
1512 """
1502 return d in self._alldirs
1513 return d in self._alldirs
1503
1514
1504 @propertycache
1515 @propertycache
1505 def _dirs(self):
1516 def _dirs(self):
1506 return util.dirs(self._map, b'r')
1517 return util.dirs(self._map, b'r')
1507
1518
1508 @propertycache
1519 @propertycache
1509 def _alldirs(self):
1520 def _alldirs(self):
1510 return util.dirs(self._map)
1521 return util.dirs(self._map)
1511
1522
1512 def _opendirstatefile(self):
1523 def _opendirstatefile(self):
1513 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1524 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1514 if self._pendingmode is not None and self._pendingmode != mode:
1525 if self._pendingmode is not None and self._pendingmode != mode:
1515 fp.close()
1526 fp.close()
1516 raise error.Abort(
1527 raise error.Abort(
1517 _(b'working directory state may be changed parallelly')
1528 _(b'working directory state may be changed parallelly')
1518 )
1529 )
1519 self._pendingmode = mode
1530 self._pendingmode = mode
1520 return fp
1531 return fp
1521
1532
1522 def parents(self):
1533 def parents(self):
1523 if not self._parents:
1534 if not self._parents:
1524 try:
1535 try:
1525 fp = self._opendirstatefile()
1536 fp = self._opendirstatefile()
1526 st = fp.read(40)
1537 st = fp.read(40)
1527 fp.close()
1538 fp.close()
1528 except IOError as err:
1539 except IOError as err:
1529 if err.errno != errno.ENOENT:
1540 if err.errno != errno.ENOENT:
1530 raise
1541 raise
1531 # File doesn't exist, so the current state is empty
1542 # File doesn't exist, so the current state is empty
1532 st = b''
1543 st = b''
1533
1544
1534 l = len(st)
1545 l = len(st)
1535 if l == 40:
1546 if l == 40:
1536 self._parents = (st[:20], st[20:40])
1547 self._parents = (st[:20], st[20:40])
1537 elif l == 0:
1548 elif l == 0:
1538 self._parents = (nullid, nullid)
1549 self._parents = (nullid, nullid)
1539 else:
1550 else:
1540 raise error.Abort(
1551 raise error.Abort(
1541 _(b'working directory state appears damaged!')
1552 _(b'working directory state appears damaged!')
1542 )
1553 )
1543
1554
1544 return self._parents
1555 return self._parents
1545
1556
1546 def setparents(self, p1, p2):
1557 def setparents(self, p1, p2):
1547 self._parents = (p1, p2)
1558 self._parents = (p1, p2)
1548 self._dirtyparents = True
1559 self._dirtyparents = True
1549
1560
1550 def read(self):
1561 def read(self):
1551 # ignore HG_PENDING because identity is used only for writing
1562 # ignore HG_PENDING because identity is used only for writing
1552 self.identity = util.filestat.frompath(
1563 self.identity = util.filestat.frompath(
1553 self._opener.join(self._filename)
1564 self._opener.join(self._filename)
1554 )
1565 )
1555
1566
1556 try:
1567 try:
1557 fp = self._opendirstatefile()
1568 fp = self._opendirstatefile()
1558 try:
1569 try:
1559 st = fp.read()
1570 st = fp.read()
1560 finally:
1571 finally:
1561 fp.close()
1572 fp.close()
1562 except IOError as err:
1573 except IOError as err:
1563 if err.errno != errno.ENOENT:
1574 if err.errno != errno.ENOENT:
1564 raise
1575 raise
1565 return
1576 return
1566 if not st:
1577 if not st:
1567 return
1578 return
1568
1579
1569 if util.safehasattr(parsers, b'dict_new_presized'):
1580 if util.safehasattr(parsers, b'dict_new_presized'):
1570 # Make an estimate of the number of files in the dirstate based on
1581 # Make an estimate of the number of files in the dirstate based on
1571 # its size. From a linear regression on a set of real-world repos,
1582 # its size. From a linear regression on a set of real-world repos,
1572 # all over 10,000 files, the size of a dirstate entry is 85
1583 # all over 10,000 files, the size of a dirstate entry is 85
1573 # bytes. The cost of resizing is significantly higher than the cost
1584 # bytes. The cost of resizing is significantly higher than the cost
1574 # of filling in a larger presized dict, so subtract 20% from the
1585 # of filling in a larger presized dict, so subtract 20% from the
1575 # size.
1586 # size.
1576 #
1587 #
1577 # This heuristic is imperfect in many ways, so in a future dirstate
1588 # This heuristic is imperfect in many ways, so in a future dirstate
1578 # format update it makes sense to just record the number of entries
1589 # format update it makes sense to just record the number of entries
1579 # on write.
1590 # on write.
1580 self._map = parsers.dict_new_presized(len(st) // 71)
1591 self._map = parsers.dict_new_presized(len(st) // 71)
1581
1592
1582 # Python's garbage collector triggers a GC each time a certain number
1593 # Python's garbage collector triggers a GC each time a certain number
1583 # of container objects (the number being defined by
1594 # of container objects (the number being defined by
1584 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1595 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1585 # for each file in the dirstate. The C version then immediately marks
1596 # for each file in the dirstate. The C version then immediately marks
1586 # them as not to be tracked by the collector. However, this has no
1597 # them as not to be tracked by the collector. However, this has no
1587 # effect on when GCs are triggered, only on what objects the GC looks
1598 # effect on when GCs are triggered, only on what objects the GC looks
1588 # into. This means that O(number of files) GCs are unavoidable.
1599 # into. This means that O(number of files) GCs are unavoidable.
1589 # Depending on when in the process's lifetime the dirstate is parsed,
1600 # Depending on when in the process's lifetime the dirstate is parsed,
1590 # this can get very expensive. As a workaround, disable GC while
1601 # this can get very expensive. As a workaround, disable GC while
1591 # parsing the dirstate.
1602 # parsing the dirstate.
1592 #
1603 #
1593 # (we cannot decorate the function directly since it is in a C module)
1604 # (we cannot decorate the function directly since it is in a C module)
1594 parse_dirstate = util.nogc(parsers.parse_dirstate)
1605 parse_dirstate = util.nogc(parsers.parse_dirstate)
1595 p = parse_dirstate(self._map, self.copymap, st)
1606 p = parse_dirstate(self._map, self.copymap, st)
1596 if not self._dirtyparents:
1607 if not self._dirtyparents:
1597 self.setparents(*p)
1608 self.setparents(*p)
1598
1609
1599 # Avoid excess attribute lookups by fast pathing certain checks
1610 # Avoid excess attribute lookups by fast pathing certain checks
1600 self.__contains__ = self._map.__contains__
1611 self.__contains__ = self._map.__contains__
1601 self.__getitem__ = self._map.__getitem__
1612 self.__getitem__ = self._map.__getitem__
1602 self.get = self._map.get
1613 self.get = self._map.get
1603
1614
1604 def write(self, st, now):
1615 def write(self, st, now):
1605 st.write(
1616 st.write(
1606 parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
1617 parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
1607 )
1618 )
1608 st.close()
1619 st.close()
1609 self._dirtyparents = False
1620 self._dirtyparents = False
1610 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1621 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1611
1622
1612 @propertycache
1623 @propertycache
1613 def nonnormalset(self):
1624 def nonnormalset(self):
1614 nonnorm, otherparents = self.nonnormalentries()
1625 nonnorm, otherparents = self.nonnormalentries()
1615 self.otherparentset = otherparents
1626 self.otherparentset = otherparents
1616 return nonnorm
1627 return nonnorm
1617
1628
1618 @propertycache
1629 @propertycache
1619 def otherparentset(self):
1630 def otherparentset(self):
1620 nonnorm, otherparents = self.nonnormalentries()
1631 nonnorm, otherparents = self.nonnormalentries()
1621 self.nonnormalset = nonnorm
1632 self.nonnormalset = nonnorm
1622 return otherparents
1633 return otherparents
1623
1634
    @propertycache
    def identity(self):
        # File identity (stat information) of the dirstate file as of the
        # time it was read, used to detect concurrent modification.
        #
        # Touching ``self._map`` forces the lazy map to be built; the read
        # path presumably stores the real identity into ``self.identity``
        # while doing so (the Rust-backed class below does exactly that in
        # ``read()`` — TODO confirm the pure-Python read path matches).
        # That assignment replaces this propertycache slot, so the
        # ``return self.identity`` afterwards picks up the freshly stored
        # value rather than recursing.
        self._map
        return self.identity
1628
1639
1629 @propertycache
1640 @propertycache
1630 def dirfoldmap(self):
1641 def dirfoldmap(self):
1631 f = {}
1642 f = {}
1632 normcase = util.normcase
1643 normcase = util.normcase
1633 for name in self._dirs:
1644 for name in self._dirs:
1634 f[normcase(name)] = name
1645 f[normcase(name)] = name
1635 return f
1646 return f
1636
1647
1637
1648
if rustmod is not None:

    class dirstatemap(object):
        """Rust-backed replacement for the pure-Python dirstatemap.

        Thin wrapper delegating storage and parsing to
        ``rustmod.DirstateMap`` while keeping the same interface as the
        Python implementation above (parents caching, identity tracking,
        fold maps, etc.).
        """

        def __init__(self, ui, opener, root):
            self._ui = ui
            self._opener = opener
            self._root = root
            self._filename = b'dirstate'
            # Cached (p1, p2) tuple; populated lazily by parents().
            self._parents = None
            self._dirtyparents = False

            # for consistent view between _pl() and _read() invocations
            self._pendingmode = None

        def addfile(self, *args, **kwargs):
            # Delegate to the Rust map.
            return self._rustmap.addfile(*args, **kwargs)

        def removefile(self, *args, **kwargs):
            # Delegate to the Rust map.
            return self._rustmap.removefile(*args, **kwargs)

        def dropfile(self, *args, **kwargs):
            # Delegate to the Rust map.
            return self._rustmap.dropfile(*args, **kwargs)

        def clearambiguoustimes(self, *args, **kwargs):
            # Delegate to the Rust map.
            return self._rustmap.clearambiguoustimes(*args, **kwargs)

        def nonnormalentries(self):
            """Return the (non-normal, from-other-parent) entry sets."""
            return self._rustmap.nonnormalentries()

        def get(self, *args, **kwargs):
            # Delegate to the Rust map.
            return self._rustmap.get(*args, **kwargs)

        @propertycache
        def _rustmap(self):
            """Lazily construct the Rust map and load the on-disk dirstate.

            The assignment below replaces this propertycache slot before
            ``read()`` runs, so ``read()`` (and anything it calls) sees
            ``self._rustmap`` without re-entering this property.
            """
            self._rustmap = rustmod.DirstateMap(self._root)
            self.read()
            return self._rustmap

        @property
        def copymap(self):
            # Copy-source map, materialized by the Rust side on each access.
            return self._rustmap.copymap()

        def preload(self):
            # Force construction of the Rust map (and the initial read()).
            self._rustmap

        def clear(self):
            """Empty the map, reset parents, and drop cached directory data."""
            self._rustmap.clear()
            self.setparents(nullid, nullid)
            util.clearcachedproperty(self, b"_dirs")
            util.clearcachedproperty(self, b"_alldirs")
            util.clearcachedproperty(self, b"dirfoldmap")

        def items(self):
            return self._rustmap.items()

        def keys(self):
            return iter(self._rustmap)

        def __contains__(self, key):
            return key in self._rustmap

        def __getitem__(self, item):
            return self._rustmap[item]

        def __len__(self):
            return len(self._rustmap)

        def __iter__(self):
            return iter(self._rustmap)

        # forward for python2,3 compat
        iteritems = items

        def _opendirstatefile(self):
            """Open the dirstate file, honoring any pending transaction.

            Aborts if the pending-vs-final mode changes between calls,
            which would mean two views of the dirstate got mixed.
            """
            fp, mode = txnutil.trypending(
                self._root, self._opener, self._filename
            )
            if self._pendingmode is not None and self._pendingmode != mode:
                fp.close()
                raise error.Abort(
                    _(b'working directory state may be changed parallelly')
                )
            self._pendingmode = mode
            return fp

        def setparents(self, p1, p2):
            """Record new working-directory parents (marks them dirty)."""
            self._rustmap.setparents(p1, p2)
            self._parents = (p1, p2)
            self._dirtyparents = True

        def parents(self):
            """Return the cached (p1, p2) tuple, reading it from disk once.

            A missing dirstate file is treated as an empty state; a
            malformed header aborts.
            """
            if not self._parents:
                try:
                    fp = self._opendirstatefile()
                    # The two parent nodes occupy the first 40 bytes.
                    st = fp.read(40)
                    fp.close()
                except IOError as err:
                    if err.errno != errno.ENOENT:
                        raise
                    # File doesn't exist, so the current state is empty
                    st = b''

                try:
                    self._parents = self._rustmap.parents(st)
                except ValueError:
                    raise error.Abort(
                        _(b'working directory state appears damaged!')
                    )

            return self._parents

        def read(self):
            """Load the on-disk dirstate into the Rust map.

            Records the file identity for later write-time invalidation,
            then parses the content and installs fast-path method
            forwards directly onto the instance.
            """
            # ignore HG_PENDING because identity is used only for writing
            self.identity = util.filestat.frompath(
                self._opener.join(self._filename)
            )

            try:
                fp = self._opendirstatefile()
                try:
                    st = fp.read()
                finally:
                    fp.close()
            except IOError as err:
                if err.errno != errno.ENOENT:
                    raise
                # No dirstate file: nothing to load.
                return
            if not st:
                return

            # Disable GC during parsing; see the rationale documented in
            # the pure-Python implementation above.
            parse_dirstate = util.nogc(self._rustmap.read)
            parents = parse_dirstate(st)
            if parents and not self._dirtyparents:
                self.setparents(*parents)

            # Avoid excess attribute lookups by binding the Rust map's
            # methods directly on this instance.
            self.__contains__ = self._rustmap.__contains__
            self.__getitem__ = self._rustmap.__getitem__
            self.get = self._rustmap.get

        def write(self, st, now):
            """Serialize the map through ``st`` and clear the dirty flag."""
            parents = self.parents()
            st.write(self._rustmap.write(parents[0], parents[1], now))
            st.close()
            self._dirtyparents = False

        @propertycache
        def filefoldmap(self):
            """Returns a dictionary mapping normalized case paths to their
            non-normalized versions.
            """
            return self._rustmap.filefoldmapasdict()

        def hastrackeddir(self, d):
            """True if ``d`` is a directory containing tracked files."""
            self._dirs  # Trigger Python's propertycache
            return self._rustmap.hastrackeddir(d)

        def hasdir(self, d):
            """True if ``d`` is a directory containing any entries."""
            self._dirs  # Trigger Python's propertycache
            return self._rustmap.hasdir(d)

        @propertycache
        def _dirs(self):
            # Directory multiset of tracked files, owned by the Rust side.
            return self._rustmap.getdirs()

        @propertycache
        def _alldirs(self):
            # Directory multiset of all entries, owned by the Rust side.
            return self._rustmap.getalldirs()

        @propertycache
        def identity(self):
            # Touching ``self._rustmap`` triggers ``read()``, which assigns
            # ``self.identity`` — replacing this propertycache slot — so
            # the return below yields the freshly recorded file identity.
            self._rustmap
            return self.identity

        @property
        def nonnormalset(self):
            # Recomputed on every access (unlike the cached Python version).
            nonnorm, otherparents = self._rustmap.nonnormalentries()
            return nonnorm

        @property
        def otherparentset(self):
            # Recomputed on every access (unlike the cached Python version).
            nonnorm, otherparents = self._rustmap.nonnormalentries()
            return otherparents

        @propertycache
        def dirfoldmap(self):
            # Map each case-normalized directory name to its stored spelling.
            f = {}
            normcase = util.normcase
            for name in self._dirs:
                f[normcase(name)] = name
            return f
General Comments 0
You need to be logged in to leave comments. Login now