# Source: mercurial/dirstate.py — working directory tracking for Mercurial
# Changeset r44316:37cbf45b (stable branch)
# Commit: "rust-threads: force Rayon to respect the worker count in config"
# Author: Raphaël Gomès
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import nullid
17 from .node import nullid
18 from .pycompat import delattr
18 from .pycompat import delattr
19
19
20 from hgdemandimport import tracing
20 from hgdemandimport import tracing
21
21
22 from . import (
22 from . import (
23 encoding,
23 encoding,
24 error,
24 error,
25 match as matchmod,
25 match as matchmod,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 txnutil,
30 txnutil,
31 util,
31 util,
32 )
32 )
33
33
34 from .interfaces import (
34 from .interfaces import (
35 dirstate as intdirstate,
35 dirstate as intdirstate,
36 util as interfaceutil,
36 util as interfaceutil,
37 )
37 )
38
38
# C/Rust accelerated implementations, when available.
parsers = policy.importmod(r'parsers')
rustmod = policy.importrust(r'dirstate')

# Convenience aliases used throughout this module.
propertycache = util.propertycache
filecache = scmutil.filecache

# Mask used to fold size/mtime values into a signed 31-bit range as stored
# in the dirstate file format.
_rangemask = 0x7FFFFFFF

dirstatetuple = parsers.dirstatetuple
47
47
48
48
class repocache(filecache):
    """filecache specialization for files that live under .hg/"""

    def join(self, obj, fname):
        # Resolve fname through the repository's .hg opener.
        return obj._opener.join(fname)
54
54
55
55
class rootcache(filecache):
    """filecache specialization for files that live in the repository root"""

    def join(self, obj, fname):
        # Resolve fname relative to the working-directory root.
        return obj._join(fname)
61
61
62
62
63 def _getfsnow(vfs):
63 def _getfsnow(vfs):
64 '''Get "now" timestamp on filesystem'''
64 '''Get "now" timestamp on filesystem'''
65 tmpfd, tmpname = vfs.mkstemp()
65 tmpfd, tmpname = vfs.mkstemp()
66 try:
66 try:
67 return os.fstat(tmpfd)[stat.ST_MTIME]
67 return os.fstat(tmpfd)[stat.ST_MTIME]
68 finally:
68 finally:
69 os.close(tmpfd)
69 os.close(tmpfd)
70 vfs.unlink(tmpname)
70 vfs.unlink(tmpname)
71
71
72
72
@interfaceutil.implementer(intdirstate.idirstate)
class dirstate(object):
    def __init__(self, opener, ui, root, validate, sparsematchfn):
        '''Create a new dirstate object.

        opener is an open()-like callable that can be used to open the
        dirstate file; root is the root of the directory tracked by
        the dirstate.
        '''
        self._opener = opener
        self._validate = validate
        self._root = root
        self._sparsematchfn = sparsematchfn
        # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
        # a UNC path pointing to a root share (issue4557)
        self._rootdir = pathutil.normasprefix(root)
        self._dirty = False
        self._lastnormaltime = 0
        self._ui = ui
        self._filecache = {}
        self._parentwriters = 0
        self._filename = b'dirstate'
        self._pendingfilename = b'%s.pending' % self._filename
        self._plchangecallbacks = {}
        self._origpl = None
        self._updatedfiles = set()
        self._mapcls = dirstatemap
        # Access and cache cwd early, so we don't access it for the first
        # time after a working-copy update caused it to not exist
        # (accessing it then raises an exception).
        self._cwd
104
104
105 @contextlib.contextmanager
105 @contextlib.contextmanager
106 def parentchange(self):
106 def parentchange(self):
107 '''Context manager for handling dirstate parents.
107 '''Context manager for handling dirstate parents.
108
108
109 If an exception occurs in the scope of the context manager,
109 If an exception occurs in the scope of the context manager,
110 the incoherent dirstate won't be written when wlock is
110 the incoherent dirstate won't be written when wlock is
111 released.
111 released.
112 '''
112 '''
113 self._parentwriters += 1
113 self._parentwriters += 1
114 yield
114 yield
115 # Typically we want the "undo" step of a context manager in a
115 # Typically we want the "undo" step of a context manager in a
116 # finally block so it happens even when an exception
116 # finally block so it happens even when an exception
117 # occurs. In this case, however, we only want to decrement
117 # occurs. In this case, however, we only want to decrement
118 # parentwriters if the code in the with statement exits
118 # parentwriters if the code in the with statement exits
119 # normally, so we don't have a try/finally here on purpose.
119 # normally, so we don't have a try/finally here on purpose.
120 self._parentwriters -= 1
120 self._parentwriters -= 1
121
121
122 def pendingparentchange(self):
122 def pendingparentchange(self):
123 '''Returns true if the dirstate is in the middle of a set of changes
123 '''Returns true if the dirstate is in the middle of a set of changes
124 that modify the dirstate parent.
124 that modify the dirstate parent.
125 '''
125 '''
126 return self._parentwriters > 0
126 return self._parentwriters > 0
127
127
128 @propertycache
128 @propertycache
129 def _map(self):
129 def _map(self):
130 """Return the dirstate contents (see documentation for dirstatemap)."""
130 """Return the dirstate contents (see documentation for dirstatemap)."""
131 self._map = self._mapcls(self._ui, self._opener, self._root)
131 self._map = self._mapcls(self._ui, self._opener, self._root)
132 return self._map
132 return self._map
133
133
134 @property
134 @property
135 def _sparsematcher(self):
135 def _sparsematcher(self):
136 """The matcher for the sparse checkout.
136 """The matcher for the sparse checkout.
137
137
138 The working directory may not include every file from a manifest. The
138 The working directory may not include every file from a manifest. The
139 matcher obtained by this property will match a path if it is to be
139 matcher obtained by this property will match a path if it is to be
140 included in the working directory.
140 included in the working directory.
141 """
141 """
142 # TODO there is potential to cache this property. For now, the matcher
142 # TODO there is potential to cache this property. For now, the matcher
143 # is resolved on every access. (But the called function does use a
143 # is resolved on every access. (But the called function does use a
144 # cache to keep the lookup fast.)
144 # cache to keep the lookup fast.)
145 return self._sparsematchfn()
145 return self._sparsematchfn()
146
146
147 @repocache(b'branch')
147 @repocache(b'branch')
148 def _branch(self):
148 def _branch(self):
149 try:
149 try:
150 return self._opener.read(b"branch").strip() or b"default"
150 return self._opener.read(b"branch").strip() or b"default"
151 except IOError as inst:
151 except IOError as inst:
152 if inst.errno != errno.ENOENT:
152 if inst.errno != errno.ENOENT:
153 raise
153 raise
154 return b"default"
154 return b"default"
155
155
156 @property
156 @property
157 def _pl(self):
157 def _pl(self):
158 return self._map.parents()
158 return self._map.parents()
159
159
160 def hasdir(self, d):
160 def hasdir(self, d):
161 return self._map.hastrackeddir(d)
161 return self._map.hastrackeddir(d)
162
162
163 @rootcache(b'.hgignore')
163 @rootcache(b'.hgignore')
164 def _ignore(self):
164 def _ignore(self):
165 files = self._ignorefiles()
165 files = self._ignorefiles()
166 if not files:
166 if not files:
167 return matchmod.never()
167 return matchmod.never()
168
168
169 pats = [b'include:%s' % f for f in files]
169 pats = [b'include:%s' % f for f in files]
170 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
170 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
171
171
172 @propertycache
172 @propertycache
173 def _slash(self):
173 def _slash(self):
174 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
174 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
175
175
176 @propertycache
176 @propertycache
177 def _checklink(self):
177 def _checklink(self):
178 return util.checklink(self._root)
178 return util.checklink(self._root)
179
179
180 @propertycache
180 @propertycache
181 def _checkexec(self):
181 def _checkexec(self):
182 return util.checkexec(self._root)
182 return util.checkexec(self._root)
183
183
184 @propertycache
184 @propertycache
185 def _checkcase(self):
185 def _checkcase(self):
186 return not util.fscasesensitive(self._join(b'.hg'))
186 return not util.fscasesensitive(self._join(b'.hg'))
187
187
188 def _join(self, f):
188 def _join(self, f):
189 # much faster than os.path.join()
189 # much faster than os.path.join()
190 # it's safe because f is always a relative path
190 # it's safe because f is always a relative path
191 return self._rootdir + f
191 return self._rootdir + f
192
192
193 def flagfunc(self, buildfallback):
193 def flagfunc(self, buildfallback):
194 if self._checklink and self._checkexec:
194 if self._checklink and self._checkexec:
195
195
196 def f(x):
196 def f(x):
197 try:
197 try:
198 st = os.lstat(self._join(x))
198 st = os.lstat(self._join(x))
199 if util.statislink(st):
199 if util.statislink(st):
200 return b'l'
200 return b'l'
201 if util.statisexec(st):
201 if util.statisexec(st):
202 return b'x'
202 return b'x'
203 except OSError:
203 except OSError:
204 pass
204 pass
205 return b''
205 return b''
206
206
207 return f
207 return f
208
208
209 fallback = buildfallback()
209 fallback = buildfallback()
210 if self._checklink:
210 if self._checklink:
211
211
212 def f(x):
212 def f(x):
213 if os.path.islink(self._join(x)):
213 if os.path.islink(self._join(x)):
214 return b'l'
214 return b'l'
215 if b'x' in fallback(x):
215 if b'x' in fallback(x):
216 return b'x'
216 return b'x'
217 return b''
217 return b''
218
218
219 return f
219 return f
220 if self._checkexec:
220 if self._checkexec:
221
221
222 def f(x):
222 def f(x):
223 if b'l' in fallback(x):
223 if b'l' in fallback(x):
224 return b'l'
224 return b'l'
225 if util.isexec(self._join(x)):
225 if util.isexec(self._join(x)):
226 return b'x'
226 return b'x'
227 return b''
227 return b''
228
228
229 return f
229 return f
230 else:
230 else:
231 return fallback
231 return fallback
232
232
233 @propertycache
233 @propertycache
234 def _cwd(self):
234 def _cwd(self):
235 # internal config: ui.forcecwd
235 # internal config: ui.forcecwd
236 forcecwd = self._ui.config(b'ui', b'forcecwd')
236 forcecwd = self._ui.config(b'ui', b'forcecwd')
237 if forcecwd:
237 if forcecwd:
238 return forcecwd
238 return forcecwd
239 return encoding.getcwd()
239 return encoding.getcwd()
240
240
241 def getcwd(self):
241 def getcwd(self):
242 '''Return the path from which a canonical path is calculated.
242 '''Return the path from which a canonical path is calculated.
243
243
244 This path should be used to resolve file patterns or to convert
244 This path should be used to resolve file patterns or to convert
245 canonical paths back to file paths for display. It shouldn't be
245 canonical paths back to file paths for display. It shouldn't be
246 used to get real file paths. Use vfs functions instead.
246 used to get real file paths. Use vfs functions instead.
247 '''
247 '''
248 cwd = self._cwd
248 cwd = self._cwd
249 if cwd == self._root:
249 if cwd == self._root:
250 return b''
250 return b''
251 # self._root ends with a path separator if self._root is '/' or 'C:\'
251 # self._root ends with a path separator if self._root is '/' or 'C:\'
252 rootsep = self._root
252 rootsep = self._root
253 if not util.endswithsep(rootsep):
253 if not util.endswithsep(rootsep):
254 rootsep += pycompat.ossep
254 rootsep += pycompat.ossep
255 if cwd.startswith(rootsep):
255 if cwd.startswith(rootsep):
256 return cwd[len(rootsep) :]
256 return cwd[len(rootsep) :]
257 else:
257 else:
258 # we're outside the repo. return an absolute path.
258 # we're outside the repo. return an absolute path.
259 return cwd
259 return cwd
260
260
261 def pathto(self, f, cwd=None):
261 def pathto(self, f, cwd=None):
262 if cwd is None:
262 if cwd is None:
263 cwd = self.getcwd()
263 cwd = self.getcwd()
264 path = util.pathto(self._root, cwd, f)
264 path = util.pathto(self._root, cwd, f)
265 if self._slash:
265 if self._slash:
266 return util.pconvert(path)
266 return util.pconvert(path)
267 return path
267 return path
268
268
269 def __getitem__(self, key):
269 def __getitem__(self, key):
270 '''Return the current state of key (a filename) in the dirstate.
270 '''Return the current state of key (a filename) in the dirstate.
271
271
272 States are:
272 States are:
273 n normal
273 n normal
274 m needs merging
274 m needs merging
275 r marked for removal
275 r marked for removal
276 a marked for addition
276 a marked for addition
277 ? not tracked
277 ? not tracked
278 '''
278 '''
279 return self._map.get(key, (b"?",))[0]
279 return self._map.get(key, (b"?",))[0]
280
280
281 def __contains__(self, key):
281 def __contains__(self, key):
282 return key in self._map
282 return key in self._map
283
283
284 def __iter__(self):
284 def __iter__(self):
285 return iter(sorted(self._map))
285 return iter(sorted(self._map))
286
286
287 def items(self):
287 def items(self):
288 return pycompat.iteritems(self._map)
288 return pycompat.iteritems(self._map)
289
289
290 iteritems = items
290 iteritems = items
291
291
292 def parents(self):
292 def parents(self):
293 return [self._validate(p) for p in self._pl]
293 return [self._validate(p) for p in self._pl]
294
294
295 def p1(self):
295 def p1(self):
296 return self._validate(self._pl[0])
296 return self._validate(self._pl[0])
297
297
298 def p2(self):
298 def p2(self):
299 return self._validate(self._pl[1])
299 return self._validate(self._pl[1])
300
300
301 def branch(self):
301 def branch(self):
302 return encoding.tolocal(self._branch)
302 return encoding.tolocal(self._branch)
303
303
304 def setparents(self, p1, p2=nullid):
304 def setparents(self, p1, p2=nullid):
305 """Set dirstate parents to p1 and p2.
305 """Set dirstate parents to p1 and p2.
306
306
307 When moving from two parents to one, 'm' merged entries a
307 When moving from two parents to one, 'm' merged entries a
308 adjusted to normal and previous copy records discarded and
308 adjusted to normal and previous copy records discarded and
309 returned by the call.
309 returned by the call.
310
310
311 See localrepo.setparents()
311 See localrepo.setparents()
312 """
312 """
313 if self._parentwriters == 0:
313 if self._parentwriters == 0:
314 raise ValueError(
314 raise ValueError(
315 b"cannot set dirstate parent outside of "
315 b"cannot set dirstate parent outside of "
316 b"dirstate.parentchange context manager"
316 b"dirstate.parentchange context manager"
317 )
317 )
318
318
319 self._dirty = True
319 self._dirty = True
320 oldp2 = self._pl[1]
320 oldp2 = self._pl[1]
321 if self._origpl is None:
321 if self._origpl is None:
322 self._origpl = self._pl
322 self._origpl = self._pl
323 self._map.setparents(p1, p2)
323 self._map.setparents(p1, p2)
324 copies = {}
324 copies = {}
325 if oldp2 != nullid and p2 == nullid:
325 if oldp2 != nullid and p2 == nullid:
326 candidatefiles = self._map.nonnormalset.union(
326 candidatefiles = self._map.nonnormalset.union(
327 self._map.otherparentset
327 self._map.otherparentset
328 )
328 )
329 for f in candidatefiles:
329 for f in candidatefiles:
330 s = self._map.get(f)
330 s = self._map.get(f)
331 if s is None:
331 if s is None:
332 continue
332 continue
333
333
334 # Discard 'm' markers when moving away from a merge state
334 # Discard 'm' markers when moving away from a merge state
335 if s[0] == b'm':
335 if s[0] == b'm':
336 source = self._map.copymap.get(f)
336 source = self._map.copymap.get(f)
337 if source:
337 if source:
338 copies[f] = source
338 copies[f] = source
339 self.normallookup(f)
339 self.normallookup(f)
340 # Also fix up otherparent markers
340 # Also fix up otherparent markers
341 elif s[0] == b'n' and s[2] == -2:
341 elif s[0] == b'n' and s[2] == -2:
342 source = self._map.copymap.get(f)
342 source = self._map.copymap.get(f)
343 if source:
343 if source:
344 copies[f] = source
344 copies[f] = source
345 self.add(f)
345 self.add(f)
346 return copies
346 return copies
347
347
348 def setbranch(self, branch):
348 def setbranch(self, branch):
349 self.__class__._branch.set(self, encoding.fromlocal(branch))
349 self.__class__._branch.set(self, encoding.fromlocal(branch))
350 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
350 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
351 try:
351 try:
352 f.write(self._branch + b'\n')
352 f.write(self._branch + b'\n')
353 f.close()
353 f.close()
354
354
355 # make sure filecache has the correct stat info for _branch after
355 # make sure filecache has the correct stat info for _branch after
356 # replacing the underlying file
356 # replacing the underlying file
357 ce = self._filecache[b'_branch']
357 ce = self._filecache[b'_branch']
358 if ce:
358 if ce:
359 ce.refresh()
359 ce.refresh()
360 except: # re-raises
360 except: # re-raises
361 f.discard()
361 f.discard()
362 raise
362 raise
363
363
364 def invalidate(self):
364 def invalidate(self):
365 '''Causes the next access to reread the dirstate.
365 '''Causes the next access to reread the dirstate.
366
366
367 This is different from localrepo.invalidatedirstate() because it always
367 This is different from localrepo.invalidatedirstate() because it always
368 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
368 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
369 check whether the dirstate has changed before rereading it.'''
369 check whether the dirstate has changed before rereading it.'''
370
370
371 for a in (r"_map", r"_branch", r"_ignore"):
371 for a in (r"_map", r"_branch", r"_ignore"):
372 if a in self.__dict__:
372 if a in self.__dict__:
373 delattr(self, a)
373 delattr(self, a)
374 self._lastnormaltime = 0
374 self._lastnormaltime = 0
375 self._dirty = False
375 self._dirty = False
376 self._updatedfiles.clear()
376 self._updatedfiles.clear()
377 self._parentwriters = 0
377 self._parentwriters = 0
378 self._origpl = None
378 self._origpl = None
379
379
380 def copy(self, source, dest):
380 def copy(self, source, dest):
381 """Mark dest as a copy of source. Unmark dest if source is None."""
381 """Mark dest as a copy of source. Unmark dest if source is None."""
382 if source == dest:
382 if source == dest:
383 return
383 return
384 self._dirty = True
384 self._dirty = True
385 if source is not None:
385 if source is not None:
386 self._map.copymap[dest] = source
386 self._map.copymap[dest] = source
387 self._updatedfiles.add(source)
387 self._updatedfiles.add(source)
388 self._updatedfiles.add(dest)
388 self._updatedfiles.add(dest)
389 elif self._map.copymap.pop(dest, None):
389 elif self._map.copymap.pop(dest, None):
390 self._updatedfiles.add(dest)
390 self._updatedfiles.add(dest)
391
391
392 def copied(self, file):
392 def copied(self, file):
393 return self._map.copymap.get(file, None)
393 return self._map.copymap.get(file, None)
394
394
395 def copies(self):
395 def copies(self):
396 return self._map.copymap
396 return self._map.copymap
397
397
398 def _addpath(self, f, state, mode, size, mtime):
398 def _addpath(self, f, state, mode, size, mtime):
399 oldstate = self[f]
399 oldstate = self[f]
400 if state == b'a' or oldstate == b'r':
400 if state == b'a' or oldstate == b'r':
401 scmutil.checkfilename(f)
401 scmutil.checkfilename(f)
402 if self._map.hastrackeddir(f):
402 if self._map.hastrackeddir(f):
403 raise error.Abort(
403 raise error.Abort(
404 _(b'directory %r already in dirstate') % pycompat.bytestr(f)
404 _(b'directory %r already in dirstate') % pycompat.bytestr(f)
405 )
405 )
406 # shadows
406 # shadows
407 for d in util.finddirs(f):
407 for d in util.finddirs(f):
408 if self._map.hastrackeddir(d):
408 if self._map.hastrackeddir(d):
409 break
409 break
410 entry = self._map.get(d)
410 entry = self._map.get(d)
411 if entry is not None and entry[0] != b'r':
411 if entry is not None and entry[0] != b'r':
412 raise error.Abort(
412 raise error.Abort(
413 _(b'file %r in dirstate clashes with %r')
413 _(b'file %r in dirstate clashes with %r')
414 % (pycompat.bytestr(d), pycompat.bytestr(f))
414 % (pycompat.bytestr(d), pycompat.bytestr(f))
415 )
415 )
416 self._dirty = True
416 self._dirty = True
417 self._updatedfiles.add(f)
417 self._updatedfiles.add(f)
418 self._map.addfile(f, oldstate, state, mode, size, mtime)
418 self._map.addfile(f, oldstate, state, mode, size, mtime)
419
419
420 def normal(self, f, parentfiledata=None):
420 def normal(self, f, parentfiledata=None):
421 '''Mark a file normal and clean.
421 '''Mark a file normal and clean.
422
422
423 parentfiledata: (mode, size, mtime) of the clean file
423 parentfiledata: (mode, size, mtime) of the clean file
424
424
425 parentfiledata should be computed from memory (for mode,
425 parentfiledata should be computed from memory (for mode,
426 size), as or close as possible from the point where we
426 size), as or close as possible from the point where we
427 determined the file was clean, to limit the risk of the
427 determined the file was clean, to limit the risk of the
428 file having been changed by an external process between the
428 file having been changed by an external process between the
429 moment where the file was determined to be clean and now.'''
429 moment where the file was determined to be clean and now.'''
430 if parentfiledata:
430 if parentfiledata:
431 (mode, size, mtime) = parentfiledata
431 (mode, size, mtime) = parentfiledata
432 else:
432 else:
433 s = os.lstat(self._join(f))
433 s = os.lstat(self._join(f))
434 mode = s.st_mode
434 mode = s.st_mode
435 size = s.st_size
435 size = s.st_size
436 mtime = s[stat.ST_MTIME]
436 mtime = s[stat.ST_MTIME]
437 self._addpath(f, b'n', mode, size & _rangemask, mtime & _rangemask)
437 self._addpath(f, b'n', mode, size & _rangemask, mtime & _rangemask)
438 self._map.copymap.pop(f, None)
438 self._map.copymap.pop(f, None)
439 if f in self._map.nonnormalset:
439 if f in self._map.nonnormalset:
440 self._map.nonnormalset.remove(f)
440 self._map.nonnormalset.remove(f)
441 if mtime > self._lastnormaltime:
441 if mtime > self._lastnormaltime:
442 # Remember the most recent modification timeslot for status(),
442 # Remember the most recent modification timeslot for status(),
443 # to make sure we won't miss future size-preserving file content
443 # to make sure we won't miss future size-preserving file content
444 # modifications that happen within the same timeslot.
444 # modifications that happen within the same timeslot.
445 self._lastnormaltime = mtime
445 self._lastnormaltime = mtime
446
446
447 def normallookup(self, f):
447 def normallookup(self, f):
448 '''Mark a file normal, but possibly dirty.'''
448 '''Mark a file normal, but possibly dirty.'''
449 if self._pl[1] != nullid:
449 if self._pl[1] != nullid:
450 # if there is a merge going on and the file was either
450 # if there is a merge going on and the file was either
451 # in state 'm' (-1) or coming from other parent (-2) before
451 # in state 'm' (-1) or coming from other parent (-2) before
452 # being removed, restore that state.
452 # being removed, restore that state.
453 entry = self._map.get(f)
453 entry = self._map.get(f)
454 if entry is not None:
454 if entry is not None:
455 if entry[0] == b'r' and entry[2] in (-1, -2):
455 if entry[0] == b'r' and entry[2] in (-1, -2):
456 source = self._map.copymap.get(f)
456 source = self._map.copymap.get(f)
457 if entry[2] == -1:
457 if entry[2] == -1:
458 self.merge(f)
458 self.merge(f)
459 elif entry[2] == -2:
459 elif entry[2] == -2:
460 self.otherparent(f)
460 self.otherparent(f)
461 if source:
461 if source:
462 self.copy(source, f)
462 self.copy(source, f)
463 return
463 return
464 if entry[0] == b'm' or entry[0] == b'n' and entry[2] == -2:
464 if entry[0] == b'm' or entry[0] == b'n' and entry[2] == -2:
465 return
465 return
466 self._addpath(f, b'n', 0, -1, -1)
466 self._addpath(f, b'n', 0, -1, -1)
467 self._map.copymap.pop(f, None)
467 self._map.copymap.pop(f, None)
468
468
469 def otherparent(self, f):
469 def otherparent(self, f):
470 '''Mark as coming from the other parent, always dirty.'''
470 '''Mark as coming from the other parent, always dirty.'''
471 if self._pl[1] == nullid:
471 if self._pl[1] == nullid:
472 raise error.Abort(
472 raise error.Abort(
473 _(b"setting %r to other parent only allowed in merges") % f
473 _(b"setting %r to other parent only allowed in merges") % f
474 )
474 )
475 if f in self and self[f] == b'n':
475 if f in self and self[f] == b'n':
476 # merge-like
476 # merge-like
477 self._addpath(f, b'm', 0, -2, -1)
477 self._addpath(f, b'm', 0, -2, -1)
478 else:
478 else:
479 # add-like
479 # add-like
480 self._addpath(f, b'n', 0, -2, -1)
480 self._addpath(f, b'n', 0, -2, -1)
481 self._map.copymap.pop(f, None)
481 self._map.copymap.pop(f, None)
482
482
483 def add(self, f):
483 def add(self, f):
484 '''Mark a file added.'''
484 '''Mark a file added.'''
485 self._addpath(f, b'a', 0, -1, -1)
485 self._addpath(f, b'a', 0, -1, -1)
486 self._map.copymap.pop(f, None)
486 self._map.copymap.pop(f, None)
487
487
488 def remove(self, f):
488 def remove(self, f):
489 '''Mark a file removed.'''
489 '''Mark a file removed.'''
490 self._dirty = True
490 self._dirty = True
491 oldstate = self[f]
491 oldstate = self[f]
492 size = 0
492 size = 0
493 if self._pl[1] != nullid:
493 if self._pl[1] != nullid:
494 entry = self._map.get(f)
494 entry = self._map.get(f)
495 if entry is not None:
495 if entry is not None:
496 # backup the previous state
496 # backup the previous state
497 if entry[0] == b'm': # merge
497 if entry[0] == b'm': # merge
498 size = -1
498 size = -1
499 elif entry[0] == b'n' and entry[2] == -2: # other parent
499 elif entry[0] == b'n' and entry[2] == -2: # other parent
500 size = -2
500 size = -2
501 self._map.otherparentset.add(f)
501 self._map.otherparentset.add(f)
502 self._updatedfiles.add(f)
502 self._updatedfiles.add(f)
503 self._map.removefile(f, oldstate, size)
503 self._map.removefile(f, oldstate, size)
504 if size == 0:
504 if size == 0:
505 self._map.copymap.pop(f, None)
505 self._map.copymap.pop(f, None)
506
506
507 def merge(self, f):
507 def merge(self, f):
508 '''Mark a file merged.'''
508 '''Mark a file merged.'''
509 if self._pl[1] == nullid:
509 if self._pl[1] == nullid:
510 return self.normallookup(f)
510 return self.normallookup(f)
511 return self.otherparent(f)
511 return self.otherparent(f)
512
512
513 def drop(self, f):
513 def drop(self, f):
514 '''Drop a file from the dirstate'''
514 '''Drop a file from the dirstate'''
515 oldstate = self[f]
515 oldstate = self[f]
516 if self._map.dropfile(f, oldstate):
516 if self._map.dropfile(f, oldstate):
517 self._dirty = True
517 self._dirty = True
518 self._updatedfiles.add(f)
518 self._updatedfiles.add(f)
519 self._map.copymap.pop(f, None)
519 self._map.copymap.pop(f, None)
520
520
521 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
521 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
522 if exists is None:
522 if exists is None:
523 exists = os.path.lexists(os.path.join(self._root, path))
523 exists = os.path.lexists(os.path.join(self._root, path))
524 if not exists:
524 if not exists:
525 # Maybe a path component exists
525 # Maybe a path component exists
526 if not ignoremissing and b'/' in path:
526 if not ignoremissing and b'/' in path:
527 d, f = path.rsplit(b'/', 1)
527 d, f = path.rsplit(b'/', 1)
528 d = self._normalize(d, False, ignoremissing, None)
528 d = self._normalize(d, False, ignoremissing, None)
529 folded = d + b"/" + f
529 folded = d + b"/" + f
530 else:
530 else:
531 # No path components, preserve original case
531 # No path components, preserve original case
532 folded = path
532 folded = path
533 else:
533 else:
534 # recursively normalize leading directory components
534 # recursively normalize leading directory components
535 # against dirstate
535 # against dirstate
536 if b'/' in normed:
536 if b'/' in normed:
537 d, f = normed.rsplit(b'/', 1)
537 d, f = normed.rsplit(b'/', 1)
538 d = self._normalize(d, False, ignoremissing, True)
538 d = self._normalize(d, False, ignoremissing, True)
539 r = self._root + b"/" + d
539 r = self._root + b"/" + d
540 folded = d + b"/" + util.fspath(f, r)
540 folded = d + b"/" + util.fspath(f, r)
541 else:
541 else:
542 folded = util.fspath(normed, self._root)
542 folded = util.fspath(normed, self._root)
543 storemap[normed] = folded
543 storemap[normed] = folded
544
544
545 return folded
545 return folded
546
546
547 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
547 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
548 normed = util.normcase(path)
548 normed = util.normcase(path)
549 folded = self._map.filefoldmap.get(normed, None)
549 folded = self._map.filefoldmap.get(normed, None)
550 if folded is None:
550 if folded is None:
551 if isknown:
551 if isknown:
552 folded = path
552 folded = path
553 else:
553 else:
554 folded = self._discoverpath(
554 folded = self._discoverpath(
555 path, normed, ignoremissing, exists, self._map.filefoldmap
555 path, normed, ignoremissing, exists, self._map.filefoldmap
556 )
556 )
557 return folded
557 return folded
558
558
559 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
559 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
560 normed = util.normcase(path)
560 normed = util.normcase(path)
561 folded = self._map.filefoldmap.get(normed, None)
561 folded = self._map.filefoldmap.get(normed, None)
562 if folded is None:
562 if folded is None:
563 folded = self._map.dirfoldmap.get(normed, None)
563 folded = self._map.dirfoldmap.get(normed, None)
564 if folded is None:
564 if folded is None:
565 if isknown:
565 if isknown:
566 folded = path
566 folded = path
567 else:
567 else:
568 # store discovered result in dirfoldmap so that future
568 # store discovered result in dirfoldmap so that future
569 # normalizefile calls don't start matching directories
569 # normalizefile calls don't start matching directories
570 folded = self._discoverpath(
570 folded = self._discoverpath(
571 path, normed, ignoremissing, exists, self._map.dirfoldmap
571 path, normed, ignoremissing, exists, self._map.dirfoldmap
572 )
572 )
573 return folded
573 return folded
574
574
575 def normalize(self, path, isknown=False, ignoremissing=False):
575 def normalize(self, path, isknown=False, ignoremissing=False):
576 '''
576 '''
577 normalize the case of a pathname when on a casefolding filesystem
577 normalize the case of a pathname when on a casefolding filesystem
578
578
579 isknown specifies whether the filename came from walking the
579 isknown specifies whether the filename came from walking the
580 disk, to avoid extra filesystem access.
580 disk, to avoid extra filesystem access.
581
581
582 If ignoremissing is True, missing path are returned
582 If ignoremissing is True, missing path are returned
583 unchanged. Otherwise, we try harder to normalize possibly
583 unchanged. Otherwise, we try harder to normalize possibly
584 existing path components.
584 existing path components.
585
585
586 The normalized case is determined based on the following precedence:
586 The normalized case is determined based on the following precedence:
587
587
588 - version of name already stored in the dirstate
588 - version of name already stored in the dirstate
589 - version of name stored on disk
589 - version of name stored on disk
590 - version provided via command arguments
590 - version provided via command arguments
591 '''
591 '''
592
592
593 if self._checkcase:
593 if self._checkcase:
594 return self._normalize(path, isknown, ignoremissing)
594 return self._normalize(path, isknown, ignoremissing)
595 return path
595 return path
596
596
597 def clear(self):
597 def clear(self):
598 self._map.clear()
598 self._map.clear()
599 self._lastnormaltime = 0
599 self._lastnormaltime = 0
600 self._updatedfiles.clear()
600 self._updatedfiles.clear()
601 self._dirty = True
601 self._dirty = True
602
602
603 def rebuild(self, parent, allfiles, changedfiles=None):
603 def rebuild(self, parent, allfiles, changedfiles=None):
604 if changedfiles is None:
604 if changedfiles is None:
605 # Rebuild entire dirstate
605 # Rebuild entire dirstate
606 changedfiles = allfiles
606 changedfiles = allfiles
607 lastnormaltime = self._lastnormaltime
607 lastnormaltime = self._lastnormaltime
608 self.clear()
608 self.clear()
609 self._lastnormaltime = lastnormaltime
609 self._lastnormaltime = lastnormaltime
610
610
611 if self._origpl is None:
611 if self._origpl is None:
612 self._origpl = self._pl
612 self._origpl = self._pl
613 self._map.setparents(parent, nullid)
613 self._map.setparents(parent, nullid)
614 for f in changedfiles:
614 for f in changedfiles:
615 if f in allfiles:
615 if f in allfiles:
616 self.normallookup(f)
616 self.normallookup(f)
617 else:
617 else:
618 self.drop(f)
618 self.drop(f)
619
619
620 self._dirty = True
620 self._dirty = True
621
621
    def identity(self):
        '''Return identity of dirstate itself to detect changing in storage

        If identity of previous dirstate is equal to this, writing
        changes based on the former dirstate out can keep consistency.
        '''
        # Delegated to the map; presumably the identity captures the on-disk
        # dirstate file's state at read time — confirm in the map
        # implementation.
        return self._map.identity
    def write(self, tr):
        """Flush dirty in-memory dirstate state to disk.

        With a transaction *tr*, ambiguous timestamps are dropped now but
        the actual file write is registered as a file generator and thus
        deferred to the transaction; without one, the dirstate file is
        rewritten immediately.
        """
        if not self._dirty:
            # nothing changed since the last write
            return

        filename = self._filename
        if tr:
            # 'dirstate.write()' is not only for writing in-memory
            # changes out, but also for dropping ambiguous timestamp.
            # delayed writing re-raise "ambiguous timestamp issue".
            # See also the wiki page below for detail:
            # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan

            # emulate dropping timestamp in 'parsers.pack_dirstate'
            now = _getfsnow(self._opener)
            self._map.clearambiguoustimes(self._updatedfiles, now)

            # emulate that all 'dirstate.normal' results are written out
            self._lastnormaltime = 0
            self._updatedfiles.clear()

            # delay writing in-memory changes out
            tr.addfilegenerator(
                b'dirstate',
                (self._filename,),
                self._writedirstate,
                location=b'plain',
            )
            return

        # no transaction: write synchronously, guarding against ambiguous
        # mtimes on the dirstate file itself (checkambig)
        st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
        self._writedirstate(st)
    def addparentchangecallback(self, category, callback):
        """add a callback to be called when the wd parents are changed

        Callback will be called with the following arguments:
        dirstate, (oldp1, oldp2), (newp1, newp2)

        Category is a unique identifier to allow overwriting an old callback
        with a newer callback.
        """
        # plain dict assignment: registering again under the same category
        # silently replaces the previous callback
        self._plchangecallbacks[category] = callback
    def _writedirstate(self, st):
        """Serialize the dirstate map into the already-open file *st*.

        Fires registered parent-change callbacks first, then optionally
        stalls (debug.dirstate.delaywrite) so that no entry shares its
        mtime with "now", and finally writes the map and clears the
        dirty flag.
        """
        # notify callbacks about parents change
        if self._origpl is not None and self._origpl != self._pl:
            # sorted for a deterministic callback order across runs
            for c, callback in sorted(
                pycompat.iteritems(self._plchangecallbacks)
            ):
                callback(self, self._origpl, self._pl)
            self._origpl = None
        # use the modification time of the newly created temporary file as the
        # filesystem's notion of 'now'
        now = util.fstat(st)[stat.ST_MTIME] & _rangemask

        # enough 'delaywrite' prevents 'pack_dirstate' from dropping
        # timestamp of each entries in dirstate, because of 'now > mtime'
        delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
        if delaywrite > 0:
            # do we have any files to delay for?
            items = pycompat.iteritems(self._map)
            for f, e in items:
                # entry state 'n' (normal) with mtime equal to 'now' is
                # exactly the ambiguous case delaywrite exists for
                if e[0] == b'n' and e[3] == now:
                    import time  # to avoid useless import

                    # rather than sleep n seconds, sleep until the next
                    # multiple of n seconds
                    clock = time.time()
                    start = int(clock) - (int(clock) % delaywrite)
                    end = start + delaywrite
                    time.sleep(end - clock)
                    now = end  # trust our estimate that the end is near now
                    break
            # since the iterator is potentially not deleted,
            # delete the iterator to release the reference for the Rust
            # implementation.
            # TODO make the Rust implementation behave like Python
            # since this would not work with a non ref-counting GC.
            del items

        self._map.write(st, now)
        self._lastnormaltime = 0
        self._dirty = False
714 def _dirignore(self, f):
714 def _dirignore(self, f):
715 if self._ignore(f):
715 if self._ignore(f):
716 return True
716 return True
717 for p in util.finddirs(f):
717 for p in util.finddirs(f):
718 if self._ignore(p):
718 if self._ignore(p):
719 return True
719 return True
720 return False
720 return False
721
721
722 def _ignorefiles(self):
722 def _ignorefiles(self):
723 files = []
723 files = []
724 if os.path.exists(self._join(b'.hgignore')):
724 if os.path.exists(self._join(b'.hgignore')):
725 files.append(self._join(b'.hgignore'))
725 files.append(self._join(b'.hgignore'))
726 for name, path in self._ui.configitems(b"ui"):
726 for name, path in self._ui.configitems(b"ui"):
727 if name == b'ignore' or name.startswith(b'ignore.'):
727 if name == b'ignore' or name.startswith(b'ignore.'):
728 # we need to use os.path.join here rather than self._join
728 # we need to use os.path.join here rather than self._join
729 # because path is arbitrary and user-specified
729 # because path is arbitrary and user-specified
730 files.append(os.path.join(self._rootdir, util.expandpath(path)))
730 files.append(os.path.join(self._rootdir, util.expandpath(path)))
731 return files
731 return files
732
732
733 def _ignorefileandline(self, f):
733 def _ignorefileandline(self, f):
734 files = collections.deque(self._ignorefiles())
734 files = collections.deque(self._ignorefiles())
735 visited = set()
735 visited = set()
736 while files:
736 while files:
737 i = files.popleft()
737 i = files.popleft()
738 patterns = matchmod.readpatternfile(
738 patterns = matchmod.readpatternfile(
739 i, self._ui.warn, sourceinfo=True
739 i, self._ui.warn, sourceinfo=True
740 )
740 )
741 for pattern, lineno, line in patterns:
741 for pattern, lineno, line in patterns:
742 kind, p = matchmod._patsplit(pattern, b'glob')
742 kind, p = matchmod._patsplit(pattern, b'glob')
743 if kind == b"subinclude":
743 if kind == b"subinclude":
744 if p not in visited:
744 if p not in visited:
745 files.append(p)
745 files.append(p)
746 continue
746 continue
747 m = matchmod.match(
747 m = matchmod.match(
748 self._root, b'', [], [pattern], warn=self._ui.warn
748 self._root, b'', [], [pattern], warn=self._ui.warn
749 )
749 )
750 if m(f):
750 if m(f):
751 return (i, lineno, line)
751 return (i, lineno, line)
752 visited.add(i)
752 visited.add(i)
753 return (None, -1, b"")
753 return (None, -1, b"")
754
754
    def _walkexplicit(self, match, subrepos):
        '''Get stat data about the files explicitly specified by match.

        Return a triple (results, dirsfound, dirsnotfound).
        - results is a mapping from filename to stat result. It also contains
          listings mapping subrepos and .hg to None.
        - dirsfound is a list of files found to be directories.
        - dirsnotfound is a list of files that the dirstate thinks are
          directories and that were not found.'''

        def badtype(mode):
            # human-readable description for a stat mode we refuse to track
            kind = _(b'unknown')
            if stat.S_ISCHR(mode):
                kind = _(b'character device')
            elif stat.S_ISBLK(mode):
                kind = _(b'block device')
            elif stat.S_ISFIFO(mode):
                kind = _(b'fifo')
            elif stat.S_ISSOCK(mode):
                kind = _(b'socket')
            elif stat.S_ISDIR(mode):
                kind = _(b'directory')
            return _(b'unsupported file type (type is %s)') % kind

        # hoist attribute lookups into locals for the loop below
        matchedir = match.explicitdir
        badfn = match.bad
        dmap = self._map
        lstat = os.lstat
        getkind = stat.S_IFMT
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join
        dirsfound = []
        foundadd = dirsfound.append
        dirsnotfound = []
        notfoundadd = dirsnotfound.append

        # only case-fold when the filesystem needs it and the match is not
        # already exact
        if not match.isexact() and self._checkcase:
            normalize = self._normalize
        else:
            normalize = None

        # drop files that live inside a subrepo; both lists are sorted so a
        # single merge-style pass suffices
        files = sorted(match.files())
        subrepos.sort()
        i, j = 0, 0
        while i < len(files) and j < len(subrepos):
            subpath = subrepos[j] + b"/"
            if files[i] < subpath:
                i += 1
                continue
            while i < len(files) and files[i].startswith(subpath):
                del files[i]
            j += 1

        if not files or b'' in files:
            files = [b'']
            # constructing the foldmap is expensive, so don't do it for the
            # common case where files is ['']
            normalize = None
        # pre-seed sentinels: subrepos and .hg map to None so the walk never
        # descends into them
        results = dict.fromkeys(subrepos)
        results[b'.hg'] = None

        for ff in files:
            if normalize:
                nf = normalize(ff, False, True)
            else:
                nf = ff
            if nf in results:
                continue

            try:
                st = lstat(join(nf))
                kind = getkind(st.st_mode)
                if kind == dirkind:
                    if nf in dmap:
                        # file replaced by dir on disk but still in dirstate
                        results[nf] = None
                    if matchedir:
                        matchedir(nf)
                    foundadd((nf, ff))
                elif kind == regkind or kind == lnkkind:
                    results[nf] = st
                else:
                    badfn(ff, badtype(kind))
                    if nf in dmap:
                        results[nf] = None
            except OSError as inst:  # nf not found on disk - it is dirstate only
                if nf in dmap:  # does it exactly match a missing file?
                    results[nf] = None
                else:  # does it match a missing directory?
                    if self._map.hasdir(nf):
                        if matchedir:
                            matchedir(nf)
                        notfoundadd(nf)
                    else:
                        badfn(ff, encoding.strtolocal(inst.strerror))

        # match.files() may contain explicitly-specified paths that shouldn't
        # be taken; drop them from the list of files found. dirsfound/notfound
        # aren't filtered here because they will be tested later.
        if match.anypats():
            for f in list(results):
                if f == b'.hg' or f in subrepos:
                    # keep sentinel to disable further out-of-repo walks
                    continue
                if not match(f):
                    del results[f]

        # Case insensitive filesystems cannot rely on lstat() failing to detect
        # a case-only rename. Prune the stat object for any file that does not
        # match the case in the filesystem, if there are multiple files that
        # normalize to the same path.
        if match.isexact() and self._checkcase:
            normed = {}

            # group results by their case-folded form
            for f, st in pycompat.iteritems(results):
                if st is None:
                    continue

                nc = util.normcase(f)
                paths = normed.get(nc)

                if paths is None:
                    paths = set()
                    normed[nc] = paths

                paths.add(f)

            for norm, paths in pycompat.iteritems(normed):
                if len(paths) > 1:
                    for path in paths:
                        folded = self._discoverpath(
                            path, norm, True, None, self._map.dirfoldmap
                        )
                        if path != folded:
                            # not the on-disk spelling: drop its stat object
                            results[path] = None

        return results, dirsfound, dirsnotfound
    def walk(self, match, subrepos, unknown, ignored, full=True):
        '''
        Walk recursively through the directory tree, finding all files
        matched by match.

        If full is False, maybe skip some known-clean files.

        Return a dict mapping filename to stat-like object (either
        mercurial.osutil.stat instance or return value of os.stat()).

        '''
        # full is a flag that extensions that hook into walk can use -- this
        # implementation doesn't use it at all. This satisfies the contract
        # because we only guarantee a "maybe".

        # choose the ignore predicates: listing ignored files disables
        # ignoring entirely; listing neither unknown nor ignored lets us
        # ignore everything (skips directory recursion and step 2)
        if ignored:
            ignore = util.never
            dirignore = util.never
        elif unknown:
            ignore = self._ignore
            dirignore = self._dirignore
        else:
            # if not unknown and not ignored, drop dir recursion and step 2
            ignore = util.always
            dirignore = util.always

        # hoist frequently-used lookups into locals for the hot loop
        matchfn = match.matchfn
        matchalways = match.always()
        matchtdir = match.traversedir
        dmap = self._map
        listdir = util.listdir
        lstat = os.lstat
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join

        exact = skipstep3 = False
        if match.isexact():  # match.exact
            exact = True
            dirignore = util.always  # skip step 2
        elif match.prefix():  # match.match, no patterns
            skipstep3 = True

        if not exact and self._checkcase:
            normalize = self._normalize
            normalizefile = self._normalizefile
            skipstep3 = False
        else:
            normalize = self._normalize
            normalizefile = None

        # step 1: find all explicit files
        results, work, dirsnotfound = self._walkexplicit(match, subrepos)

        skipstep3 = skipstep3 and not (work or dirsnotfound)
        work = [d for d in work if not dirignore(d[0])]

        # step 2: visit subdirectories
        def traverse(work, alreadynormed):
            # depth-first walk using an explicit stack ('work')
            wadd = work.append
            while work:
                tracing.counter('dirstate.walk work', len(work))
                nd = work.pop()
                visitentries = match.visitchildrenset(nd)
                if not visitentries:
                    continue
                if visitentries == b'this' or visitentries == b'all':
                    visitentries = None
                skip = None
                if nd != b'':
                    skip = b'.hg'
                try:
                    with tracing.log('dirstate.walk.traverse listdir %s', nd):
                        entries = listdir(join(nd), stat=True, skip=skip)
                except OSError as inst:
                    # unreadable or vanished directory: report and move on
                    if inst.errno in (errno.EACCES, errno.ENOENT):
                        match.bad(
                            self.pathto(nd), encoding.strtolocal(inst.strerror)
                        )
                        continue
                    raise
                for f, kind, st in entries:
                    # Some matchers may return files in the visitentries set,
                    # instead of 'this', if the matcher explicitly mentions them
                    # and is not an exactmatcher. This is acceptable; we do not
                    # make any hard assumptions about file-or-directory below
                    # based on the presence of `f` in visitentries. If
                    # visitchildrenset returned a set, we can always skip the
                    # entries *not* in the set it provided regardless of whether
                    # they're actually a file or a directory.
                    if visitentries and f not in visitentries:
                        continue
                    if normalizefile:
                        # even though f might be a directory, we're only
                        # interested in comparing it to files currently in the
                        # dmap -- therefore normalizefile is enough
                        nf = normalizefile(
                            nd and (nd + b"/" + f) or f, True, True
                        )
                    else:
                        nf = nd and (nd + b"/" + f) or f
                    if nf not in results:
                        if kind == dirkind:
                            if not ignore(nf):
                                if matchtdir:
                                    matchtdir(nf)
                                wadd(nf)
                            if nf in dmap and (matchalways or matchfn(nf)):
                                results[nf] = None
                        elif kind == regkind or kind == lnkkind:
                            if nf in dmap:
                                if matchalways or matchfn(nf):
                                    results[nf] = st
                            elif (matchalways or matchfn(nf)) and not ignore(
                                nf
                            ):
                                # unknown file -- normalize if necessary
                                if not alreadynormed:
                                    nf = normalize(nf, False, True)
                                results[nf] = st
                        elif nf in dmap and (matchalways or matchfn(nf)):
                            results[nf] = None

        for nd, d in work:
            # alreadynormed means that processwork doesn't have to do any
            # expensive directory normalization
            alreadynormed = not normalize or nd == d
            traverse([d], alreadynormed)

        # drop the sentinels seeded by _walkexplicit
        for s in subrepos:
            del results[s]
        del results[b'.hg']

        # step 3: visit remaining files from dmap
        if not skipstep3 and not exact:
            # If a dmap file is not in results yet, it was either
            # a) not matching matchfn b) ignored, c) missing, or d) under a
            # symlink directory.
            if not results and matchalways:
                visit = [f for f in dmap]
            else:
                visit = [f for f in dmap if f not in results and matchfn(f)]
            visit.sort()

            if unknown:
                # unknown == True means we walked all dirs under the roots
                # that wasn't ignored, and everything that matched was stat'ed
                # and is already in results.
                # The rest must thus be ignored or under a symlink.
                audit_path = pathutil.pathauditor(self._root, cached=True)

                for nf in iter(visit):
                    # If a stat for the same file was already added with a
                    # different case, don't add one for this, since that would
                    # make it appear as if the file exists under both names
                    # on disk.
                    if (
                        normalizefile
                        and normalizefile(nf, True, True) in results
                    ):
                        results[nf] = None
                    # Report ignored items in the dmap as long as they are not
                    # under a symlink directory.
                    elif audit_path.check(nf):
                        try:
                            results[nf] = lstat(join(nf))
                            # file was just ignored, no links, and exists
                        except OSError:
                            # file doesn't exist
                            results[nf] = None
                    else:
                        # It's either missing or under a symlink directory
                        # which we in this case report as missing
                        results[nf] = None
            else:
                # We may not have walked the full directory tree above,
                # so stat and check everything we missed.
                iv = iter(visit)
                for st in util.statfiles([join(i) for i in visit]):
                    results[next(iv)] = st
        return results
1078 def status(self, match, subrepos, ignored, clean, unknown):
1078 def status(self, match, subrepos, ignored, clean, unknown):
1079 '''Determine the status of the working copy relative to the
1079 '''Determine the status of the working copy relative to the
1080 dirstate and return a pair of (unsure, status), where status is of type
1080 dirstate and return a pair of (unsure, status), where status is of type
1081 scmutil.status and:
1081 scmutil.status and:
1082
1082
1083 unsure:
1083 unsure:
1084 files that might have been modified since the dirstate was
1084 files that might have been modified since the dirstate was
1085 written, but need to be read to be sure (size is the same
1085 written, but need to be read to be sure (size is the same
1086 but mtime differs)
1086 but mtime differs)
1087 status.modified:
1087 status.modified:
1088 files that have definitely been modified since the dirstate
1088 files that have definitely been modified since the dirstate
1089 was written (different size or mode)
1089 was written (different size or mode)
1090 status.clean:
1090 status.clean:
1091 files that have definitely not been modified since the
1091 files that have definitely not been modified since the
1092 dirstate was written
1092 dirstate was written
1093 '''
1093 '''
1094 listignored, listclean, listunknown = ignored, clean, unknown
1094 listignored, listclean, listunknown = ignored, clean, unknown
1095 lookup, modified, added, unknown, ignored = [], [], [], [], []
1095 lookup, modified, added, unknown, ignored = [], [], [], [], []
1096 removed, deleted, clean = [], [], []
1096 removed, deleted, clean = [], [], []
1097
1097
1098 dmap = self._map
1098 dmap = self._map
1099 dmap.preload()
1099 dmap.preload()
1100
1100
1101 use_rust = True
1101 use_rust = True
1102 if rustmod is None:
1102 if rustmod is None:
1103 use_rust = False
1103 use_rust = False
1104 elif subrepos:
1104 elif subrepos:
1105 use_rust = False
1105 use_rust = False
1106 if bool(listunknown):
1106 if bool(listunknown):
1107 # Pathauditor does not exist yet in Rust, unknown files
1107 # Pathauditor does not exist yet in Rust, unknown files
1108 # can't be trusted.
1108 # can't be trusted.
1109 use_rust = False
1109 use_rust = False
1110 elif self._ignorefiles() and listignored:
1110 elif self._ignorefiles() and listignored:
1111 # Rust has no ignore mechanism yet, so don't use Rust for
1111 # Rust has no ignore mechanism yet, so don't use Rust for
1112 # commands that need ignore.
1112 # commands that need ignore.
1113 use_rust = False
1113 use_rust = False
1114 elif not match.always():
1114 elif not match.always():
1115 # Matchers have yet to be implemented
1115 # Matchers have yet to be implemented
1116 use_rust = False
1116 use_rust = False
1117
1117
1118 if use_rust:
1118 if use_rust:
1119 # Force Rayon (Rust parallelism library) to respect the number of
1120 # workers. This is a temporary workaround until Rust code knows
1121 # how to read the config file.
1122 numcpus = self._ui.configint("worker", "numcpus")
1123 if numcpus is not None:
1124 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1125
1126 workers_enabled = self._ui.configbool("worker", "enabled", True)
1127 if not workers_enabled:
1128 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1129
1119 (
1130 (
1120 lookup,
1131 lookup,
1121 modified,
1132 modified,
1122 added,
1133 added,
1123 removed,
1134 removed,
1124 deleted,
1135 deleted,
1125 unknown,
1136 unknown,
1126 clean,
1137 clean,
1127 ) = rustmod.status(
1138 ) = rustmod.status(
1128 dmap._rustmap,
1139 dmap._rustmap,
1129 self._rootdir,
1140 self._rootdir,
1130 match.files(),
1141 match.files(),
1131 bool(listclean),
1142 bool(listclean),
1132 self._lastnormaltime,
1143 self._lastnormaltime,
1133 self._checkexec,
1144 self._checkexec,
1134 )
1145 )
1135
1146
1136 status = scmutil.status(
1147 status = scmutil.status(
1137 modified=modified,
1148 modified=modified,
1138 added=added,
1149 added=added,
1139 removed=removed,
1150 removed=removed,
1140 deleted=deleted,
1151 deleted=deleted,
1141 unknown=unknown,
1152 unknown=unknown,
1142 ignored=ignored,
1153 ignored=ignored,
1143 clean=clean,
1154 clean=clean,
1144 )
1155 )
1145 return (lookup, status)
1156 return (lookup, status)
1146
1157
1147 dcontains = dmap.__contains__
1158 dcontains = dmap.__contains__
1148 dget = dmap.__getitem__
1159 dget = dmap.__getitem__
1149 ladd = lookup.append # aka "unsure"
1160 ladd = lookup.append # aka "unsure"
1150 madd = modified.append
1161 madd = modified.append
1151 aadd = added.append
1162 aadd = added.append
1152 uadd = unknown.append
1163 uadd = unknown.append
1153 iadd = ignored.append
1164 iadd = ignored.append
1154 radd = removed.append
1165 radd = removed.append
1155 dadd = deleted.append
1166 dadd = deleted.append
1156 cadd = clean.append
1167 cadd = clean.append
1157 mexact = match.exact
1168 mexact = match.exact
1158 dirignore = self._dirignore
1169 dirignore = self._dirignore
1159 checkexec = self._checkexec
1170 checkexec = self._checkexec
1160 copymap = self._map.copymap
1171 copymap = self._map.copymap
1161 lastnormaltime = self._lastnormaltime
1172 lastnormaltime = self._lastnormaltime
1162
1173
1163 # We need to do full walks when either
1174 # We need to do full walks when either
1164 # - we're listing all clean files, or
1175 # - we're listing all clean files, or
1165 # - match.traversedir does something, because match.traversedir should
1176 # - match.traversedir does something, because match.traversedir should
1166 # be called for every dir in the working dir
1177 # be called for every dir in the working dir
1167 full = listclean or match.traversedir is not None
1178 full = listclean or match.traversedir is not None
1168 for fn, st in pycompat.iteritems(
1179 for fn, st in pycompat.iteritems(
1169 self.walk(match, subrepos, listunknown, listignored, full=full)
1180 self.walk(match, subrepos, listunknown, listignored, full=full)
1170 ):
1181 ):
1171 if not dcontains(fn):
1182 if not dcontains(fn):
1172 if (listignored or mexact(fn)) and dirignore(fn):
1183 if (listignored or mexact(fn)) and dirignore(fn):
1173 if listignored:
1184 if listignored:
1174 iadd(fn)
1185 iadd(fn)
1175 else:
1186 else:
1176 uadd(fn)
1187 uadd(fn)
1177 continue
1188 continue
1178
1189
1179 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1190 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1180 # written like that for performance reasons. dmap[fn] is not a
1191 # written like that for performance reasons. dmap[fn] is not a
1181 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1192 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1182 # opcode has fast paths when the value to be unpacked is a tuple or
1193 # opcode has fast paths when the value to be unpacked is a tuple or
1183 # a list, but falls back to creating a full-fledged iterator in
1194 # a list, but falls back to creating a full-fledged iterator in
1184 # general. That is much slower than simply accessing and storing the
1195 # general. That is much slower than simply accessing and storing the
1185 # tuple members one by one.
1196 # tuple members one by one.
1186 t = dget(fn)
1197 t = dget(fn)
1187 state = t[0]
1198 state = t[0]
1188 mode = t[1]
1199 mode = t[1]
1189 size = t[2]
1200 size = t[2]
1190 time = t[3]
1201 time = t[3]
1191
1202
1192 if not st and state in b"nma":
1203 if not st and state in b"nma":
1193 dadd(fn)
1204 dadd(fn)
1194 elif state == b'n':
1205 elif state == b'n':
1195 if (
1206 if (
1196 size >= 0
1207 size >= 0
1197 and (
1208 and (
1198 (size != st.st_size and size != st.st_size & _rangemask)
1209 (size != st.st_size and size != st.st_size & _rangemask)
1199 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1210 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1200 )
1211 )
1201 or size == -2 # other parent
1212 or size == -2 # other parent
1202 or fn in copymap
1213 or fn in copymap
1203 ):
1214 ):
1204 madd(fn)
1215 madd(fn)
1205 elif (
1216 elif (
1206 time != st[stat.ST_MTIME]
1217 time != st[stat.ST_MTIME]
1207 and time != st[stat.ST_MTIME] & _rangemask
1218 and time != st[stat.ST_MTIME] & _rangemask
1208 ):
1219 ):
1209 ladd(fn)
1220 ladd(fn)
1210 elif st[stat.ST_MTIME] == lastnormaltime:
1221 elif st[stat.ST_MTIME] == lastnormaltime:
1211 # fn may have just been marked as normal and it may have
1222 # fn may have just been marked as normal and it may have
1212 # changed in the same second without changing its size.
1223 # changed in the same second without changing its size.
1213 # This can happen if we quickly do multiple commits.
1224 # This can happen if we quickly do multiple commits.
1214 # Force lookup, so we don't miss such a racy file change.
1225 # Force lookup, so we don't miss such a racy file change.
1215 ladd(fn)
1226 ladd(fn)
1216 elif listclean:
1227 elif listclean:
1217 cadd(fn)
1228 cadd(fn)
1218 elif state == b'm':
1229 elif state == b'm':
1219 madd(fn)
1230 madd(fn)
1220 elif state == b'a':
1231 elif state == b'a':
1221 aadd(fn)
1232 aadd(fn)
1222 elif state == b'r':
1233 elif state == b'r':
1223 radd(fn)
1234 radd(fn)
1224
1235
1225 return (
1236 return (
1226 lookup,
1237 lookup,
1227 scmutil.status(
1238 scmutil.status(
1228 modified, added, removed, deleted, unknown, ignored, clean
1239 modified, added, removed, deleted, unknown, ignored, clean
1229 ),
1240 ),
1230 )
1241 )
1231
1242
1232 def matches(self, match):
1243 def matches(self, match):
1233 '''
1244 '''
1234 return files in the dirstate (in whatever state) filtered by match
1245 return files in the dirstate (in whatever state) filtered by match
1235 '''
1246 '''
1236 dmap = self._map
1247 dmap = self._map
1237 if match.always():
1248 if match.always():
1238 return dmap.keys()
1249 return dmap.keys()
1239 files = match.files()
1250 files = match.files()
1240 if match.isexact():
1251 if match.isexact():
1241 # fast path -- filter the other way around, since typically files is
1252 # fast path -- filter the other way around, since typically files is
1242 # much smaller than dmap
1253 # much smaller than dmap
1243 return [f for f in files if f in dmap]
1254 return [f for f in files if f in dmap]
1244 if match.prefix() and all(fn in dmap for fn in files):
1255 if match.prefix() and all(fn in dmap for fn in files):
1245 # fast path -- all the values are known to be files, so just return
1256 # fast path -- all the values are known to be files, so just return
1246 # that
1257 # that
1247 return list(files)
1258 return list(files)
1248 return [f for f in dmap if match(f)]
1259 return [f for f in dmap if match(f)]
1249
1260
1250 def _actualfilename(self, tr):
1261 def _actualfilename(self, tr):
1251 if tr:
1262 if tr:
1252 return self._pendingfilename
1263 return self._pendingfilename
1253 else:
1264 else:
1254 return self._filename
1265 return self._filename
1255
1266
1256 def savebackup(self, tr, backupname):
1267 def savebackup(self, tr, backupname):
1257 '''Save current dirstate into backup file'''
1268 '''Save current dirstate into backup file'''
1258 filename = self._actualfilename(tr)
1269 filename = self._actualfilename(tr)
1259 assert backupname != filename
1270 assert backupname != filename
1260
1271
1261 # use '_writedirstate' instead of 'write' to write changes certainly,
1272 # use '_writedirstate' instead of 'write' to write changes certainly,
1262 # because the latter omits writing out if transaction is running.
1273 # because the latter omits writing out if transaction is running.
1263 # output file will be used to create backup of dirstate at this point.
1274 # output file will be used to create backup of dirstate at this point.
1264 if self._dirty or not self._opener.exists(filename):
1275 if self._dirty or not self._opener.exists(filename):
1265 self._writedirstate(
1276 self._writedirstate(
1266 self._opener(filename, b"w", atomictemp=True, checkambig=True)
1277 self._opener(filename, b"w", atomictemp=True, checkambig=True)
1267 )
1278 )
1268
1279
1269 if tr:
1280 if tr:
1270 # ensure that subsequent tr.writepending returns True for
1281 # ensure that subsequent tr.writepending returns True for
1271 # changes written out above, even if dirstate is never
1282 # changes written out above, even if dirstate is never
1272 # changed after this
1283 # changed after this
1273 tr.addfilegenerator(
1284 tr.addfilegenerator(
1274 b'dirstate',
1285 b'dirstate',
1275 (self._filename,),
1286 (self._filename,),
1276 self._writedirstate,
1287 self._writedirstate,
1277 location=b'plain',
1288 location=b'plain',
1278 )
1289 )
1279
1290
1280 # ensure that pending file written above is unlinked at
1291 # ensure that pending file written above is unlinked at
1281 # failure, even if tr.writepending isn't invoked until the
1292 # failure, even if tr.writepending isn't invoked until the
1282 # end of this transaction
1293 # end of this transaction
1283 tr.registertmp(filename, location=b'plain')
1294 tr.registertmp(filename, location=b'plain')
1284
1295
1285 self._opener.tryunlink(backupname)
1296 self._opener.tryunlink(backupname)
1286 # hardlink backup is okay because _writedirstate is always called
1297 # hardlink backup is okay because _writedirstate is always called
1287 # with an "atomictemp=True" file.
1298 # with an "atomictemp=True" file.
1288 util.copyfile(
1299 util.copyfile(
1289 self._opener.join(filename),
1300 self._opener.join(filename),
1290 self._opener.join(backupname),
1301 self._opener.join(backupname),
1291 hardlink=True,
1302 hardlink=True,
1292 )
1303 )
1293
1304
1294 def restorebackup(self, tr, backupname):
1305 def restorebackup(self, tr, backupname):
1295 '''Restore dirstate by backup file'''
1306 '''Restore dirstate by backup file'''
1296 # this "invalidate()" prevents "wlock.release()" from writing
1307 # this "invalidate()" prevents "wlock.release()" from writing
1297 # changes of dirstate out after restoring from backup file
1308 # changes of dirstate out after restoring from backup file
1298 self.invalidate()
1309 self.invalidate()
1299 filename = self._actualfilename(tr)
1310 filename = self._actualfilename(tr)
1300 o = self._opener
1311 o = self._opener
1301 if util.samefile(o.join(backupname), o.join(filename)):
1312 if util.samefile(o.join(backupname), o.join(filename)):
1302 o.unlink(backupname)
1313 o.unlink(backupname)
1303 else:
1314 else:
1304 o.rename(backupname, filename, checkambig=True)
1315 o.rename(backupname, filename, checkambig=True)
1305
1316
1306 def clearbackup(self, tr, backupname):
1317 def clearbackup(self, tr, backupname):
1307 '''Clear backup file'''
1318 '''Clear backup file'''
1308 self._opener.unlink(backupname)
1319 self._opener.unlink(backupname)
1309
1320
1310
1321
1311 class dirstatemap(object):
1322 class dirstatemap(object):
1312 """Map encapsulating the dirstate's contents.
1323 """Map encapsulating the dirstate's contents.
1313
1324
1314 The dirstate contains the following state:
1325 The dirstate contains the following state:
1315
1326
1316 - `identity` is the identity of the dirstate file, which can be used to
1327 - `identity` is the identity of the dirstate file, which can be used to
1317 detect when changes have occurred to the dirstate file.
1328 detect when changes have occurred to the dirstate file.
1318
1329
1319 - `parents` is a pair containing the parents of the working copy. The
1330 - `parents` is a pair containing the parents of the working copy. The
1320 parents are updated by calling `setparents`.
1331 parents are updated by calling `setparents`.
1321
1332
1322 - the state map maps filenames to tuples of (state, mode, size, mtime),
1333 - the state map maps filenames to tuples of (state, mode, size, mtime),
1323 where state is a single character representing 'normal', 'added',
1334 where state is a single character representing 'normal', 'added',
1324 'removed', or 'merged'. It is read by treating the dirstate as a
1335 'removed', or 'merged'. It is read by treating the dirstate as a
1325 dict. File state is updated by calling the `addfile`, `removefile` and
1336 dict. File state is updated by calling the `addfile`, `removefile` and
1326 `dropfile` methods.
1337 `dropfile` methods.
1327
1338
1328 - `copymap` maps destination filenames to their source filename.
1339 - `copymap` maps destination filenames to their source filename.
1329
1340
1330 The dirstate also provides the following views onto the state:
1341 The dirstate also provides the following views onto the state:
1331
1342
1332 - `nonnormalset` is a set of the filenames that have state other
1343 - `nonnormalset` is a set of the filenames that have state other
1333 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1344 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1334
1345
1335 - `otherparentset` is a set of the filenames that are marked as coming
1346 - `otherparentset` is a set of the filenames that are marked as coming
1336 from the second parent when the dirstate is currently being merged.
1347 from the second parent when the dirstate is currently being merged.
1337
1348
1338 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1349 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1339 form that they appear as in the dirstate.
1350 form that they appear as in the dirstate.
1340
1351
1341 - `dirfoldmap` is a dict mapping normalized directory names to the
1352 - `dirfoldmap` is a dict mapping normalized directory names to the
1342 denormalized form that they appear as in the dirstate.
1353 denormalized form that they appear as in the dirstate.
1343 """
1354 """
1344
1355
1345 def __init__(self, ui, opener, root):
1356 def __init__(self, ui, opener, root):
1346 self._ui = ui
1357 self._ui = ui
1347 self._opener = opener
1358 self._opener = opener
1348 self._root = root
1359 self._root = root
1349 self._filename = b'dirstate'
1360 self._filename = b'dirstate'
1350
1361
1351 self._parents = None
1362 self._parents = None
1352 self._dirtyparents = False
1363 self._dirtyparents = False
1353
1364
1354 # for consistent view between _pl() and _read() invocations
1365 # for consistent view between _pl() and _read() invocations
1355 self._pendingmode = None
1366 self._pendingmode = None
1356
1367
1357 @propertycache
1368 @propertycache
1358 def _map(self):
1369 def _map(self):
1359 self._map = {}
1370 self._map = {}
1360 self.read()
1371 self.read()
1361 return self._map
1372 return self._map
1362
1373
1363 @propertycache
1374 @propertycache
1364 def copymap(self):
1375 def copymap(self):
1365 self.copymap = {}
1376 self.copymap = {}
1366 self._map
1377 self._map
1367 return self.copymap
1378 return self.copymap
1368
1379
1369 def clear(self):
1380 def clear(self):
1370 self._map.clear()
1381 self._map.clear()
1371 self.copymap.clear()
1382 self.copymap.clear()
1372 self.setparents(nullid, nullid)
1383 self.setparents(nullid, nullid)
1373 util.clearcachedproperty(self, b"_dirs")
1384 util.clearcachedproperty(self, b"_dirs")
1374 util.clearcachedproperty(self, b"_alldirs")
1385 util.clearcachedproperty(self, b"_alldirs")
1375 util.clearcachedproperty(self, b"filefoldmap")
1386 util.clearcachedproperty(self, b"filefoldmap")
1376 util.clearcachedproperty(self, b"dirfoldmap")
1387 util.clearcachedproperty(self, b"dirfoldmap")
1377 util.clearcachedproperty(self, b"nonnormalset")
1388 util.clearcachedproperty(self, b"nonnormalset")
1378 util.clearcachedproperty(self, b"otherparentset")
1389 util.clearcachedproperty(self, b"otherparentset")
1379
1390
1380 def items(self):
1391 def items(self):
1381 return pycompat.iteritems(self._map)
1392 return pycompat.iteritems(self._map)
1382
1393
1383 # forward for python2,3 compat
1394 # forward for python2,3 compat
1384 iteritems = items
1395 iteritems = items
1385
1396
1386 def __len__(self):
1397 def __len__(self):
1387 return len(self._map)
1398 return len(self._map)
1388
1399
1389 def __iter__(self):
1400 def __iter__(self):
1390 return iter(self._map)
1401 return iter(self._map)
1391
1402
1392 def get(self, key, default=None):
1403 def get(self, key, default=None):
1393 return self._map.get(key, default)
1404 return self._map.get(key, default)
1394
1405
1395 def __contains__(self, key):
1406 def __contains__(self, key):
1396 return key in self._map
1407 return key in self._map
1397
1408
1398 def __getitem__(self, key):
1409 def __getitem__(self, key):
1399 return self._map[key]
1410 return self._map[key]
1400
1411
1401 def keys(self):
1412 def keys(self):
1402 return self._map.keys()
1413 return self._map.keys()
1403
1414
1404 def preload(self):
1415 def preload(self):
1405 """Loads the underlying data, if it's not already loaded"""
1416 """Loads the underlying data, if it's not already loaded"""
1406 self._map
1417 self._map
1407
1418
1408 def addfile(self, f, oldstate, state, mode, size, mtime):
1419 def addfile(self, f, oldstate, state, mode, size, mtime):
1409 """Add a tracked file to the dirstate."""
1420 """Add a tracked file to the dirstate."""
1410 if oldstate in b"?r" and r"_dirs" in self.__dict__:
1421 if oldstate in b"?r" and r"_dirs" in self.__dict__:
1411 self._dirs.addpath(f)
1422 self._dirs.addpath(f)
1412 if oldstate == b"?" and r"_alldirs" in self.__dict__:
1423 if oldstate == b"?" and r"_alldirs" in self.__dict__:
1413 self._alldirs.addpath(f)
1424 self._alldirs.addpath(f)
1414 self._map[f] = dirstatetuple(state, mode, size, mtime)
1425 self._map[f] = dirstatetuple(state, mode, size, mtime)
1415 if state != b'n' or mtime == -1:
1426 if state != b'n' or mtime == -1:
1416 self.nonnormalset.add(f)
1427 self.nonnormalset.add(f)
1417 if size == -2:
1428 if size == -2:
1418 self.otherparentset.add(f)
1429 self.otherparentset.add(f)
1419
1430
1420 def removefile(self, f, oldstate, size):
1431 def removefile(self, f, oldstate, size):
1421 """
1432 """
1422 Mark a file as removed in the dirstate.
1433 Mark a file as removed in the dirstate.
1423
1434
1424 The `size` parameter is used to store sentinel values that indicate
1435 The `size` parameter is used to store sentinel values that indicate
1425 the file's previous state. In the future, we should refactor this
1436 the file's previous state. In the future, we should refactor this
1426 to be more explicit about what that state is.
1437 to be more explicit about what that state is.
1427 """
1438 """
1428 if oldstate not in b"?r" and r"_dirs" in self.__dict__:
1439 if oldstate not in b"?r" and r"_dirs" in self.__dict__:
1429 self._dirs.delpath(f)
1440 self._dirs.delpath(f)
1430 if oldstate == b"?" and r"_alldirs" in self.__dict__:
1441 if oldstate == b"?" and r"_alldirs" in self.__dict__:
1431 self._alldirs.addpath(f)
1442 self._alldirs.addpath(f)
1432 if r"filefoldmap" in self.__dict__:
1443 if r"filefoldmap" in self.__dict__:
1433 normed = util.normcase(f)
1444 normed = util.normcase(f)
1434 self.filefoldmap.pop(normed, None)
1445 self.filefoldmap.pop(normed, None)
1435 self._map[f] = dirstatetuple(b'r', 0, size, 0)
1446 self._map[f] = dirstatetuple(b'r', 0, size, 0)
1436 self.nonnormalset.add(f)
1447 self.nonnormalset.add(f)
1437
1448
1438 def dropfile(self, f, oldstate):
1449 def dropfile(self, f, oldstate):
1439 """
1450 """
1440 Remove a file from the dirstate. Returns True if the file was
1451 Remove a file from the dirstate. Returns True if the file was
1441 previously recorded.
1452 previously recorded.
1442 """
1453 """
1443 exists = self._map.pop(f, None) is not None
1454 exists = self._map.pop(f, None) is not None
1444 if exists:
1455 if exists:
1445 if oldstate != b"r" and r"_dirs" in self.__dict__:
1456 if oldstate != b"r" and r"_dirs" in self.__dict__:
1446 self._dirs.delpath(f)
1457 self._dirs.delpath(f)
1447 if r"_alldirs" in self.__dict__:
1458 if r"_alldirs" in self.__dict__:
1448 self._alldirs.delpath(f)
1459 self._alldirs.delpath(f)
1449 if r"filefoldmap" in self.__dict__:
1460 if r"filefoldmap" in self.__dict__:
1450 normed = util.normcase(f)
1461 normed = util.normcase(f)
1451 self.filefoldmap.pop(normed, None)
1462 self.filefoldmap.pop(normed, None)
1452 self.nonnormalset.discard(f)
1463 self.nonnormalset.discard(f)
1453 return exists
1464 return exists
1454
1465
1455 def clearambiguoustimes(self, files, now):
1466 def clearambiguoustimes(self, files, now):
1456 for f in files:
1467 for f in files:
1457 e = self.get(f)
1468 e = self.get(f)
1458 if e is not None and e[0] == b'n' and e[3] == now:
1469 if e is not None and e[0] == b'n' and e[3] == now:
1459 self._map[f] = dirstatetuple(e[0], e[1], e[2], -1)
1470 self._map[f] = dirstatetuple(e[0], e[1], e[2], -1)
1460 self.nonnormalset.add(f)
1471 self.nonnormalset.add(f)
1461
1472
1462 def nonnormalentries(self):
1473 def nonnormalentries(self):
1463 '''Compute the nonnormal dirstate entries from the dmap'''
1474 '''Compute the nonnormal dirstate entries from the dmap'''
1464 try:
1475 try:
1465 return parsers.nonnormalotherparententries(self._map)
1476 return parsers.nonnormalotherparententries(self._map)
1466 except AttributeError:
1477 except AttributeError:
1467 nonnorm = set()
1478 nonnorm = set()
1468 otherparent = set()
1479 otherparent = set()
1469 for fname, e in pycompat.iteritems(self._map):
1480 for fname, e in pycompat.iteritems(self._map):
1470 if e[0] != b'n' or e[3] == -1:
1481 if e[0] != b'n' or e[3] == -1:
1471 nonnorm.add(fname)
1482 nonnorm.add(fname)
1472 if e[0] == b'n' and e[2] == -2:
1483 if e[0] == b'n' and e[2] == -2:
1473 otherparent.add(fname)
1484 otherparent.add(fname)
1474 return nonnorm, otherparent
1485 return nonnorm, otherparent
1475
1486
1476 @propertycache
1487 @propertycache
1477 def filefoldmap(self):
1488 def filefoldmap(self):
1478 """Returns a dictionary mapping normalized case paths to their
1489 """Returns a dictionary mapping normalized case paths to their
1479 non-normalized versions.
1490 non-normalized versions.
1480 """
1491 """
1481 try:
1492 try:
1482 makefilefoldmap = parsers.make_file_foldmap
1493 makefilefoldmap = parsers.make_file_foldmap
1483 except AttributeError:
1494 except AttributeError:
1484 pass
1495 pass
1485 else:
1496 else:
1486 return makefilefoldmap(
1497 return makefilefoldmap(
1487 self._map, util.normcasespec, util.normcasefallback
1498 self._map, util.normcasespec, util.normcasefallback
1488 )
1499 )
1489
1500
1490 f = {}
1501 f = {}
1491 normcase = util.normcase
1502 normcase = util.normcase
1492 for name, s in pycompat.iteritems(self._map):
1503 for name, s in pycompat.iteritems(self._map):
1493 if s[0] != b'r':
1504 if s[0] != b'r':
1494 f[normcase(name)] = name
1505 f[normcase(name)] = name
1495 f[b'.'] = b'.' # prevents useless util.fspath() invocation
1506 f[b'.'] = b'.' # prevents useless util.fspath() invocation
1496 return f
1507 return f
1497
1508
1498 def hastrackeddir(self, d):
1509 def hastrackeddir(self, d):
1499 """
1510 """
1500 Returns True if the dirstate contains a tracked (not removed) file
1511 Returns True if the dirstate contains a tracked (not removed) file
1501 in this directory.
1512 in this directory.
1502 """
1513 """
1503 return d in self._dirs
1514 return d in self._dirs
1504
1515
1505 def hasdir(self, d):
1516 def hasdir(self, d):
1506 """
1517 """
1507 Returns True if the dirstate contains a file (tracked or removed)
1518 Returns True if the dirstate contains a file (tracked or removed)
1508 in this directory.
1519 in this directory.
1509 """
1520 """
1510 return d in self._alldirs
1521 return d in self._alldirs
1511
1522
1512 @propertycache
1523 @propertycache
1513 def _dirs(self):
1524 def _dirs(self):
1514 return util.dirs(self._map, b'r')
1525 return util.dirs(self._map, b'r')
1515
1526
1516 @propertycache
1527 @propertycache
1517 def _alldirs(self):
1528 def _alldirs(self):
1518 return util.dirs(self._map)
1529 return util.dirs(self._map)
1519
1530
1520 def _opendirstatefile(self):
1531 def _opendirstatefile(self):
1521 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1532 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1522 if self._pendingmode is not None and self._pendingmode != mode:
1533 if self._pendingmode is not None and self._pendingmode != mode:
1523 fp.close()
1534 fp.close()
1524 raise error.Abort(
1535 raise error.Abort(
1525 _(b'working directory state may be changed parallelly')
1536 _(b'working directory state may be changed parallelly')
1526 )
1537 )
1527 self._pendingmode = mode
1538 self._pendingmode = mode
1528 return fp
1539 return fp
1529
1540
1530 def parents(self):
1541 def parents(self):
1531 if not self._parents:
1542 if not self._parents:
1532 try:
1543 try:
1533 fp = self._opendirstatefile()
1544 fp = self._opendirstatefile()
1534 st = fp.read(40)
1545 st = fp.read(40)
1535 fp.close()
1546 fp.close()
1536 except IOError as err:
1547 except IOError as err:
1537 if err.errno != errno.ENOENT:
1548 if err.errno != errno.ENOENT:
1538 raise
1549 raise
1539 # File doesn't exist, so the current state is empty
1550 # File doesn't exist, so the current state is empty
1540 st = b''
1551 st = b''
1541
1552
1542 l = len(st)
1553 l = len(st)
1543 if l == 40:
1554 if l == 40:
1544 self._parents = (st[:20], st[20:40])
1555 self._parents = (st[:20], st[20:40])
1545 elif l == 0:
1556 elif l == 0:
1546 self._parents = (nullid, nullid)
1557 self._parents = (nullid, nullid)
1547 else:
1558 else:
1548 raise error.Abort(
1559 raise error.Abort(
1549 _(b'working directory state appears damaged!')
1560 _(b'working directory state appears damaged!')
1550 )
1561 )
1551
1562
1552 return self._parents
1563 return self._parents
1553
1564
1554 def setparents(self, p1, p2):
1565 def setparents(self, p1, p2):
1555 self._parents = (p1, p2)
1566 self._parents = (p1, p2)
1556 self._dirtyparents = True
1567 self._dirtyparents = True
1557
1568
1558 def read(self):
1569 def read(self):
1559 # ignore HG_PENDING because identity is used only for writing
1570 # ignore HG_PENDING because identity is used only for writing
1560 self.identity = util.filestat.frompath(
1571 self.identity = util.filestat.frompath(
1561 self._opener.join(self._filename)
1572 self._opener.join(self._filename)
1562 )
1573 )
1563
1574
1564 try:
1575 try:
1565 fp = self._opendirstatefile()
1576 fp = self._opendirstatefile()
1566 try:
1577 try:
1567 st = fp.read()
1578 st = fp.read()
1568 finally:
1579 finally:
1569 fp.close()
1580 fp.close()
1570 except IOError as err:
1581 except IOError as err:
1571 if err.errno != errno.ENOENT:
1582 if err.errno != errno.ENOENT:
1572 raise
1583 raise
1573 return
1584 return
1574 if not st:
1585 if not st:
1575 return
1586 return
1576
1587
1577 if util.safehasattr(parsers, b'dict_new_presized'):
1588 if util.safehasattr(parsers, b'dict_new_presized'):
1578 # Make an estimate of the number of files in the dirstate based on
1589 # Make an estimate of the number of files in the dirstate based on
1579 # its size. From a linear regression on a set of real-world repos,
1590 # its size. From a linear regression on a set of real-world repos,
1580 # all over 10,000 files, the size of a dirstate entry is 85
1591 # all over 10,000 files, the size of a dirstate entry is 85
1581 # bytes. The cost of resizing is significantly higher than the cost
1592 # bytes. The cost of resizing is significantly higher than the cost
1582 # of filling in a larger presized dict, so subtract 20% from the
1593 # of filling in a larger presized dict, so subtract 20% from the
1583 # size.
1594 # size.
1584 #
1595 #
1585 # This heuristic is imperfect in many ways, so in a future dirstate
1596 # This heuristic is imperfect in many ways, so in a future dirstate
1586 # format update it makes sense to just record the number of entries
1597 # format update it makes sense to just record the number of entries
1587 # on write.
1598 # on write.
1588 self._map = parsers.dict_new_presized(len(st) // 71)
1599 self._map = parsers.dict_new_presized(len(st) // 71)
1589
1600
1590 # Python's garbage collector triggers a GC each time a certain number
1601 # Python's garbage collector triggers a GC each time a certain number
1591 # of container objects (the number being defined by
1602 # of container objects (the number being defined by
1592 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1603 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1593 # for each file in the dirstate. The C version then immediately marks
1604 # for each file in the dirstate. The C version then immediately marks
1594 # them as not to be tracked by the collector. However, this has no
1605 # them as not to be tracked by the collector. However, this has no
1595 # effect on when GCs are triggered, only on what objects the GC looks
1606 # effect on when GCs are triggered, only on what objects the GC looks
1596 # into. This means that O(number of files) GCs are unavoidable.
1607 # into. This means that O(number of files) GCs are unavoidable.
1597 # Depending on when in the process's lifetime the dirstate is parsed,
1608 # Depending on when in the process's lifetime the dirstate is parsed,
1598 # this can get very expensive. As a workaround, disable GC while
1609 # this can get very expensive. As a workaround, disable GC while
1599 # parsing the dirstate.
1610 # parsing the dirstate.
1600 #
1611 #
1601 # (we cannot decorate the function directly since it is in a C module)
1612 # (we cannot decorate the function directly since it is in a C module)
1602 parse_dirstate = util.nogc(parsers.parse_dirstate)
1613 parse_dirstate = util.nogc(parsers.parse_dirstate)
1603 p = parse_dirstate(self._map, self.copymap, st)
1614 p = parse_dirstate(self._map, self.copymap, st)
1604 if not self._dirtyparents:
1615 if not self._dirtyparents:
1605 self.setparents(*p)
1616 self.setparents(*p)
1606
1617
1607 # Avoid excess attribute lookups by fast pathing certain checks
1618 # Avoid excess attribute lookups by fast pathing certain checks
1608 self.__contains__ = self._map.__contains__
1619 self.__contains__ = self._map.__contains__
1609 self.__getitem__ = self._map.__getitem__
1620 self.__getitem__ = self._map.__getitem__
1610 self.get = self._map.get
1621 self.get = self._map.get
1611
1622
1612 def write(self, st, now):
1623 def write(self, st, now):
1613 st.write(
1624 st.write(
1614 parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
1625 parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
1615 )
1626 )
1616 st.close()
1627 st.close()
1617 self._dirtyparents = False
1628 self._dirtyparents = False
1618 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1629 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1619
1630
1620 @propertycache
1631 @propertycache
1621 def nonnormalset(self):
1632 def nonnormalset(self):
1622 nonnorm, otherparents = self.nonnormalentries()
1633 nonnorm, otherparents = self.nonnormalentries()
1623 self.otherparentset = otherparents
1634 self.otherparentset = otherparents
1624 return nonnorm
1635 return nonnorm
1625
1636
1626 @propertycache
1637 @propertycache
1627 def otherparentset(self):
1638 def otherparentset(self):
1628 nonnorm, otherparents = self.nonnormalentries()
1639 nonnorm, otherparents = self.nonnormalentries()
1629 self.nonnormalset = nonnorm
1640 self.nonnormalset = nonnorm
1630 return otherparents
1641 return otherparents
1631
1642
1632 @propertycache
1643 @propertycache
1633 def identity(self):
1644 def identity(self):
1634 self._map
1645 self._map
1635 return self.identity
1646 return self.identity
1636
1647
1637 @propertycache
1648 @propertycache
1638 def dirfoldmap(self):
1649 def dirfoldmap(self):
1639 f = {}
1650 f = {}
1640 normcase = util.normcase
1651 normcase = util.normcase
1641 for name in self._dirs:
1652 for name in self._dirs:
1642 f[normcase(name)] = name
1653 f[normcase(name)] = name
1643 return f
1654 return f
1644
1655
1645
1656
1646 if rustmod is not None:
1657 if rustmod is not None:
1647
1658
1648 class dirstatemap(object):
1659 class dirstatemap(object):
1649 def __init__(self, ui, opener, root):
1660 def __init__(self, ui, opener, root):
1650 self._ui = ui
1661 self._ui = ui
1651 self._opener = opener
1662 self._opener = opener
1652 self._root = root
1663 self._root = root
1653 self._filename = b'dirstate'
1664 self._filename = b'dirstate'
1654 self._parents = None
1665 self._parents = None
1655 self._dirtyparents = False
1666 self._dirtyparents = False
1656
1667
1657 # for consistent view between _pl() and _read() invocations
1668 # for consistent view between _pl() and _read() invocations
1658 self._pendingmode = None
1669 self._pendingmode = None
1659
1670
1660 def addfile(self, *args, **kwargs):
1671 def addfile(self, *args, **kwargs):
1661 return self._rustmap.addfile(*args, **kwargs)
1672 return self._rustmap.addfile(*args, **kwargs)
1662
1673
1663 def removefile(self, *args, **kwargs):
1674 def removefile(self, *args, **kwargs):
1664 return self._rustmap.removefile(*args, **kwargs)
1675 return self._rustmap.removefile(*args, **kwargs)
1665
1676
1666 def dropfile(self, *args, **kwargs):
1677 def dropfile(self, *args, **kwargs):
1667 return self._rustmap.dropfile(*args, **kwargs)
1678 return self._rustmap.dropfile(*args, **kwargs)
1668
1679
1669 def clearambiguoustimes(self, *args, **kwargs):
1680 def clearambiguoustimes(self, *args, **kwargs):
1670 return self._rustmap.clearambiguoustimes(*args, **kwargs)
1681 return self._rustmap.clearambiguoustimes(*args, **kwargs)
1671
1682
1672 def nonnormalentries(self):
1683 def nonnormalentries(self):
1673 return self._rustmap.nonnormalentries()
1684 return self._rustmap.nonnormalentries()
1674
1685
1675 def get(self, *args, **kwargs):
1686 def get(self, *args, **kwargs):
1676 return self._rustmap.get(*args, **kwargs)
1687 return self._rustmap.get(*args, **kwargs)
1677
1688
1678 @propertycache
1689 @propertycache
1679 def _rustmap(self):
1690 def _rustmap(self):
1680 self._rustmap = rustmod.DirstateMap(self._root)
1691 self._rustmap = rustmod.DirstateMap(self._root)
1681 self.read()
1692 self.read()
1682 return self._rustmap
1693 return self._rustmap
1683
1694
1684 @property
1695 @property
1685 def copymap(self):
1696 def copymap(self):
1686 return self._rustmap.copymap()
1697 return self._rustmap.copymap()
1687
1698
1688 def preload(self):
1699 def preload(self):
1689 self._rustmap
1700 self._rustmap
1690
1701
1691 def clear(self):
1702 def clear(self):
1692 self._rustmap.clear()
1703 self._rustmap.clear()
1693 self.setparents(nullid, nullid)
1704 self.setparents(nullid, nullid)
1694 util.clearcachedproperty(self, b"_dirs")
1705 util.clearcachedproperty(self, b"_dirs")
1695 util.clearcachedproperty(self, b"_alldirs")
1706 util.clearcachedproperty(self, b"_alldirs")
1696 util.clearcachedproperty(self, b"dirfoldmap")
1707 util.clearcachedproperty(self, b"dirfoldmap")
1697
1708
1698 def items(self):
1709 def items(self):
1699 return self._rustmap.items()
1710 return self._rustmap.items()
1700
1711
1701 def keys(self):
1712 def keys(self):
1702 return iter(self._rustmap)
1713 return iter(self._rustmap)
1703
1714
1704 def __contains__(self, key):
1715 def __contains__(self, key):
1705 return key in self._rustmap
1716 return key in self._rustmap
1706
1717
1707 def __getitem__(self, item):
1718 def __getitem__(self, item):
1708 return self._rustmap[item]
1719 return self._rustmap[item]
1709
1720
1710 def __len__(self):
1721 def __len__(self):
1711 return len(self._rustmap)
1722 return len(self._rustmap)
1712
1723
1713 def __iter__(self):
1724 def __iter__(self):
1714 return iter(self._rustmap)
1725 return iter(self._rustmap)
1715
1726
1716 # forward for python2,3 compat
1727 # forward for python2,3 compat
1717 iteritems = items
1728 iteritems = items
1718
1729
1719 def _opendirstatefile(self):
1730 def _opendirstatefile(self):
1720 fp, mode = txnutil.trypending(
1731 fp, mode = txnutil.trypending(
1721 self._root, self._opener, self._filename
1732 self._root, self._opener, self._filename
1722 )
1733 )
1723 if self._pendingmode is not None and self._pendingmode != mode:
1734 if self._pendingmode is not None and self._pendingmode != mode:
1724 fp.close()
1735 fp.close()
1725 raise error.Abort(
1736 raise error.Abort(
1726 _(b'working directory state may be changed parallelly')
1737 _(b'working directory state may be changed parallelly')
1727 )
1738 )
1728 self._pendingmode = mode
1739 self._pendingmode = mode
1729 return fp
1740 return fp
1730
1741
1731 def setparents(self, p1, p2):
1742 def setparents(self, p1, p2):
1732 self._rustmap.setparents(p1, p2)
1743 self._rustmap.setparents(p1, p2)
1733 self._parents = (p1, p2)
1744 self._parents = (p1, p2)
1734 self._dirtyparents = True
1745 self._dirtyparents = True
1735
1746
1736 def parents(self):
1747 def parents(self):
1737 if not self._parents:
1748 if not self._parents:
1738 try:
1749 try:
1739 fp = self._opendirstatefile()
1750 fp = self._opendirstatefile()
1740 st = fp.read(40)
1751 st = fp.read(40)
1741 fp.close()
1752 fp.close()
1742 except IOError as err:
1753 except IOError as err:
1743 if err.errno != errno.ENOENT:
1754 if err.errno != errno.ENOENT:
1744 raise
1755 raise
1745 # File doesn't exist, so the current state is empty
1756 # File doesn't exist, so the current state is empty
1746 st = b''
1757 st = b''
1747
1758
1748 try:
1759 try:
1749 self._parents = self._rustmap.parents(st)
1760 self._parents = self._rustmap.parents(st)
1750 except ValueError:
1761 except ValueError:
1751 raise error.Abort(
1762 raise error.Abort(
1752 _(b'working directory state appears damaged!')
1763 _(b'working directory state appears damaged!')
1753 )
1764 )
1754
1765
1755 return self._parents
1766 return self._parents
1756
1767
1757 def read(self):
1768 def read(self):
1758 # ignore HG_PENDING because identity is used only for writing
1769 # ignore HG_PENDING because identity is used only for writing
1759 self.identity = util.filestat.frompath(
1770 self.identity = util.filestat.frompath(
1760 self._opener.join(self._filename)
1771 self._opener.join(self._filename)
1761 )
1772 )
1762
1773
1763 try:
1774 try:
1764 fp = self._opendirstatefile()
1775 fp = self._opendirstatefile()
1765 try:
1776 try:
1766 st = fp.read()
1777 st = fp.read()
1767 finally:
1778 finally:
1768 fp.close()
1779 fp.close()
1769 except IOError as err:
1780 except IOError as err:
1770 if err.errno != errno.ENOENT:
1781 if err.errno != errno.ENOENT:
1771 raise
1782 raise
1772 return
1783 return
1773 if not st:
1784 if not st:
1774 return
1785 return
1775
1786
1776 parse_dirstate = util.nogc(self._rustmap.read)
1787 parse_dirstate = util.nogc(self._rustmap.read)
1777 parents = parse_dirstate(st)
1788 parents = parse_dirstate(st)
1778 if parents and not self._dirtyparents:
1789 if parents and not self._dirtyparents:
1779 self.setparents(*parents)
1790 self.setparents(*parents)
1780
1791
1781 self.__contains__ = self._rustmap.__contains__
1792 self.__contains__ = self._rustmap.__contains__
1782 self.__getitem__ = self._rustmap.__getitem__
1793 self.__getitem__ = self._rustmap.__getitem__
1783 self.get = self._rustmap.get
1794 self.get = self._rustmap.get
1784
1795
1785 def write(self, st, now):
1796 def write(self, st, now):
1786 parents = self.parents()
1797 parents = self.parents()
1787 st.write(self._rustmap.write(parents[0], parents[1], now))
1798 st.write(self._rustmap.write(parents[0], parents[1], now))
1788 st.close()
1799 st.close()
1789 self._dirtyparents = False
1800 self._dirtyparents = False
1790
1801
1791 @propertycache
1802 @propertycache
1792 def filefoldmap(self):
1803 def filefoldmap(self):
1793 """Returns a dictionary mapping normalized case paths to their
1804 """Returns a dictionary mapping normalized case paths to their
1794 non-normalized versions.
1805 non-normalized versions.
1795 """
1806 """
1796 return self._rustmap.filefoldmapasdict()
1807 return self._rustmap.filefoldmapasdict()
1797
1808
1798 def hastrackeddir(self, d):
1809 def hastrackeddir(self, d):
1799 self._dirs # Trigger Python's propertycache
1810 self._dirs # Trigger Python's propertycache
1800 return self._rustmap.hastrackeddir(d)
1811 return self._rustmap.hastrackeddir(d)
1801
1812
1802 def hasdir(self, d):
1813 def hasdir(self, d):
1803 self._dirs # Trigger Python's propertycache
1814 self._dirs # Trigger Python's propertycache
1804 return self._rustmap.hasdir(d)
1815 return self._rustmap.hasdir(d)
1805
1816
1806 @propertycache
1817 @propertycache
1807 def _dirs(self):
1818 def _dirs(self):
1808 return self._rustmap.getdirs()
1819 return self._rustmap.getdirs()
1809
1820
1810 @propertycache
1821 @propertycache
1811 def _alldirs(self):
1822 def _alldirs(self):
1812 return self._rustmap.getalldirs()
1823 return self._rustmap.getalldirs()
1813
1824
1814 @propertycache
1825 @propertycache
1815 def identity(self):
1826 def identity(self):
1816 self._rustmap
1827 self._rustmap
1817 return self.identity
1828 return self.identity
1818
1829
1819 @property
1830 @property
1820 def nonnormalset(self):
1831 def nonnormalset(self):
1821 nonnorm, otherparents = self._rustmap.nonnormalentries()
1832 nonnorm, otherparents = self._rustmap.nonnormalentries()
1822 return nonnorm
1833 return nonnorm
1823
1834
1824 @property
1835 @property
1825 def otherparentset(self):
1836 def otherparentset(self):
1826 nonnorm, otherparents = self._rustmap.nonnormalentries()
1837 nonnorm, otherparents = self._rustmap.nonnormalentries()
1827 return otherparents
1838 return otherparents
1828
1839
1829 @propertycache
1840 @propertycache
1830 def dirfoldmap(self):
1841 def dirfoldmap(self):
1831 f = {}
1842 f = {}
1832 normcase = util.normcase
1843 normcase = util.normcase
1833 for name in self._dirs:
1844 for name in self._dirs:
1834 f[normcase(name)] = name
1845 f[normcase(name)] = name
1835 return f
1846 return f
General Comments 0
You need to be logged in to leave comments. Login now