dirstate: stop caring about match.explicitdir...
Martin von Zweigbergk
r44113:deacffd2 default
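What the hunks below do: in `_walkexplicit()`, the lookup `matchedir = match.explicitdir` and both `if matchedir: matchedir(nf)` call sites are deleted, so the dirstate walk no longer notifies the matcher about explicitly listed paths that turn out to be present or missing directories; the separate `match.traversedir` hook used by `walk()` is kept. The snippet that follows is a self-contained toy sketch of the callback pattern being dropped — `ToyMatcher` and `walk_explicit` are made-up names for illustration, not Mercurial's real `matchmod` or dirstate API.

class ToyMatcher(object):
    """Made-up stand-in for a matcher; only the explicitdir attribute matters here."""

    def __init__(self, explicitdir=None):
        # callback invoked for explicitly listed directories, or None
        self.explicitdir = explicitdir


def walk_explicit(paths, isdir, match):
    """Toy version of the removed behaviour: collect explicitly listed
    directories and report each one through match.explicitdir."""
    dirsfound = []
    matchedir = match.explicitdir  # the lookup the patch removes
    for p in paths:
        if isdir(p):
            if matchedir:          # the call sites the patch removes
                matchedir(p)
            dirsfound.append(p)
    return dirsfound


seen = []
m = ToyMatcher(explicitdir=seen.append)
assert walk_explicit([b'a.txt', b'subdir'], lambda p: p == b'subdir', m) == [b'subdir']
assert seen == [b'subdir']

After this change dirstate no longer consults `explicitdir` at all; directory-level notifications continue to flow through `traversedir` (see `matchtdir = match.traversedir` in `walk()` below).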
--- a/mercurial/dirstate.py
+++ b/mercurial/dirstate.py
@@ -1,1848 +1,1843 @@
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import nullid
17 from .node import nullid
18 from .pycompat import delattr
18 from .pycompat import delattr
19
19
20 from hgdemandimport import tracing
20 from hgdemandimport import tracing
21
21
22 from . import (
22 from . import (
23 encoding,
23 encoding,
24 error,
24 error,
25 match as matchmod,
25 match as matchmod,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 txnutil,
30 txnutil,
31 util,
31 util,
32 )
32 )
33
33
34 from .interfaces import (
34 from .interfaces import (
35 dirstate as intdirstate,
35 dirstate as intdirstate,
36 util as interfaceutil,
36 util as interfaceutil,
37 )
37 )
38
38
39 parsers = policy.importmod('parsers')
39 parsers = policy.importmod('parsers')
40 rustmod = policy.importrust('dirstate')
40 rustmod = policy.importrust('dirstate')
41
41
42 propertycache = util.propertycache
42 propertycache = util.propertycache
43 filecache = scmutil.filecache
43 filecache = scmutil.filecache
44 _rangemask = 0x7FFFFFFF
44 _rangemask = 0x7FFFFFFF
45
45
46 dirstatetuple = parsers.dirstatetuple
46 dirstatetuple = parsers.dirstatetuple
47
47
48
48
49 class repocache(filecache):
49 class repocache(filecache):
50 """filecache for files in .hg/"""
50 """filecache for files in .hg/"""
51
51
52 def join(self, obj, fname):
52 def join(self, obj, fname):
53 return obj._opener.join(fname)
53 return obj._opener.join(fname)
54
54
55
55
56 class rootcache(filecache):
56 class rootcache(filecache):
57 """filecache for files in the repository root"""
57 """filecache for files in the repository root"""
58
58
59 def join(self, obj, fname):
59 def join(self, obj, fname):
60 return obj._join(fname)
60 return obj._join(fname)
61
61
62
62
63 def _getfsnow(vfs):
63 def _getfsnow(vfs):
64 '''Get "now" timestamp on filesystem'''
64 '''Get "now" timestamp on filesystem'''
65 tmpfd, tmpname = vfs.mkstemp()
65 tmpfd, tmpname = vfs.mkstemp()
66 try:
66 try:
67 return os.fstat(tmpfd)[stat.ST_MTIME]
67 return os.fstat(tmpfd)[stat.ST_MTIME]
68 finally:
68 finally:
69 os.close(tmpfd)
69 os.close(tmpfd)
70 vfs.unlink(tmpname)
70 vfs.unlink(tmpname)
71
71
72
72
73 @interfaceutil.implementer(intdirstate.idirstate)
73 @interfaceutil.implementer(intdirstate.idirstate)
74 class dirstate(object):
74 class dirstate(object):
75 def __init__(self, opener, ui, root, validate, sparsematchfn):
75 def __init__(self, opener, ui, root, validate, sparsematchfn):
76 '''Create a new dirstate object.
76 '''Create a new dirstate object.
77
77
78 opener is an open()-like callable that can be used to open the
78 opener is an open()-like callable that can be used to open the
79 dirstate file; root is the root of the directory tracked by
79 dirstate file; root is the root of the directory tracked by
80 the dirstate.
80 the dirstate.
81 '''
81 '''
82 self._opener = opener
82 self._opener = opener
83 self._validate = validate
83 self._validate = validate
84 self._root = root
84 self._root = root
85 self._sparsematchfn = sparsematchfn
85 self._sparsematchfn = sparsematchfn
86 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
86 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
87 # UNC path pointing to root share (issue4557)
87 # UNC path pointing to root share (issue4557)
88 self._rootdir = pathutil.normasprefix(root)
88 self._rootdir = pathutil.normasprefix(root)
89 self._dirty = False
89 self._dirty = False
90 self._lastnormaltime = 0
90 self._lastnormaltime = 0
91 self._ui = ui
91 self._ui = ui
92 self._filecache = {}
92 self._filecache = {}
93 self._parentwriters = 0
93 self._parentwriters = 0
94 self._filename = b'dirstate'
94 self._filename = b'dirstate'
95 self._pendingfilename = b'%s.pending' % self._filename
95 self._pendingfilename = b'%s.pending' % self._filename
96 self._plchangecallbacks = {}
96 self._plchangecallbacks = {}
97 self._origpl = None
97 self._origpl = None
98 self._updatedfiles = set()
98 self._updatedfiles = set()
99 self._mapcls = dirstatemap
99 self._mapcls = dirstatemap
100 # Access and cache cwd early, so we don't access it for the first time
100 # Access and cache cwd early, so we don't access it for the first time
101 # after a working-copy update caused it to not exist (accessing it then
101 # after a working-copy update caused it to not exist (accessing it then
102 # raises an exception).
102 # raises an exception).
103 self._cwd
103 self._cwd
104
104
105 @contextlib.contextmanager
105 @contextlib.contextmanager
106 def parentchange(self):
106 def parentchange(self):
107 '''Context manager for handling dirstate parents.
107 '''Context manager for handling dirstate parents.
108
108
109 If an exception occurs in the scope of the context manager,
109 If an exception occurs in the scope of the context manager,
110 the incoherent dirstate won't be written when wlock is
110 the incoherent dirstate won't be written when wlock is
111 released.
111 released.
112 '''
112 '''
113 self._parentwriters += 1
113 self._parentwriters += 1
114 yield
114 yield
115 # Typically we want the "undo" step of a context manager in a
115 # Typically we want the "undo" step of a context manager in a
116 # finally block so it happens even when an exception
116 # finally block so it happens even when an exception
117 # occurs. In this case, however, we only want to decrement
117 # occurs. In this case, however, we only want to decrement
118 # parentwriters if the code in the with statement exits
118 # parentwriters if the code in the with statement exits
119 # normally, so we don't have a try/finally here on purpose.
119 # normally, so we don't have a try/finally here on purpose.
120 self._parentwriters -= 1
120 self._parentwriters -= 1
121
121
122 def pendingparentchange(self):
122 def pendingparentchange(self):
123 '''Returns true if the dirstate is in the middle of a set of changes
123 '''Returns true if the dirstate is in the middle of a set of changes
124 that modify the dirstate parent.
124 that modify the dirstate parent.
125 '''
125 '''
126 return self._parentwriters > 0
126 return self._parentwriters > 0
127
127
128 @propertycache
128 @propertycache
129 def _map(self):
129 def _map(self):
130 """Return the dirstate contents (see documentation for dirstatemap)."""
130 """Return the dirstate contents (see documentation for dirstatemap)."""
131 self._map = self._mapcls(self._ui, self._opener, self._root)
131 self._map = self._mapcls(self._ui, self._opener, self._root)
132 return self._map
132 return self._map
133
133
134 @property
134 @property
135 def _sparsematcher(self):
135 def _sparsematcher(self):
136 """The matcher for the sparse checkout.
136 """The matcher for the sparse checkout.
137
137
138 The working directory may not include every file from a manifest. The
138 The working directory may not include every file from a manifest. The
139 matcher obtained by this property will match a path if it is to be
139 matcher obtained by this property will match a path if it is to be
140 included in the working directory.
140 included in the working directory.
141 """
141 """
142 # TODO there is potential to cache this property. For now, the matcher
142 # TODO there is potential to cache this property. For now, the matcher
143 # is resolved on every access. (But the called function does use a
143 # is resolved on every access. (But the called function does use a
144 # cache to keep the lookup fast.)
144 # cache to keep the lookup fast.)
145 return self._sparsematchfn()
145 return self._sparsematchfn()
146
146
147 @repocache(b'branch')
147 @repocache(b'branch')
148 def _branch(self):
148 def _branch(self):
149 try:
149 try:
150 return self._opener.read(b"branch").strip() or b"default"
150 return self._opener.read(b"branch").strip() or b"default"
151 except IOError as inst:
151 except IOError as inst:
152 if inst.errno != errno.ENOENT:
152 if inst.errno != errno.ENOENT:
153 raise
153 raise
154 return b"default"
154 return b"default"
155
155
156 @property
156 @property
157 def _pl(self):
157 def _pl(self):
158 return self._map.parents()
158 return self._map.parents()
159
159
160 def hasdir(self, d):
160 def hasdir(self, d):
161 return self._map.hastrackeddir(d)
161 return self._map.hastrackeddir(d)
162
162
163 @rootcache(b'.hgignore')
163 @rootcache(b'.hgignore')
164 def _ignore(self):
164 def _ignore(self):
165 files = self._ignorefiles()
165 files = self._ignorefiles()
166 if not files:
166 if not files:
167 return matchmod.never()
167 return matchmod.never()
168
168
169 pats = [b'include:%s' % f for f in files]
169 pats = [b'include:%s' % f for f in files]
170 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
170 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
171
171
172 @propertycache
172 @propertycache
173 def _slash(self):
173 def _slash(self):
174 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
174 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
175
175
176 @propertycache
176 @propertycache
177 def _checklink(self):
177 def _checklink(self):
178 return util.checklink(self._root)
178 return util.checklink(self._root)
179
179
180 @propertycache
180 @propertycache
181 def _checkexec(self):
181 def _checkexec(self):
182 return util.checkexec(self._root)
182 return util.checkexec(self._root)
183
183
184 @propertycache
184 @propertycache
185 def _checkcase(self):
185 def _checkcase(self):
186 return not util.fscasesensitive(self._join(b'.hg'))
186 return not util.fscasesensitive(self._join(b'.hg'))
187
187
188 def _join(self, f):
188 def _join(self, f):
189 # much faster than os.path.join()
189 # much faster than os.path.join()
190 # it's safe because f is always a relative path
190 # it's safe because f is always a relative path
191 return self._rootdir + f
191 return self._rootdir + f
192
192
193 def flagfunc(self, buildfallback):
193 def flagfunc(self, buildfallback):
194 if self._checklink and self._checkexec:
194 if self._checklink and self._checkexec:
195
195
196 def f(x):
196 def f(x):
197 try:
197 try:
198 st = os.lstat(self._join(x))
198 st = os.lstat(self._join(x))
199 if util.statislink(st):
199 if util.statislink(st):
200 return b'l'
200 return b'l'
201 if util.statisexec(st):
201 if util.statisexec(st):
202 return b'x'
202 return b'x'
203 except OSError:
203 except OSError:
204 pass
204 pass
205 return b''
205 return b''
206
206
207 return f
207 return f
208
208
209 fallback = buildfallback()
209 fallback = buildfallback()
210 if self._checklink:
210 if self._checklink:
211
211
212 def f(x):
212 def f(x):
213 if os.path.islink(self._join(x)):
213 if os.path.islink(self._join(x)):
214 return b'l'
214 return b'l'
215 if b'x' in fallback(x):
215 if b'x' in fallback(x):
216 return b'x'
216 return b'x'
217 return b''
217 return b''
218
218
219 return f
219 return f
220 if self._checkexec:
220 if self._checkexec:
221
221
222 def f(x):
222 def f(x):
223 if b'l' in fallback(x):
223 if b'l' in fallback(x):
224 return b'l'
224 return b'l'
225 if util.isexec(self._join(x)):
225 if util.isexec(self._join(x)):
226 return b'x'
226 return b'x'
227 return b''
227 return b''
228
228
229 return f
229 return f
230 else:
230 else:
231 return fallback
231 return fallback
232
232
233 @propertycache
233 @propertycache
234 def _cwd(self):
234 def _cwd(self):
235 # internal config: ui.forcecwd
235 # internal config: ui.forcecwd
236 forcecwd = self._ui.config(b'ui', b'forcecwd')
236 forcecwd = self._ui.config(b'ui', b'forcecwd')
237 if forcecwd:
237 if forcecwd:
238 return forcecwd
238 return forcecwd
239 return encoding.getcwd()
239 return encoding.getcwd()
240
240
241 def getcwd(self):
241 def getcwd(self):
242 '''Return the path from which a canonical path is calculated.
242 '''Return the path from which a canonical path is calculated.
243
243
244 This path should be used to resolve file patterns or to convert
244 This path should be used to resolve file patterns or to convert
245 canonical paths back to file paths for display. It shouldn't be
245 canonical paths back to file paths for display. It shouldn't be
246 used to get real file paths. Use vfs functions instead.
246 used to get real file paths. Use vfs functions instead.
247 '''
247 '''
248 cwd = self._cwd
248 cwd = self._cwd
249 if cwd == self._root:
249 if cwd == self._root:
250 return b''
250 return b''
251 # self._root ends with a path separator if self._root is '/' or 'C:\'
251 # self._root ends with a path separator if self._root is '/' or 'C:\'
252 rootsep = self._root
252 rootsep = self._root
253 if not util.endswithsep(rootsep):
253 if not util.endswithsep(rootsep):
254 rootsep += pycompat.ossep
254 rootsep += pycompat.ossep
255 if cwd.startswith(rootsep):
255 if cwd.startswith(rootsep):
256 return cwd[len(rootsep) :]
256 return cwd[len(rootsep) :]
257 else:
257 else:
258 # we're outside the repo. return an absolute path.
258 # we're outside the repo. return an absolute path.
259 return cwd
259 return cwd
260
260
261 def pathto(self, f, cwd=None):
261 def pathto(self, f, cwd=None):
262 if cwd is None:
262 if cwd is None:
263 cwd = self.getcwd()
263 cwd = self.getcwd()
264 path = util.pathto(self._root, cwd, f)
264 path = util.pathto(self._root, cwd, f)
265 if self._slash:
265 if self._slash:
266 return util.pconvert(path)
266 return util.pconvert(path)
267 return path
267 return path
268
268
269 def __getitem__(self, key):
269 def __getitem__(self, key):
270 '''Return the current state of key (a filename) in the dirstate.
270 '''Return the current state of key (a filename) in the dirstate.
271
271
272 States are:
272 States are:
273 n normal
273 n normal
274 m needs merging
274 m needs merging
275 r marked for removal
275 r marked for removal
276 a marked for addition
276 a marked for addition
277 ? not tracked
277 ? not tracked
278 '''
278 '''
279 return self._map.get(key, (b"?",))[0]
279 return self._map.get(key, (b"?",))[0]
280
280
281 def __contains__(self, key):
281 def __contains__(self, key):
282 return key in self._map
282 return key in self._map
283
283
284 def __iter__(self):
284 def __iter__(self):
285 return iter(sorted(self._map))
285 return iter(sorted(self._map))
286
286
287 def items(self):
287 def items(self):
288 return pycompat.iteritems(self._map)
288 return pycompat.iteritems(self._map)
289
289
290 iteritems = items
290 iteritems = items
291
291
292 def parents(self):
292 def parents(self):
293 return [self._validate(p) for p in self._pl]
293 return [self._validate(p) for p in self._pl]
294
294
295 def p1(self):
295 def p1(self):
296 return self._validate(self._pl[0])
296 return self._validate(self._pl[0])
297
297
298 def p2(self):
298 def p2(self):
299 return self._validate(self._pl[1])
299 return self._validate(self._pl[1])
300
300
301 def branch(self):
301 def branch(self):
302 return encoding.tolocal(self._branch)
302 return encoding.tolocal(self._branch)
303
303
304 def setparents(self, p1, p2=nullid):
304 def setparents(self, p1, p2=nullid):
305 """Set dirstate parents to p1 and p2.
305 """Set dirstate parents to p1 and p2.
306
306
307 When moving from two parents to one, 'm' merged entries a
307 When moving from two parents to one, 'm' merged entries a
308 adjusted to normal and previous copy records discarded and
308 adjusted to normal and previous copy records discarded and
309 returned by the call.
309 returned by the call.
310
310
311 See localrepo.setparents()
311 See localrepo.setparents()
312 """
312 """
313 if self._parentwriters == 0:
313 if self._parentwriters == 0:
314 raise ValueError(
314 raise ValueError(
315 b"cannot set dirstate parent outside of "
315 b"cannot set dirstate parent outside of "
316 b"dirstate.parentchange context manager"
316 b"dirstate.parentchange context manager"
317 )
317 )
318
318
319 self._dirty = True
319 self._dirty = True
320 oldp2 = self._pl[1]
320 oldp2 = self._pl[1]
321 if self._origpl is None:
321 if self._origpl is None:
322 self._origpl = self._pl
322 self._origpl = self._pl
323 self._map.setparents(p1, p2)
323 self._map.setparents(p1, p2)
324 copies = {}
324 copies = {}
325 if oldp2 != nullid and p2 == nullid:
325 if oldp2 != nullid and p2 == nullid:
326 candidatefiles = self._map.nonnormalset.union(
326 candidatefiles = self._map.nonnormalset.union(
327 self._map.otherparentset
327 self._map.otherparentset
328 )
328 )
329 for f in candidatefiles:
329 for f in candidatefiles:
330 s = self._map.get(f)
330 s = self._map.get(f)
331 if s is None:
331 if s is None:
332 continue
332 continue
333
333
334 # Discard 'm' markers when moving away from a merge state
334 # Discard 'm' markers when moving away from a merge state
335 if s[0] == b'm':
335 if s[0] == b'm':
336 source = self._map.copymap.get(f)
336 source = self._map.copymap.get(f)
337 if source:
337 if source:
338 copies[f] = source
338 copies[f] = source
339 self.normallookup(f)
339 self.normallookup(f)
340 # Also fix up otherparent markers
340 # Also fix up otherparent markers
341 elif s[0] == b'n' and s[2] == -2:
341 elif s[0] == b'n' and s[2] == -2:
342 source = self._map.copymap.get(f)
342 source = self._map.copymap.get(f)
343 if source:
343 if source:
344 copies[f] = source
344 copies[f] = source
345 self.add(f)
345 self.add(f)
346 return copies
346 return copies
347
347
348 def setbranch(self, branch):
348 def setbranch(self, branch):
349 self.__class__._branch.set(self, encoding.fromlocal(branch))
349 self.__class__._branch.set(self, encoding.fromlocal(branch))
350 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
350 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
351 try:
351 try:
352 f.write(self._branch + b'\n')
352 f.write(self._branch + b'\n')
353 f.close()
353 f.close()
354
354
355 # make sure filecache has the correct stat info for _branch after
355 # make sure filecache has the correct stat info for _branch after
356 # replacing the underlying file
356 # replacing the underlying file
357 ce = self._filecache[b'_branch']
357 ce = self._filecache[b'_branch']
358 if ce:
358 if ce:
359 ce.refresh()
359 ce.refresh()
360 except: # re-raises
360 except: # re-raises
361 f.discard()
361 f.discard()
362 raise
362 raise
363
363
364 def invalidate(self):
364 def invalidate(self):
365 '''Causes the next access to reread the dirstate.
365 '''Causes the next access to reread the dirstate.
366
366
367 This is different from localrepo.invalidatedirstate() because it always
367 This is different from localrepo.invalidatedirstate() because it always
368 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
368 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
369 check whether the dirstate has changed before rereading it.'''
369 check whether the dirstate has changed before rereading it.'''
370
370
371 for a in ("_map", "_branch", "_ignore"):
371 for a in ("_map", "_branch", "_ignore"):
372 if a in self.__dict__:
372 if a in self.__dict__:
373 delattr(self, a)
373 delattr(self, a)
374 self._lastnormaltime = 0
374 self._lastnormaltime = 0
375 self._dirty = False
375 self._dirty = False
376 self._updatedfiles.clear()
376 self._updatedfiles.clear()
377 self._parentwriters = 0
377 self._parentwriters = 0
378 self._origpl = None
378 self._origpl = None
379
379
380 def copy(self, source, dest):
380 def copy(self, source, dest):
381 """Mark dest as a copy of source. Unmark dest if source is None."""
381 """Mark dest as a copy of source. Unmark dest if source is None."""
382 if source == dest:
382 if source == dest:
383 return
383 return
384 self._dirty = True
384 self._dirty = True
385 if source is not None:
385 if source is not None:
386 self._map.copymap[dest] = source
386 self._map.copymap[dest] = source
387 self._updatedfiles.add(source)
387 self._updatedfiles.add(source)
388 self._updatedfiles.add(dest)
388 self._updatedfiles.add(dest)
389 elif self._map.copymap.pop(dest, None):
389 elif self._map.copymap.pop(dest, None):
390 self._updatedfiles.add(dest)
390 self._updatedfiles.add(dest)
391
391
392 def copied(self, file):
392 def copied(self, file):
393 return self._map.copymap.get(file, None)
393 return self._map.copymap.get(file, None)
394
394
395 def copies(self):
395 def copies(self):
396 return self._map.copymap
396 return self._map.copymap
397
397
398 def _addpath(self, f, state, mode, size, mtime):
398 def _addpath(self, f, state, mode, size, mtime):
399 oldstate = self[f]
399 oldstate = self[f]
400 if state == b'a' or oldstate == b'r':
400 if state == b'a' or oldstate == b'r':
401 scmutil.checkfilename(f)
401 scmutil.checkfilename(f)
402 if self._map.hastrackeddir(f):
402 if self._map.hastrackeddir(f):
403 raise error.Abort(
403 raise error.Abort(
404 _(b'directory %r already in dirstate') % pycompat.bytestr(f)
404 _(b'directory %r already in dirstate') % pycompat.bytestr(f)
405 )
405 )
406 # shadows
406 # shadows
407 for d in pathutil.finddirs(f):
407 for d in pathutil.finddirs(f):
408 if self._map.hastrackeddir(d):
408 if self._map.hastrackeddir(d):
409 break
409 break
410 entry = self._map.get(d)
410 entry = self._map.get(d)
411 if entry is not None and entry[0] != b'r':
411 if entry is not None and entry[0] != b'r':
412 raise error.Abort(
412 raise error.Abort(
413 _(b'file %r in dirstate clashes with %r')
413 _(b'file %r in dirstate clashes with %r')
414 % (pycompat.bytestr(d), pycompat.bytestr(f))
414 % (pycompat.bytestr(d), pycompat.bytestr(f))
415 )
415 )
416 self._dirty = True
416 self._dirty = True
417 self._updatedfiles.add(f)
417 self._updatedfiles.add(f)
418 self._map.addfile(f, oldstate, state, mode, size, mtime)
418 self._map.addfile(f, oldstate, state, mode, size, mtime)
419
419
420 def normal(self, f, parentfiledata=None):
420 def normal(self, f, parentfiledata=None):
421 '''Mark a file normal and clean.
421 '''Mark a file normal and clean.
422
422
423 parentfiledata: (mode, size, mtime) of the clean file
423 parentfiledata: (mode, size, mtime) of the clean file
424
424
425 parentfiledata should be computed from memory (for mode,
425 parentfiledata should be computed from memory (for mode,
426 size), as or close as possible from the point where we
426 size), as or close as possible from the point where we
427 determined the file was clean, to limit the risk of the
427 determined the file was clean, to limit the risk of the
428 file having been changed by an external process between the
428 file having been changed by an external process between the
429 moment where the file was determined to be clean and now.'''
429 moment where the file was determined to be clean and now.'''
430 if parentfiledata:
430 if parentfiledata:
431 (mode, size, mtime) = parentfiledata
431 (mode, size, mtime) = parentfiledata
432 else:
432 else:
433 s = os.lstat(self._join(f))
433 s = os.lstat(self._join(f))
434 mode = s.st_mode
434 mode = s.st_mode
435 size = s.st_size
435 size = s.st_size
436 mtime = s[stat.ST_MTIME]
436 mtime = s[stat.ST_MTIME]
437 self._addpath(f, b'n', mode, size & _rangemask, mtime & _rangemask)
437 self._addpath(f, b'n', mode, size & _rangemask, mtime & _rangemask)
438 self._map.copymap.pop(f, None)
438 self._map.copymap.pop(f, None)
439 if f in self._map.nonnormalset:
439 if f in self._map.nonnormalset:
440 self._map.nonnormalset.remove(f)
440 self._map.nonnormalset.remove(f)
441 if mtime > self._lastnormaltime:
441 if mtime > self._lastnormaltime:
442 # Remember the most recent modification timeslot for status(),
442 # Remember the most recent modification timeslot for status(),
443 # to make sure we won't miss future size-preserving file content
443 # to make sure we won't miss future size-preserving file content
444 # modifications that happen within the same timeslot.
444 # modifications that happen within the same timeslot.
445 self._lastnormaltime = mtime
445 self._lastnormaltime = mtime
446
446
447 def normallookup(self, f):
447 def normallookup(self, f):
448 '''Mark a file normal, but possibly dirty.'''
448 '''Mark a file normal, but possibly dirty.'''
449 if self._pl[1] != nullid:
449 if self._pl[1] != nullid:
450 # if there is a merge going on and the file was either
450 # if there is a merge going on and the file was either
451 # in state 'm' (-1) or coming from other parent (-2) before
451 # in state 'm' (-1) or coming from other parent (-2) before
452 # being removed, restore that state.
452 # being removed, restore that state.
453 entry = self._map.get(f)
453 entry = self._map.get(f)
454 if entry is not None:
454 if entry is not None:
455 if entry[0] == b'r' and entry[2] in (-1, -2):
455 if entry[0] == b'r' and entry[2] in (-1, -2):
456 source = self._map.copymap.get(f)
456 source = self._map.copymap.get(f)
457 if entry[2] == -1:
457 if entry[2] == -1:
458 self.merge(f)
458 self.merge(f)
459 elif entry[2] == -2:
459 elif entry[2] == -2:
460 self.otherparent(f)
460 self.otherparent(f)
461 if source:
461 if source:
462 self.copy(source, f)
462 self.copy(source, f)
463 return
463 return
464 if entry[0] == b'm' or entry[0] == b'n' and entry[2] == -2:
464 if entry[0] == b'm' or entry[0] == b'n' and entry[2] == -2:
465 return
465 return
466 self._addpath(f, b'n', 0, -1, -1)
466 self._addpath(f, b'n', 0, -1, -1)
467 self._map.copymap.pop(f, None)
467 self._map.copymap.pop(f, None)
468
468
469 def otherparent(self, f):
469 def otherparent(self, f):
470 '''Mark as coming from the other parent, always dirty.'''
470 '''Mark as coming from the other parent, always dirty.'''
471 if self._pl[1] == nullid:
471 if self._pl[1] == nullid:
472 raise error.Abort(
472 raise error.Abort(
473 _(b"setting %r to other parent only allowed in merges") % f
473 _(b"setting %r to other parent only allowed in merges") % f
474 )
474 )
475 if f in self and self[f] == b'n':
475 if f in self and self[f] == b'n':
476 # merge-like
476 # merge-like
477 self._addpath(f, b'm', 0, -2, -1)
477 self._addpath(f, b'm', 0, -2, -1)
478 else:
478 else:
479 # add-like
479 # add-like
480 self._addpath(f, b'n', 0, -2, -1)
480 self._addpath(f, b'n', 0, -2, -1)
481 self._map.copymap.pop(f, None)
481 self._map.copymap.pop(f, None)
482
482
483 def add(self, f):
483 def add(self, f):
484 '''Mark a file added.'''
484 '''Mark a file added.'''
485 self._addpath(f, b'a', 0, -1, -1)
485 self._addpath(f, b'a', 0, -1, -1)
486 self._map.copymap.pop(f, None)
486 self._map.copymap.pop(f, None)
487
487
488 def remove(self, f):
488 def remove(self, f):
489 '''Mark a file removed.'''
489 '''Mark a file removed.'''
490 self._dirty = True
490 self._dirty = True
491 oldstate = self[f]
491 oldstate = self[f]
492 size = 0
492 size = 0
493 if self._pl[1] != nullid:
493 if self._pl[1] != nullid:
494 entry = self._map.get(f)
494 entry = self._map.get(f)
495 if entry is not None:
495 if entry is not None:
496 # backup the previous state
496 # backup the previous state
497 if entry[0] == b'm': # merge
497 if entry[0] == b'm': # merge
498 size = -1
498 size = -1
499 elif entry[0] == b'n' and entry[2] == -2: # other parent
499 elif entry[0] == b'n' and entry[2] == -2: # other parent
500 size = -2
500 size = -2
501 self._map.otherparentset.add(f)
501 self._map.otherparentset.add(f)
502 self._updatedfiles.add(f)
502 self._updatedfiles.add(f)
503 self._map.removefile(f, oldstate, size)
503 self._map.removefile(f, oldstate, size)
504 if size == 0:
504 if size == 0:
505 self._map.copymap.pop(f, None)
505 self._map.copymap.pop(f, None)
506
506
507 def merge(self, f):
507 def merge(self, f):
508 '''Mark a file merged.'''
508 '''Mark a file merged.'''
509 if self._pl[1] == nullid:
509 if self._pl[1] == nullid:
510 return self.normallookup(f)
510 return self.normallookup(f)
511 return self.otherparent(f)
511 return self.otherparent(f)
512
512
513 def drop(self, f):
513 def drop(self, f):
514 '''Drop a file from the dirstate'''
514 '''Drop a file from the dirstate'''
515 oldstate = self[f]
515 oldstate = self[f]
516 if self._map.dropfile(f, oldstate):
516 if self._map.dropfile(f, oldstate):
517 self._dirty = True
517 self._dirty = True
518 self._updatedfiles.add(f)
518 self._updatedfiles.add(f)
519 self._map.copymap.pop(f, None)
519 self._map.copymap.pop(f, None)
520
520
521 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
521 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
522 if exists is None:
522 if exists is None:
523 exists = os.path.lexists(os.path.join(self._root, path))
523 exists = os.path.lexists(os.path.join(self._root, path))
524 if not exists:
524 if not exists:
525 # Maybe a path component exists
525 # Maybe a path component exists
526 if not ignoremissing and b'/' in path:
526 if not ignoremissing and b'/' in path:
527 d, f = path.rsplit(b'/', 1)
527 d, f = path.rsplit(b'/', 1)
528 d = self._normalize(d, False, ignoremissing, None)
528 d = self._normalize(d, False, ignoremissing, None)
529 folded = d + b"/" + f
529 folded = d + b"/" + f
530 else:
530 else:
531 # No path components, preserve original case
531 # No path components, preserve original case
532 folded = path
532 folded = path
533 else:
533 else:
534 # recursively normalize leading directory components
534 # recursively normalize leading directory components
535 # against dirstate
535 # against dirstate
536 if b'/' in normed:
536 if b'/' in normed:
537 d, f = normed.rsplit(b'/', 1)
537 d, f = normed.rsplit(b'/', 1)
538 d = self._normalize(d, False, ignoremissing, True)
538 d = self._normalize(d, False, ignoremissing, True)
539 r = self._root + b"/" + d
539 r = self._root + b"/" + d
540 folded = d + b"/" + util.fspath(f, r)
540 folded = d + b"/" + util.fspath(f, r)
541 else:
541 else:
542 folded = util.fspath(normed, self._root)
542 folded = util.fspath(normed, self._root)
543 storemap[normed] = folded
543 storemap[normed] = folded
544
544
545 return folded
545 return folded
546
546
547 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
547 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
548 normed = util.normcase(path)
548 normed = util.normcase(path)
549 folded = self._map.filefoldmap.get(normed, None)
549 folded = self._map.filefoldmap.get(normed, None)
550 if folded is None:
550 if folded is None:
551 if isknown:
551 if isknown:
552 folded = path
552 folded = path
553 else:
553 else:
554 folded = self._discoverpath(
554 folded = self._discoverpath(
555 path, normed, ignoremissing, exists, self._map.filefoldmap
555 path, normed, ignoremissing, exists, self._map.filefoldmap
556 )
556 )
557 return folded
557 return folded
558
558
559 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
559 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
560 normed = util.normcase(path)
560 normed = util.normcase(path)
561 folded = self._map.filefoldmap.get(normed, None)
561 folded = self._map.filefoldmap.get(normed, None)
562 if folded is None:
562 if folded is None:
563 folded = self._map.dirfoldmap.get(normed, None)
563 folded = self._map.dirfoldmap.get(normed, None)
564 if folded is None:
564 if folded is None:
565 if isknown:
565 if isknown:
566 folded = path
566 folded = path
567 else:
567 else:
568 # store discovered result in dirfoldmap so that future
568 # store discovered result in dirfoldmap so that future
569 # normalizefile calls don't start matching directories
569 # normalizefile calls don't start matching directories
570 folded = self._discoverpath(
570 folded = self._discoverpath(
571 path, normed, ignoremissing, exists, self._map.dirfoldmap
571 path, normed, ignoremissing, exists, self._map.dirfoldmap
572 )
572 )
573 return folded
573 return folded
574
574
575 def normalize(self, path, isknown=False, ignoremissing=False):
575 def normalize(self, path, isknown=False, ignoremissing=False):
576 '''
576 '''
577 normalize the case of a pathname when on a casefolding filesystem
577 normalize the case of a pathname when on a casefolding filesystem
578
578
579 isknown specifies whether the filename came from walking the
579 isknown specifies whether the filename came from walking the
580 disk, to avoid extra filesystem access.
580 disk, to avoid extra filesystem access.
581
581
582 If ignoremissing is True, missing path are returned
582 If ignoremissing is True, missing path are returned
583 unchanged. Otherwise, we try harder to normalize possibly
583 unchanged. Otherwise, we try harder to normalize possibly
584 existing path components.
584 existing path components.
585
585
586 The normalized case is determined based on the following precedence:
586 The normalized case is determined based on the following precedence:
587
587
588 - version of name already stored in the dirstate
588 - version of name already stored in the dirstate
589 - version of name stored on disk
589 - version of name stored on disk
590 - version provided via command arguments
590 - version provided via command arguments
591 '''
591 '''
592
592
593 if self._checkcase:
593 if self._checkcase:
594 return self._normalize(path, isknown, ignoremissing)
594 return self._normalize(path, isknown, ignoremissing)
595 return path
595 return path
596
596
597 def clear(self):
597 def clear(self):
598 self._map.clear()
598 self._map.clear()
599 self._lastnormaltime = 0
599 self._lastnormaltime = 0
600 self._updatedfiles.clear()
600 self._updatedfiles.clear()
601 self._dirty = True
601 self._dirty = True
602
602
603 def rebuild(self, parent, allfiles, changedfiles=None):
603 def rebuild(self, parent, allfiles, changedfiles=None):
604 if changedfiles is None:
604 if changedfiles is None:
605 # Rebuild entire dirstate
605 # Rebuild entire dirstate
606 changedfiles = allfiles
606 changedfiles = allfiles
607 lastnormaltime = self._lastnormaltime
607 lastnormaltime = self._lastnormaltime
608 self.clear()
608 self.clear()
609 self._lastnormaltime = lastnormaltime
609 self._lastnormaltime = lastnormaltime
610
610
611 if self._origpl is None:
611 if self._origpl is None:
612 self._origpl = self._pl
612 self._origpl = self._pl
613 self._map.setparents(parent, nullid)
613 self._map.setparents(parent, nullid)
614 for f in changedfiles:
614 for f in changedfiles:
615 if f in allfiles:
615 if f in allfiles:
616 self.normallookup(f)
616 self.normallookup(f)
617 else:
617 else:
618 self.drop(f)
618 self.drop(f)
619
619
620 self._dirty = True
620 self._dirty = True
621
621
622 def identity(self):
622 def identity(self):
623 '''Return identity of dirstate itself to detect changing in storage
623 '''Return identity of dirstate itself to detect changing in storage
624
624
625 If identity of previous dirstate is equal to this, writing
625 If identity of previous dirstate is equal to this, writing
626 changes based on the former dirstate out can keep consistency.
626 changes based on the former dirstate out can keep consistency.
627 '''
627 '''
628 return self._map.identity
628 return self._map.identity
629
629
630 def write(self, tr):
630 def write(self, tr):
631 if not self._dirty:
631 if not self._dirty:
632 return
632 return
633
633
634 filename = self._filename
634 filename = self._filename
635 if tr:
635 if tr:
636 # 'dirstate.write()' is not only for writing in-memory
636 # 'dirstate.write()' is not only for writing in-memory
637 # changes out, but also for dropping ambiguous timestamp.
637 # changes out, but also for dropping ambiguous timestamp.
638 # delayed writing re-raise "ambiguous timestamp issue".
638 # delayed writing re-raise "ambiguous timestamp issue".
639 # See also the wiki page below for detail:
639 # See also the wiki page below for detail:
640 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
640 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
641
641
642 # emulate dropping timestamp in 'parsers.pack_dirstate'
642 # emulate dropping timestamp in 'parsers.pack_dirstate'
643 now = _getfsnow(self._opener)
643 now = _getfsnow(self._opener)
644 self._map.clearambiguoustimes(self._updatedfiles, now)
644 self._map.clearambiguoustimes(self._updatedfiles, now)
645
645
646 # emulate that all 'dirstate.normal' results are written out
646 # emulate that all 'dirstate.normal' results are written out
647 self._lastnormaltime = 0
647 self._lastnormaltime = 0
648 self._updatedfiles.clear()
648 self._updatedfiles.clear()
649
649
650 # delay writing in-memory changes out
650 # delay writing in-memory changes out
651 tr.addfilegenerator(
651 tr.addfilegenerator(
652 b'dirstate',
652 b'dirstate',
653 (self._filename,),
653 (self._filename,),
654 self._writedirstate,
654 self._writedirstate,
655 location=b'plain',
655 location=b'plain',
656 )
656 )
657 return
657 return
658
658
659 st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
659 st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
660 self._writedirstate(st)
660 self._writedirstate(st)
661
661
662 def addparentchangecallback(self, category, callback):
662 def addparentchangecallback(self, category, callback):
663 """add a callback to be called when the wd parents are changed
663 """add a callback to be called when the wd parents are changed
664
664
665 Callback will be called with the following arguments:
665 Callback will be called with the following arguments:
666 dirstate, (oldp1, oldp2), (newp1, newp2)
666 dirstate, (oldp1, oldp2), (newp1, newp2)
667
667
668 Category is a unique identifier to allow overwriting an old callback
668 Category is a unique identifier to allow overwriting an old callback
669 with a newer callback.
669 with a newer callback.
670 """
670 """
671 self._plchangecallbacks[category] = callback
671 self._plchangecallbacks[category] = callback
672
672
673 def _writedirstate(self, st):
673 def _writedirstate(self, st):
674 # notify callbacks about parents change
674 # notify callbacks about parents change
675 if self._origpl is not None and self._origpl != self._pl:
675 if self._origpl is not None and self._origpl != self._pl:
676 for c, callback in sorted(
676 for c, callback in sorted(
677 pycompat.iteritems(self._plchangecallbacks)
677 pycompat.iteritems(self._plchangecallbacks)
678 ):
678 ):
679 callback(self, self._origpl, self._pl)
679 callback(self, self._origpl, self._pl)
680 self._origpl = None
680 self._origpl = None
681 # use the modification time of the newly created temporary file as the
681 # use the modification time of the newly created temporary file as the
682 # filesystem's notion of 'now'
682 # filesystem's notion of 'now'
683 now = util.fstat(st)[stat.ST_MTIME] & _rangemask
683 now = util.fstat(st)[stat.ST_MTIME] & _rangemask
684
684
685 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
685 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
686 # timestamp of each entries in dirstate, because of 'now > mtime'
686 # timestamp of each entries in dirstate, because of 'now > mtime'
687 delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
687 delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
688 if delaywrite > 0:
688 if delaywrite > 0:
689 # do we have any files to delay for?
689 # do we have any files to delay for?
690 for f, e in pycompat.iteritems(self._map):
690 for f, e in pycompat.iteritems(self._map):
691 if e[0] == b'n' and e[3] == now:
691 if e[0] == b'n' and e[3] == now:
692 import time # to avoid useless import
692 import time # to avoid useless import
693
693
694 # rather than sleep n seconds, sleep until the next
694 # rather than sleep n seconds, sleep until the next
695 # multiple of n seconds
695 # multiple of n seconds
696 clock = time.time()
696 clock = time.time()
697 start = int(clock) - (int(clock) % delaywrite)
697 start = int(clock) - (int(clock) % delaywrite)
698 end = start + delaywrite
698 end = start + delaywrite
699 time.sleep(end - clock)
699 time.sleep(end - clock)
700 now = end # trust our estimate that the end is near now
700 now = end # trust our estimate that the end is near now
701 break
701 break
702
702
703 self._map.write(st, now)
703 self._map.write(st, now)
704 self._lastnormaltime = 0
704 self._lastnormaltime = 0
705 self._dirty = False
705 self._dirty = False
706
706
707 def _dirignore(self, f):
707 def _dirignore(self, f):
708 if self._ignore(f):
708 if self._ignore(f):
709 return True
709 return True
710 for p in pathutil.finddirs(f):
710 for p in pathutil.finddirs(f):
711 if self._ignore(p):
711 if self._ignore(p):
712 return True
712 return True
713 return False
713 return False
714
714
715 def _ignorefiles(self):
715 def _ignorefiles(self):
716 files = []
716 files = []
717 if os.path.exists(self._join(b'.hgignore')):
717 if os.path.exists(self._join(b'.hgignore')):
718 files.append(self._join(b'.hgignore'))
718 files.append(self._join(b'.hgignore'))
719 for name, path in self._ui.configitems(b"ui"):
719 for name, path in self._ui.configitems(b"ui"):
720 if name == b'ignore' or name.startswith(b'ignore.'):
720 if name == b'ignore' or name.startswith(b'ignore.'):
721 # we need to use os.path.join here rather than self._join
721 # we need to use os.path.join here rather than self._join
722 # because path is arbitrary and user-specified
722 # because path is arbitrary and user-specified
723 files.append(os.path.join(self._rootdir, util.expandpath(path)))
723 files.append(os.path.join(self._rootdir, util.expandpath(path)))
724 return files
724 return files
725
725
726 def _ignorefileandline(self, f):
726 def _ignorefileandline(self, f):
727 files = collections.deque(self._ignorefiles())
727 files = collections.deque(self._ignorefiles())
728 visited = set()
728 visited = set()
729 while files:
729 while files:
730 i = files.popleft()
730 i = files.popleft()
731 patterns = matchmod.readpatternfile(
731 patterns = matchmod.readpatternfile(
732 i, self._ui.warn, sourceinfo=True
732 i, self._ui.warn, sourceinfo=True
733 )
733 )
734 for pattern, lineno, line in patterns:
734 for pattern, lineno, line in patterns:
735 kind, p = matchmod._patsplit(pattern, b'glob')
735 kind, p = matchmod._patsplit(pattern, b'glob')
736 if kind == b"subinclude":
736 if kind == b"subinclude":
737 if p not in visited:
737 if p not in visited:
738 files.append(p)
738 files.append(p)
739 continue
739 continue
740 m = matchmod.match(
740 m = matchmod.match(
741 self._root, b'', [], [pattern], warn=self._ui.warn
741 self._root, b'', [], [pattern], warn=self._ui.warn
742 )
742 )
743 if m(f):
743 if m(f):
744 return (i, lineno, line)
744 return (i, lineno, line)
745 visited.add(i)
745 visited.add(i)
746 return (None, -1, b"")
746 return (None, -1, b"")
747
747
748 def _walkexplicit(self, match, subrepos):
748 def _walkexplicit(self, match, subrepos):
749 '''Get stat data about the files explicitly specified by match.
749 '''Get stat data about the files explicitly specified by match.
750
750
751 Return a triple (results, dirsfound, dirsnotfound).
751 Return a triple (results, dirsfound, dirsnotfound).
752 - results is a mapping from filename to stat result. It also contains
752 - results is a mapping from filename to stat result. It also contains
753 listings mapping subrepos and .hg to None.
753 listings mapping subrepos and .hg to None.
754 - dirsfound is a list of files found to be directories.
754 - dirsfound is a list of files found to be directories.
755 - dirsnotfound is a list of files that the dirstate thinks are
755 - dirsnotfound is a list of files that the dirstate thinks are
756 directories and that were not found.'''
756 directories and that were not found.'''
757
757
758 def badtype(mode):
758 def badtype(mode):
759 kind = _(b'unknown')
759 kind = _(b'unknown')
760 if stat.S_ISCHR(mode):
760 if stat.S_ISCHR(mode):
761 kind = _(b'character device')
761 kind = _(b'character device')
762 elif stat.S_ISBLK(mode):
762 elif stat.S_ISBLK(mode):
763 kind = _(b'block device')
763 kind = _(b'block device')
764 elif stat.S_ISFIFO(mode):
764 elif stat.S_ISFIFO(mode):
765 kind = _(b'fifo')
765 kind = _(b'fifo')
766 elif stat.S_ISSOCK(mode):
766 elif stat.S_ISSOCK(mode):
767 kind = _(b'socket')
767 kind = _(b'socket')
768 elif stat.S_ISDIR(mode):
768 elif stat.S_ISDIR(mode):
769 kind = _(b'directory')
769 kind = _(b'directory')
770 return _(b'unsupported file type (type is %s)') % kind
770 return _(b'unsupported file type (type is %s)') % kind
771
771
-772 matchedir = match.explicitdir
773 badfn = match.bad
772 badfn = match.bad
774 dmap = self._map
773 dmap = self._map
775 lstat = os.lstat
774 lstat = os.lstat
776 getkind = stat.S_IFMT
775 getkind = stat.S_IFMT
777 dirkind = stat.S_IFDIR
776 dirkind = stat.S_IFDIR
778 regkind = stat.S_IFREG
777 regkind = stat.S_IFREG
779 lnkkind = stat.S_IFLNK
778 lnkkind = stat.S_IFLNK
780 join = self._join
779 join = self._join
781 dirsfound = []
780 dirsfound = []
782 foundadd = dirsfound.append
781 foundadd = dirsfound.append
783 dirsnotfound = []
782 dirsnotfound = []
784 notfoundadd = dirsnotfound.append
783 notfoundadd = dirsnotfound.append
785
784
786 if not match.isexact() and self._checkcase:
785 if not match.isexact() and self._checkcase:
787 normalize = self._normalize
786 normalize = self._normalize
788 else:
787 else:
789 normalize = None
788 normalize = None
790
789
791 files = sorted(match.files())
790 files = sorted(match.files())
792 subrepos.sort()
791 subrepos.sort()
793 i, j = 0, 0
792 i, j = 0, 0
794 while i < len(files) and j < len(subrepos):
793 while i < len(files) and j < len(subrepos):
795 subpath = subrepos[j] + b"/"
794 subpath = subrepos[j] + b"/"
796 if files[i] < subpath:
795 if files[i] < subpath:
797 i += 1
796 i += 1
798 continue
797 continue
799 while i < len(files) and files[i].startswith(subpath):
798 while i < len(files) and files[i].startswith(subpath):
800 del files[i]
799 del files[i]
801 j += 1
800 j += 1
802
801
803 if not files or b'' in files:
802 if not files or b'' in files:
804 files = [b'']
803 files = [b'']
805 # constructing the foldmap is expensive, so don't do it for the
804 # constructing the foldmap is expensive, so don't do it for the
806 # common case where files is ['']
805 # common case where files is ['']
807 normalize = None
806 normalize = None
808 results = dict.fromkeys(subrepos)
807 results = dict.fromkeys(subrepos)
809 results[b'.hg'] = None
808 results[b'.hg'] = None
810
809
811 for ff in files:
810 for ff in files:
812 if normalize:
811 if normalize:
813 nf = normalize(ff, False, True)
812 nf = normalize(ff, False, True)
814 else:
813 else:
815 nf = ff
814 nf = ff
816 if nf in results:
815 if nf in results:
817 continue
816 continue
818
817
819 try:
818 try:
820 st = lstat(join(nf))
819 st = lstat(join(nf))
821 kind = getkind(st.st_mode)
820 kind = getkind(st.st_mode)
822 if kind == dirkind:
821 if kind == dirkind:
823 if nf in dmap:
822 if nf in dmap:
824 # file replaced by dir on disk but still in dirstate
823 # file replaced by dir on disk but still in dirstate
825 results[nf] = None
824 results[nf] = None
-826 if matchedir:
-827 matchedir(nf)
828 foundadd((nf, ff))
825 foundadd((nf, ff))
829 elif kind == regkind or kind == lnkkind:
826 elif kind == regkind or kind == lnkkind:
830 results[nf] = st
827 results[nf] = st
831 else:
828 else:
832 badfn(ff, badtype(kind))
829 badfn(ff, badtype(kind))
833 if nf in dmap:
830 if nf in dmap:
834 results[nf] = None
831 results[nf] = None
835 except OSError as inst: # nf not found on disk - it is dirstate only
832 except OSError as inst: # nf not found on disk - it is dirstate only
836 if nf in dmap: # does it exactly match a missing file?
833 if nf in dmap: # does it exactly match a missing file?
837 results[nf] = None
834 results[nf] = None
838 else: # does it match a missing directory?
835 else: # does it match a missing directory?
839 if self._map.hasdir(nf):
836 if self._map.hasdir(nf):
-840 if matchedir:
-841 matchedir(nf)
842 notfoundadd(nf)
837 notfoundadd(nf)
843 else:
838 else:
844 badfn(ff, encoding.strtolocal(inst.strerror))
839 badfn(ff, encoding.strtolocal(inst.strerror))
845
840
846 # match.files() may contain explicitly-specified paths that shouldn't
841 # match.files() may contain explicitly-specified paths that shouldn't
847 # be taken; drop them from the list of files found. dirsfound/notfound
842 # be taken; drop them from the list of files found. dirsfound/notfound
848 # aren't filtered here because they will be tested later.
843 # aren't filtered here because they will be tested later.
849 if match.anypats():
844 if match.anypats():
850 for f in list(results):
845 for f in list(results):
851 if f == b'.hg' or f in subrepos:
846 if f == b'.hg' or f in subrepos:
852 # keep sentinel to disable further out-of-repo walks
847 # keep sentinel to disable further out-of-repo walks
853 continue
848 continue
854 if not match(f):
849 if not match(f):
855 del results[f]
850 del results[f]
856
851
857 # Case insensitive filesystems cannot rely on lstat() failing to detect
852 # Case insensitive filesystems cannot rely on lstat() failing to detect
858 # a case-only rename. Prune the stat object for any file that does not
853 # a case-only rename. Prune the stat object for any file that does not
859 # match the case in the filesystem, if there are multiple files that
854 # match the case in the filesystem, if there are multiple files that
860 # normalize to the same path.
855 # normalize to the same path.
861 if match.isexact() and self._checkcase:
856 if match.isexact() and self._checkcase:
862 normed = {}
857 normed = {}
863
858
864 for f, st in pycompat.iteritems(results):
859 for f, st in pycompat.iteritems(results):
865 if st is None:
860 if st is None:
866 continue
861 continue
867
862
868 nc = util.normcase(f)
863 nc = util.normcase(f)
869 paths = normed.get(nc)
864 paths = normed.get(nc)
870
865
871 if paths is None:
866 if paths is None:
872 paths = set()
867 paths = set()
873 normed[nc] = paths
868 normed[nc] = paths
874
869
875 paths.add(f)
870 paths.add(f)
876
871
877 for norm, paths in pycompat.iteritems(normed):
872 for norm, paths in pycompat.iteritems(normed):
878 if len(paths) > 1:
873 if len(paths) > 1:
879 for path in paths:
874 for path in paths:
880 folded = self._discoverpath(
875 folded = self._discoverpath(
881 path, norm, True, None, self._map.dirfoldmap
876 path, norm, True, None, self._map.dirfoldmap
882 )
877 )
883 if path != folded:
878 if path != folded:
884 results[path] = None
879 results[path] = None
885
880
886 return results, dirsfound, dirsnotfound
881 return results, dirsfound, dirsnotfound
887
882
888 def walk(self, match, subrepos, unknown, ignored, full=True):
883 def walk(self, match, subrepos, unknown, ignored, full=True):
889 '''
884 '''
890 Walk recursively through the directory tree, finding all files
885 Walk recursively through the directory tree, finding all files
891 matched by match.
886 matched by match.
892
887
893 If full is False, maybe skip some known-clean files.
888 If full is False, maybe skip some known-clean files.
894
889
895 Return a dict mapping filename to stat-like object (either
890 Return a dict mapping filename to stat-like object (either
896 mercurial.osutil.stat instance or return value of os.stat()).
891 mercurial.osutil.stat instance or return value of os.stat()).
897
892
898 '''
893 '''
899 # full is a flag that extensions that hook into walk can use -- this
894 # full is a flag that extensions that hook into walk can use -- this
900 # implementation doesn't use it at all. This satisfies the contract
895 # implementation doesn't use it at all. This satisfies the contract
901 # because we only guarantee a "maybe".
896 # because we only guarantee a "maybe".
902
897
903 if ignored:
898 if ignored:
904 ignore = util.never
899 ignore = util.never
905 dirignore = util.never
900 dirignore = util.never
906 elif unknown:
901 elif unknown:
907 ignore = self._ignore
902 ignore = self._ignore
908 dirignore = self._dirignore
903 dirignore = self._dirignore
909 else:
904 else:
910 # if not unknown and not ignored, drop dir recursion and step 2
905 # if not unknown and not ignored, drop dir recursion and step 2
911 ignore = util.always
906 ignore = util.always
912 dirignore = util.always
907 dirignore = util.always
913
908
914 matchfn = match.matchfn
909 matchfn = match.matchfn
915 matchalways = match.always()
910 matchalways = match.always()
916 matchtdir = match.traversedir
911 matchtdir = match.traversedir
917 dmap = self._map
912 dmap = self._map
918 listdir = util.listdir
913 listdir = util.listdir
919 lstat = os.lstat
914 lstat = os.lstat
920 dirkind = stat.S_IFDIR
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join

        exact = skipstep3 = False
        if match.isexact():  # match.exact
            exact = True
            dirignore = util.always  # skip step 2
        elif match.prefix():  # match.match, no patterns
            skipstep3 = True

        if not exact and self._checkcase:
            normalize = self._normalize
            normalizefile = self._normalizefile
            skipstep3 = False
        else:
            normalize = self._normalize
            normalizefile = None

        # step 1: find all explicit files
        results, work, dirsnotfound = self._walkexplicit(match, subrepos)
        if matchtdir:
            for d in work:
                matchtdir(d[0])
            for d in dirsnotfound:
                matchtdir(d)

        skipstep3 = skipstep3 and not (work or dirsnotfound)
        work = [d for d in work if not dirignore(d[0])]

        # step 2: visit subdirectories
        def traverse(work, alreadynormed):
            wadd = work.append
            while work:
                tracing.counter('dirstate.walk work', len(work))
                nd = work.pop()
                visitentries = match.visitchildrenset(nd)
                if not visitentries:
                    continue
                if visitentries == b'this' or visitentries == b'all':
                    visitentries = None
                skip = None
                if nd != b'':
                    skip = b'.hg'
                try:
                    with tracing.log('dirstate.walk.traverse listdir %s', nd):
                        entries = listdir(join(nd), stat=True, skip=skip)
                except OSError as inst:
                    if inst.errno in (errno.EACCES, errno.ENOENT):
                        match.bad(
                            self.pathto(nd), encoding.strtolocal(inst.strerror)
                        )
                        continue
                    raise
                for f, kind, st in entries:
                    # Some matchers may return files in the visitentries set,
                    # instead of 'this', if the matcher explicitly mentions them
                    # and is not an exactmatcher. This is acceptable; we do not
                    # make any hard assumptions about file-or-directory below
                    # based on the presence of `f` in visitentries. If
                    # visitchildrenset returned a set, we can always skip the
                    # entries *not* in the set it provided regardless of whether
                    # they're actually a file or a directory.
                    if visitentries and f not in visitentries:
                        continue
                    if normalizefile:
                        # even though f might be a directory, we're only
                        # interested in comparing it to files currently in the
                        # dmap -- therefore normalizefile is enough
                        nf = normalizefile(
                            nd and (nd + b"/" + f) or f, True, True
                        )
                    else:
                        nf = nd and (nd + b"/" + f) or f
                    if nf not in results:
                        if kind == dirkind:
                            if not ignore(nf):
                                if matchtdir:
                                    matchtdir(nf)
                                wadd(nf)
                            if nf in dmap and (matchalways or matchfn(nf)):
                                results[nf] = None
                        elif kind == regkind or kind == lnkkind:
                            if nf in dmap:
                                if matchalways or matchfn(nf):
                                    results[nf] = st
                            elif (matchalways or matchfn(nf)) and not ignore(
                                nf
                            ):
                                # unknown file -- normalize if necessary
                                if not alreadynormed:
                                    nf = normalize(nf, False, True)
                                results[nf] = st
                        elif nf in dmap and (matchalways or matchfn(nf)):
                            results[nf] = None

        for nd, d in work:
            # alreadynormed means that processwork doesn't have to do any
            # expensive directory normalization
            alreadynormed = not normalize or nd == d
            traverse([d], alreadynormed)

        for s in subrepos:
            del results[s]
        del results[b'.hg']

        # step 3: visit remaining files from dmap
        if not skipstep3 and not exact:
            # If a dmap file is not in results yet, it was either
            # a) not matching matchfn b) ignored, c) missing, or d) under a
            # symlink directory.
            if not results and matchalways:
                visit = [f for f in dmap]
            else:
                visit = [f for f in dmap if f not in results and matchfn(f)]
            visit.sort()

            if unknown:
                # unknown == True means we walked all dirs under the roots
                # that weren't ignored, and everything that matched was stat'ed
                # and is already in results.
                # The rest must thus be ignored or under a symlink.
                audit_path = pathutil.pathauditor(self._root, cached=True)

                for nf in iter(visit):
                    # If a stat for the same file was already added with a
                    # different case, don't add one for this, since that would
                    # make it appear as if the file exists under both names
                    # on disk.
                    if (
                        normalizefile
                        and normalizefile(nf, True, True) in results
                    ):
                        results[nf] = None
                    # Report ignored items in the dmap as long as they are not
                    # under a symlink directory.
                    elif audit_path.check(nf):
                        try:
                            results[nf] = lstat(join(nf))
                            # file was just ignored, no links, and exists
                        except OSError:
                            # file doesn't exist
                            results[nf] = None
                    else:
                        # It's either missing or under a symlink directory
                        # which we in this case report as missing
                        results[nf] = None
            else:
                # We may not have walked the full directory tree above,
                # so stat and check everything we missed.
                iv = iter(visit)
                for st in util.statfiles([join(i) for i in visit]):
                    results[next(iv)] = st
        return results
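
    # Illustrative sketch, not part of the original module: a caller normally
    # drives walk() the way status() below does; `repo` and the argument
    # values here are assumed for the example.
    #
    #   m = matchmod.always()
    #   results = repo.dirstate.walk(m, subrepos=[], unknown=True, ignored=False)
    #   for fname, st in pycompat.iteritems(results):
    #       if st is None:
    #           pass  # known to the dirstate but not stat'ed here (e.g. missing,
    #                 # shadowed by a directory, or only reachable via a symlink)
    #       else:
    #           pass  # st is the lstat() result for the file on disk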

    def status(self, match, subrepos, ignored, clean, unknown):
        '''Determine the status of the working copy relative to the
        dirstate and return a pair of (unsure, status), where status is of type
        scmutil.status and:

          unsure:
            files that might have been modified since the dirstate was
            written, but need to be read to be sure (size is the same
            but mtime differs)
          status.modified:
            files that have definitely been modified since the dirstate
            was written (different size or mode)
          status.clean:
            files that have definitely not been modified since the
            dirstate was written
        '''
        listignored, listclean, listunknown = ignored, clean, unknown
        lookup, modified, added, unknown, ignored = [], [], [], [], []
        removed, deleted, clean = [], [], []

        dmap = self._map
        dmap.preload()

        use_rust = True
        if rustmod is None:
            use_rust = False
        elif subrepos:
            use_rust = False
        if bool(listunknown):
            # Pathauditor does not exist yet in Rust, unknown files
            # can't be trusted.
            use_rust = False
        elif self._ignorefiles() and listignored:
            # Rust has no ignore mechanism yet, so don't use Rust for
            # commands that need ignore.
            use_rust = False
        elif not match.always():
            # Matchers have yet to be implemented
            use_rust = False

        if use_rust:
            # Force Rayon (Rust parallelism library) to respect the number of
            # workers. This is a temporary workaround until Rust code knows
            # how to read the config file.
            numcpus = self._ui.configint(b"worker", b"numcpus")
            if numcpus is not None:
                encoding.environ.setdefault(
                    b'RAYON_NUM_THREADS', b'%d' % numcpus
                )

            workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
            if not workers_enabled:
                encoding.environ[b"RAYON_NUM_THREADS"] = b"1"

            (
                lookup,
                modified,
                added,
                removed,
                deleted,
                unknown,
                clean,
            ) = rustmod.status(
                dmap._rustmap,
                self._rootdir,
                bool(listclean),
                self._lastnormaltime,
                self._checkexec,
            )

            status = scmutil.status(
                modified=modified,
                added=added,
                removed=removed,
                deleted=deleted,
                unknown=unknown,
                ignored=ignored,
                clean=clean,
            )
            return (lookup, status)

        def noop(f):
            pass

        dcontains = dmap.__contains__
        dget = dmap.__getitem__
        ladd = lookup.append  # aka "unsure"
        madd = modified.append
        aadd = added.append
        uadd = unknown.append if listunknown else noop
        iadd = ignored.append if listignored else noop
        radd = removed.append
        dadd = deleted.append
        cadd = clean.append if listclean else noop
        mexact = match.exact
        dirignore = self._dirignore
        checkexec = self._checkexec
        copymap = self._map.copymap
        lastnormaltime = self._lastnormaltime

        # We need to do full walks when either
        # - we're listing all clean files, or
        # - match.traversedir does something, because match.traversedir should
        #   be called for every dir in the working dir
        full = listclean or match.traversedir is not None
        for fn, st in pycompat.iteritems(
            self.walk(match, subrepos, listunknown, listignored, full=full)
        ):
            if not dcontains(fn):
                if (listignored or mexact(fn)) and dirignore(fn):
                    if listignored:
                        iadd(fn)
                else:
                    uadd(fn)
                continue

            # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
            # written like that for performance reasons. dmap[fn] is not a
            # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
            # opcode has fast paths when the value to be unpacked is a tuple or
            # a list, but falls back to creating a full-fledged iterator in
            # general. That is much slower than simply accessing and storing the
            # tuple members one by one.
            t = dget(fn)
            state = t[0]
            mode = t[1]
            size = t[2]
            time = t[3]

            if not st and state in b"nma":
                dadd(fn)
            elif state == b'n':
                if (
                    size >= 0
                    and (
                        (size != st.st_size and size != st.st_size & _rangemask)
                        or ((mode ^ st.st_mode) & 0o100 and checkexec)
                    )
                    or size == -2  # other parent
                    or fn in copymap
                ):
                    madd(fn)
                elif (
                    time != st[stat.ST_MTIME]
                    and time != st[stat.ST_MTIME] & _rangemask
                ):
                    ladd(fn)
                elif st[stat.ST_MTIME] == lastnormaltime:
                    # fn may have just been marked as normal and it may have
                    # changed in the same second without changing its size.
                    # This can happen if we quickly do multiple commits.
                    # Force lookup, so we don't miss such a racy file change.
                    ladd(fn)
                elif listclean:
                    cadd(fn)
            elif state == b'm':
                madd(fn)
            elif state == b'a':
                aadd(fn)
            elif state == b'r':
                radd(fn)

        return (
            lookup,
            scmutil.status(
                modified, added, removed, deleted, unknown, ignored, clean
            ),
        )

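    # Illustrative sketch, not part of the original module: how the
    # (unsure, status) pair returned above is typically consumed; `repo` is
    # assumed.  Entries in `unsure` have an unchanged size but a different
    # mtime, so only a content comparison against the parent revision can
    # classify them as modified or clean.
    #
    #   unsure, s = repo.dirstate.status(
    #       matchmod.always(), subrepos=[], ignored=False, clean=False, unknown=False
    #   )
    #   definitely_modified = list(s.modified)
    #   maybe_modified = list(unsure)  # still needs a file-content check
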
    def matches(self, match):
        '''
        return files in the dirstate (in whatever state) filtered by match
        '''
        dmap = self._map
        if match.always():
            return dmap.keys()
        files = match.files()
        if match.isexact():
            # fast path -- filter the other way around, since typically files is
            # much smaller than dmap
            return [f for f in files if f in dmap]
        if match.prefix() and all(fn in dmap for fn in files):
            # fast path -- all the values are known to be files, so just return
            # that
            return list(files)
        return [f for f in dmap if match(f)]

    def _actualfilename(self, tr):
        if tr:
            return self._pendingfilename
        else:
            return self._filename

    def savebackup(self, tr, backupname):
        '''Save current dirstate into backup file'''
        filename = self._actualfilename(tr)
        assert backupname != filename

        # use '_writedirstate' instead of 'write' to make sure changes are
        # always written out, because the latter skips writing while a
        # transaction is running. The output file will be used to create a
        # backup of the dirstate at this point.
        if self._dirty or not self._opener.exists(filename):
            self._writedirstate(
                self._opener(filename, b"w", atomictemp=True, checkambig=True)
            )

        if tr:
            # ensure that subsequent tr.writepending returns True for
            # changes written out above, even if dirstate is never
            # changed after this
            tr.addfilegenerator(
                b'dirstate',
                (self._filename,),
                self._writedirstate,
                location=b'plain',
            )

            # ensure that pending file written above is unlinked at
            # failure, even if tr.writepending isn't invoked until the
            # end of this transaction
            tr.registertmp(filename, location=b'plain')

        self._opener.tryunlink(backupname)
        # hardlink backup is okay because _writedirstate is always called
        # with an "atomictemp=True" file.
        util.copyfile(
            self._opener.join(filename),
            self._opener.join(backupname),
            hardlink=True,
        )

    def restorebackup(self, tr, backupname):
        '''Restore dirstate from backup file'''
        # this "invalidate()" prevents "wlock.release()" from writing
        # changes of dirstate out after restoring from backup file
        self.invalidate()
        filename = self._actualfilename(tr)
        o = self._opener
        if util.samefile(o.join(backupname), o.join(filename)):
            o.unlink(backupname)
        else:
            o.rename(backupname, filename, checkambig=True)

    def clearbackup(self, tr, backupname):
        '''Clear backup file'''
        self._opener.unlink(backupname)
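
    # Illustrative sketch, not part of the original module: savebackup,
    # restorebackup and clearbackup are meant to be used as a save/try/restore
    # cycle around a risky working-copy operation.  The lock handling and the
    # backup name shown here are assumed.
    #
    #   with repo.wlock():
    #       tr = repo.currenttransaction()
    #       repo.dirstate.savebackup(tr, b'dirstate.example-backup')
    #       try:
    #           pass  # mutate the working copy / dirstate
    #       except Exception:
    #           repo.dirstate.restorebackup(None, b'dirstate.example-backup')
    #           raise
    #       else:
    #           repo.dirstate.clearbackup(None, b'dirstate.example-backup')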


class dirstatemap(object):
    """Map encapsulating the dirstate's contents.

    The dirstate contains the following state:

    - `identity` is the identity of the dirstate file, which can be used to
      detect when changes have occurred to the dirstate file.

    - `parents` is a pair containing the parents of the working copy. The
      parents are updated by calling `setparents`.

    - the state map maps filenames to tuples of (state, mode, size, mtime),
      where state is a single character representing 'normal', 'added',
      'removed', or 'merged'. It is read by treating the dirstate as a
      dict. File state is updated by calling the `addfile`, `removefile` and
      `dropfile` methods.

    - `copymap` maps destination filenames to their source filename.

    The dirstate also provides the following views onto the state:

    - `nonnormalset` is a set of the filenames that have state other
      than 'normal', or are normal but have an mtime of -1 ('normallookup').

    - `otherparentset` is a set of the filenames that are marked as coming
      from the second parent when the dirstate is currently being merged.

    - `filefoldmap` is a dict mapping normalized filenames to the denormalized
      form that they appear as in the dirstate.

    - `dirfoldmap` is a dict mapping normalized directory names to the
      denormalized form that they appear as in the dirstate.
    """

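    # Illustrative examples, not part of the original module: concrete
    # (state, mode, size, mtime) entries of the kinds described above, with
    # made-up file names and values.
    #
    #   b'foo.c'  -> (b'n', 0o644, 2912, 1581234567)  # clean, tracked file
    #   b'bar.c'  -> (b'n', 0, -1, -1)                # 'normallookup': must be
    #                                                 # re-checked on next status
    #   b'new.c'  -> (b'a', 0, -1, -1)                # added, not yet committed
    #   b'gone.c' -> (b'r', 0, 0, 0)                  # removed
    #
    # copymap would additionally hold e.g. b'bar.c' -> b'foo.c' if bar.c had
    # been recorded as a copy of foo.c.
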
    def __init__(self, ui, opener, root):
        self._ui = ui
        self._opener = opener
        self._root = root
        self._filename = b'dirstate'

        self._parents = None
        self._dirtyparents = False

        # for consistent view between _pl() and _read() invocations
        self._pendingmode = None

    @propertycache
    def _map(self):
        self._map = {}
        self.read()
        return self._map

    @propertycache
    def copymap(self):
        self.copymap = {}
        self._map
        return self.copymap

    def clear(self):
        self._map.clear()
        self.copymap.clear()
        self.setparents(nullid, nullid)
        util.clearcachedproperty(self, b"_dirs")
        util.clearcachedproperty(self, b"_alldirs")
        util.clearcachedproperty(self, b"filefoldmap")
        util.clearcachedproperty(self, b"dirfoldmap")
        util.clearcachedproperty(self, b"nonnormalset")
        util.clearcachedproperty(self, b"otherparentset")

    def items(self):
        return pycompat.iteritems(self._map)

    # forward for python2,3 compat
    iteritems = items

    def __len__(self):
        return len(self._map)

    def __iter__(self):
        return iter(self._map)

    def get(self, key, default=None):
        return self._map.get(key, default)

    def __contains__(self, key):
        return key in self._map

    def __getitem__(self, key):
        return self._map[key]

    def keys(self):
        return self._map.keys()

    def preload(self):
        """Loads the underlying data, if it's not already loaded"""
        self._map

    def addfile(self, f, oldstate, state, mode, size, mtime):
        """Add a tracked file to the dirstate."""
        if oldstate in b"?r" and "_dirs" in self.__dict__:
            self._dirs.addpath(f)
        if oldstate == b"?" and "_alldirs" in self.__dict__:
            self._alldirs.addpath(f)
        self._map[f] = dirstatetuple(state, mode, size, mtime)
        if state != b'n' or mtime == -1:
            self.nonnormalset.add(f)
        if size == -2:
            self.otherparentset.add(f)

    def removefile(self, f, oldstate, size):
        """
        Mark a file as removed in the dirstate.

        The `size` parameter is used to store sentinel values that indicate
        the file's previous state. In the future, we should refactor this
        to be more explicit about what that state is.
        """
        if oldstate not in b"?r" and "_dirs" in self.__dict__:
            self._dirs.delpath(f)
        if oldstate == b"?" and "_alldirs" in self.__dict__:
            self._alldirs.addpath(f)
        if "filefoldmap" in self.__dict__:
            normed = util.normcase(f)
            self.filefoldmap.pop(normed, None)
        self._map[f] = dirstatetuple(b'r', 0, size, 0)
        self.nonnormalset.add(f)

    def dropfile(self, f, oldstate):
        """
        Remove a file from the dirstate. Returns True if the file was
        previously recorded.
        """
        exists = self._map.pop(f, None) is not None
        if exists:
            if oldstate != b"r" and "_dirs" in self.__dict__:
                self._dirs.delpath(f)
            if "_alldirs" in self.__dict__:
                self._alldirs.delpath(f)
        if "filefoldmap" in self.__dict__:
            normed = util.normcase(f)
            self.filefoldmap.pop(normed, None)
        self.nonnormalset.discard(f)
        return exists

    def clearambiguoustimes(self, files, now):
        for f in files:
            e = self.get(f)
            if e is not None and e[0] == b'n' and e[3] == now:
                self._map[f] = dirstatetuple(e[0], e[1], e[2], -1)
                self.nonnormalset.add(f)

    def nonnormalentries(self):
        '''Compute the nonnormal dirstate entries from the dmap'''
        try:
            return parsers.nonnormalotherparententries(self._map)
        except AttributeError:
            nonnorm = set()
            otherparent = set()
            for fname, e in pycompat.iteritems(self._map):
                if e[0] != b'n' or e[3] == -1:
                    nonnorm.add(fname)
                if e[0] == b'n' and e[2] == -2:
                    otherparent.add(fname)
            return nonnorm, otherparent

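    # Illustrative sketch, not part of the original module: applying the
    # pure-Python fallback above to the example entries shown near the top of
    # this class would yield
    #
    #   nonnorm     == {b'bar.c', b'new.c', b'gone.c'}  # state != b'n' or mtime == -1
    #   otherparent == set()                            # only b'n' entries with size == -2
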
    @propertycache
    def filefoldmap(self):
        """Returns a dictionary mapping normalized case paths to their
        non-normalized versions.
        """
        try:
            makefilefoldmap = parsers.make_file_foldmap
        except AttributeError:
            pass
        else:
            return makefilefoldmap(
                self._map, util.normcasespec, util.normcasefallback
            )

        f = {}
        normcase = util.normcase
        for name, s in pycompat.iteritems(self._map):
            if s[0] != b'r':
                f[normcase(name)] = name
        f[b'.'] = b'.'  # prevents useless util.fspath() invocation
        return f

    def hastrackeddir(self, d):
        """
        Returns True if the dirstate contains a tracked (not removed) file
        in this directory.
        """
        return d in self._dirs

    def hasdir(self, d):
        """
        Returns True if the dirstate contains a file (tracked or removed)
        in this directory.
        """
        return d in self._alldirs

    @propertycache
    def _dirs(self):
        return pathutil.dirs(self._map, b'r')

    @propertycache
    def _alldirs(self):
        return pathutil.dirs(self._map)

    def _opendirstatefile(self):
        fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
        if self._pendingmode is not None and self._pendingmode != mode:
            fp.close()
            raise error.Abort(
                _(b'working directory state may be changed parallelly')
            )
        self._pendingmode = mode
        return fp

    def parents(self):
        if not self._parents:
            try:
                fp = self._opendirstatefile()
                st = fp.read(40)
                fp.close()
            except IOError as err:
                if err.errno != errno.ENOENT:
                    raise
                # File doesn't exist, so the current state is empty
                st = b''

            l = len(st)
            if l == 40:
                self._parents = (st[:20], st[20:40])
            elif l == 0:
                self._parents = (nullid, nullid)
            else:
                raise error.Abort(
                    _(b'working directory state appears damaged!')
                )

        return self._parents

    def setparents(self, p1, p2):
        self._parents = (p1, p2)
        self._dirtyparents = True

    def read(self):
        # ignore HG_PENDING because identity is used only for writing
        self.identity = util.filestat.frompath(
            self._opener.join(self._filename)
        )

        try:
            fp = self._opendirstatefile()
            try:
                st = fp.read()
            finally:
                fp.close()
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            return
        if not st:
            return

        if util.safehasattr(parsers, b'dict_new_presized'):
            # Make an estimate of the number of files in the dirstate based on
            # its size. From a linear regression on a set of real-world repos,
            # all over 10,000 files, the size of a dirstate entry is 85
            # bytes. The cost of resizing is significantly higher than the cost
            # of filling in a larger presized dict, so subtract 20% from the
            # size.
            #
            # This heuristic is imperfect in many ways, so in a future dirstate
            # format update it makes sense to just record the number of entries
            # on write.
            self._map = parsers.dict_new_presized(len(st) // 71)

        # Python's garbage collector triggers a GC each time a certain number
        # of container objects (the number being defined by
        # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
        # for each file in the dirstate. The C version then immediately marks
        # them as not to be tracked by the collector. However, this has no
        # effect on when GCs are triggered, only on what objects the GC looks
        # into. This means that O(number of files) GCs are unavoidable.
        # Depending on when in the process's lifetime the dirstate is parsed,
        # this can get very expensive. As a workaround, disable GC while
        # parsing the dirstate.
        #
        # (we cannot decorate the function directly since it is in a C module)
        parse_dirstate = util.nogc(parsers.parse_dirstate)
        p = parse_dirstate(self._map, self.copymap, st)
        if not self._dirtyparents:
            self.setparents(*p)

        # Avoid excess attribute lookups by fast pathing certain checks
        self.__contains__ = self._map.__contains__
        self.__getitem__ = self._map.__getitem__
        self.get = self._map.get

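    # Worked example for the presizing heuristic in read() above (made-up
    # numbers): an 8.5 MB dirstate holds roughly 8_500_000 // 85 == 100_000
    # entries, while len(st) // 71 presizes the dict for 8_500_000 // 71 ==
    # 119_718 slots, i.e. roughly 20% of headroom so the dict never has to be
    # resized while it is being filled.
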
    def write(self, st, now):
        st.write(
            parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
        )
        st.close()
        self._dirtyparents = False
        self.nonnormalset, self.otherparentset = self.nonnormalentries()

    @propertycache
    def nonnormalset(self):
        nonnorm, otherparents = self.nonnormalentries()
        self.otherparentset = otherparents
        return nonnorm

    @propertycache
    def otherparentset(self):
        nonnorm, otherparents = self.nonnormalentries()
        self.nonnormalset = nonnorm
        return otherparents

    @propertycache
    def identity(self):
        self._map
        return self.identity

    @propertycache
    def dirfoldmap(self):
        f = {}
        normcase = util.normcase
        for name in self._dirs:
            f[normcase(name)] = name
        return f


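# Illustrative sketch, not part of the original module: `rustmod` is the
# result of policy.importrust('dirstate') near the top of this file.  It is
# None on a pure-Python build, in which case the dirstatemap defined above is
# used; when the Rust extensions are available, the wrapper below (backed by
# rustmod.DirstateMap) shadows it, e.g.:
#
#   rustmod = policy.importrust('dirstate')  # None unless Rust is enabled
#   if rustmod is not None:
#       pass  # redefine dirstatemap on top of rustmod.DirstateMap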
if rustmod is not None:

    class dirstatemap(object):
        def __init__(self, ui, opener, root):
            self._ui = ui
            self._opener = opener
            self._root = root
            self._filename = b'dirstate'
            self._parents = None
            self._dirtyparents = False

            # for consistent view between _pl() and _read() invocations
            self._pendingmode = None

        def addfile(self, *args, **kwargs):
            return self._rustmap.addfile(*args, **kwargs)

        def removefile(self, *args, **kwargs):
            return self._rustmap.removefile(*args, **kwargs)

        def dropfile(self, *args, **kwargs):
            return self._rustmap.dropfile(*args, **kwargs)

        def clearambiguoustimes(self, *args, **kwargs):
            return self._rustmap.clearambiguoustimes(*args, **kwargs)

        def nonnormalentries(self):
            return self._rustmap.nonnormalentries()

        def get(self, *args, **kwargs):
            return self._rustmap.get(*args, **kwargs)

        @propertycache
        def _rustmap(self):
            self._rustmap = rustmod.DirstateMap(self._root)
            self.read()
            return self._rustmap

        @property
        def copymap(self):
            return self._rustmap.copymap()

        def preload(self):
            self._rustmap

        def clear(self):
            self._rustmap.clear()
            self.setparents(nullid, nullid)
            util.clearcachedproperty(self, b"_dirs")
            util.clearcachedproperty(self, b"_alldirs")
            util.clearcachedproperty(self, b"dirfoldmap")

        def items(self):
            return self._rustmap.items()

        def keys(self):
            return iter(self._rustmap)

        def __contains__(self, key):
            return key in self._rustmap

        def __getitem__(self, item):
            return self._rustmap[item]

        def __len__(self):
            return len(self._rustmap)

        def __iter__(self):
            return iter(self._rustmap)

        # forward for python2,3 compat
        iteritems = items

        def _opendirstatefile(self):
            fp, mode = txnutil.trypending(
                self._root, self._opener, self._filename
            )
            if self._pendingmode is not None and self._pendingmode != mode:
                fp.close()
                raise error.Abort(
                    _(b'working directory state may be changed parallelly')
                )
            self._pendingmode = mode
            return fp

        def setparents(self, p1, p2):
            self._rustmap.setparents(p1, p2)
            self._parents = (p1, p2)
            self._dirtyparents = True

        def parents(self):
            if not self._parents:
                try:
                    fp = self._opendirstatefile()
                    st = fp.read(40)
                    fp.close()
                except IOError as err:
                    if err.errno != errno.ENOENT:
                        raise
                    # File doesn't exist, so the current state is empty
                    st = b''

                try:
                    self._parents = self._rustmap.parents(st)
                except ValueError:
                    raise error.Abort(
                        _(b'working directory state appears damaged!')
                    )

            return self._parents

        def read(self):
            # ignore HG_PENDING because identity is used only for writing
            self.identity = util.filestat.frompath(
                self._opener.join(self._filename)
            )

            try:
                fp = self._opendirstatefile()
                try:
                    st = fp.read()
                finally:
                    fp.close()
            except IOError as err:
                if err.errno != errno.ENOENT:
                    raise
                return
            if not st:
                return

            parse_dirstate = util.nogc(self._rustmap.read)
            parents = parse_dirstate(st)
            if parents and not self._dirtyparents:
                self.setparents(*parents)

            self.__contains__ = self._rustmap.__contains__
            self.__getitem__ = self._rustmap.__getitem__
            self.get = self._rustmap.get

        def write(self, st, now):
            parents = self.parents()
            st.write(self._rustmap.write(parents[0], parents[1], now))
            st.close()
            self._dirtyparents = False

        @propertycache
        def filefoldmap(self):
            """Returns a dictionary mapping normalized case paths to their
            non-normalized versions.
            """
            return self._rustmap.filefoldmapasdict()

        def hastrackeddir(self, d):
            self._dirs  # Trigger Python's propertycache
            return self._rustmap.hastrackeddir(d)

        def hasdir(self, d):
            self._dirs  # Trigger Python's propertycache
            return self._rustmap.hasdir(d)

        @propertycache
        def _dirs(self):
            return self._rustmap.getdirs()

        @propertycache
        def _alldirs(self):
            return self._rustmap.getalldirs()

        @propertycache
        def identity(self):
            self._rustmap
            return self.identity

        @property
        def nonnormalset(self):
            nonnorm, otherparents = self._rustmap.nonnormalentries()
            return nonnorm

        @property
        def otherparentset(self):
            nonnorm, otherparents = self._rustmap.nonnormalentries()
            return otherparents

        @propertycache
        def dirfoldmap(self):
            f = {}
            normcase = util.normcase
            for name in self._dirs:
                f[normcase(name)] = name
            return f