##// END OF EJS Templates
rust-dirstate-status: add call to rust-fast path for `dirstate.status`...
Raphaël Gomès -
r43568:733d4ffc default
parent child Browse files
Show More
@@ -1,1784 +1,1837
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import nullid
17 from .node import nullid
18 from .pycompat import delattr
18 from .pycompat import delattr
19
19
20 from hgdemandimport import tracing
20 from hgdemandimport import tracing
21
21
22 from . import (
22 from . import (
23 encoding,
23 encoding,
24 error,
24 error,
25 match as matchmod,
25 match as matchmod,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 sparse,
30 txnutil,
31 txnutil,
31 util,
32 util,
32 )
33 )
33
34
34 from .interfaces import (
35 from .interfaces import (
35 dirstate as intdirstate,
36 dirstate as intdirstate,
36 util as interfaceutil,
37 util as interfaceutil,
37 )
38 )
38
39
39 parsers = policy.importmod(r'parsers')
40 parsers = policy.importmod(r'parsers')
40 rustmod = policy.importrust(r'dirstate')
41 rustmod = policy.importrust(r'dirstate')
41
42
42 propertycache = util.propertycache
43 propertycache = util.propertycache
43 filecache = scmutil.filecache
44 filecache = scmutil.filecache
44 _rangemask = 0x7FFFFFFF
45 _rangemask = 0x7FFFFFFF
45
46
46 dirstatetuple = parsers.dirstatetuple
47 dirstatetuple = parsers.dirstatetuple
47
48
48
49
49 class repocache(filecache):
50 class repocache(filecache):
50 """filecache for files in .hg/"""
51 """filecache for files in .hg/"""
51
52
52 def join(self, obj, fname):
53 def join(self, obj, fname):
53 return obj._opener.join(fname)
54 return obj._opener.join(fname)
54
55
55
56
56 class rootcache(filecache):
57 class rootcache(filecache):
57 """filecache for files in the repository root"""
58 """filecache for files in the repository root"""
58
59
59 def join(self, obj, fname):
60 def join(self, obj, fname):
60 return obj._join(fname)
61 return obj._join(fname)
61
62
62
63
63 def _getfsnow(vfs):
64 def _getfsnow(vfs):
64 '''Get "now" timestamp on filesystem'''
65 '''Get "now" timestamp on filesystem'''
65 tmpfd, tmpname = vfs.mkstemp()
66 tmpfd, tmpname = vfs.mkstemp()
66 try:
67 try:
67 return os.fstat(tmpfd)[stat.ST_MTIME]
68 return os.fstat(tmpfd)[stat.ST_MTIME]
68 finally:
69 finally:
69 os.close(tmpfd)
70 os.close(tmpfd)
70 vfs.unlink(tmpname)
71 vfs.unlink(tmpname)
71
72
72
73
73 @interfaceutil.implementer(intdirstate.idirstate)
74 @interfaceutil.implementer(intdirstate.idirstate)
74 class dirstate(object):
75 class dirstate(object):
75 def __init__(self, opener, ui, root, validate, sparsematchfn):
76 def __init__(self, opener, ui, root, validate, sparsematchfn):
76 '''Create a new dirstate object.
77 '''Create a new dirstate object.
77
78
78 opener is an open()-like callable that can be used to open the
79 opener is an open()-like callable that can be used to open the
79 dirstate file; root is the root of the directory tracked by
80 dirstate file; root is the root of the directory tracked by
80 the dirstate.
81 the dirstate.
81 '''
82 '''
82 self._opener = opener
83 self._opener = opener
83 self._validate = validate
84 self._validate = validate
84 self._root = root
85 self._root = root
85 self._sparsematchfn = sparsematchfn
86 self._sparsematchfn = sparsematchfn
86 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
87 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
87 # UNC path pointing to root share (issue4557)
88 # UNC path pointing to root share (issue4557)
88 self._rootdir = pathutil.normasprefix(root)
89 self._rootdir = pathutil.normasprefix(root)
89 self._dirty = False
90 self._dirty = False
90 self._lastnormaltime = 0
91 self._lastnormaltime = 0
91 self._ui = ui
92 self._ui = ui
92 self._filecache = {}
93 self._filecache = {}
93 self._parentwriters = 0
94 self._parentwriters = 0
94 self._filename = b'dirstate'
95 self._filename = b'dirstate'
95 self._pendingfilename = b'%s.pending' % self._filename
96 self._pendingfilename = b'%s.pending' % self._filename
96 self._plchangecallbacks = {}
97 self._plchangecallbacks = {}
97 self._origpl = None
98 self._origpl = None
98 self._updatedfiles = set()
99 self._updatedfiles = set()
99 self._mapcls = dirstatemap
100 self._mapcls = dirstatemap
100 # Access and cache cwd early, so we don't access it for the first time
101 # Access and cache cwd early, so we don't access it for the first time
101 # after a working-copy update caused it to not exist (accessing it then
102 # after a working-copy update caused it to not exist (accessing it then
102 # raises an exception).
103 # raises an exception).
103 self._cwd
104 self._cwd
104
105
105 @contextlib.contextmanager
106 @contextlib.contextmanager
106 def parentchange(self):
107 def parentchange(self):
107 '''Context manager for handling dirstate parents.
108 '''Context manager for handling dirstate parents.
108
109
109 If an exception occurs in the scope of the context manager,
110 If an exception occurs in the scope of the context manager,
110 the incoherent dirstate won't be written when wlock is
111 the incoherent dirstate won't be written when wlock is
111 released.
112 released.
112 '''
113 '''
113 self._parentwriters += 1
114 self._parentwriters += 1
114 yield
115 yield
115 # Typically we want the "undo" step of a context manager in a
116 # Typically we want the "undo" step of a context manager in a
116 # finally block so it happens even when an exception
117 # finally block so it happens even when an exception
117 # occurs. In this case, however, we only want to decrement
118 # occurs. In this case, however, we only want to decrement
118 # parentwriters if the code in the with statement exits
119 # parentwriters if the code in the with statement exits
119 # normally, so we don't have a try/finally here on purpose.
120 # normally, so we don't have a try/finally here on purpose.
120 self._parentwriters -= 1
121 self._parentwriters -= 1
121
122
122 def pendingparentchange(self):
123 def pendingparentchange(self):
123 '''Returns true if the dirstate is in the middle of a set of changes
124 '''Returns true if the dirstate is in the middle of a set of changes
124 that modify the dirstate parent.
125 that modify the dirstate parent.
125 '''
126 '''
126 return self._parentwriters > 0
127 return self._parentwriters > 0
127
128
128 @propertycache
129 @propertycache
129 def _map(self):
130 def _map(self):
130 """Return the dirstate contents (see documentation for dirstatemap)."""
131 """Return the dirstate contents (see documentation for dirstatemap)."""
131 self._map = self._mapcls(self._ui, self._opener, self._root)
132 self._map = self._mapcls(self._ui, self._opener, self._root)
132 return self._map
133 return self._map
133
134
134 @property
135 @property
135 def _sparsematcher(self):
136 def _sparsematcher(self):
136 """The matcher for the sparse checkout.
137 """The matcher for the sparse checkout.
137
138
138 The working directory may not include every file from a manifest. The
139 The working directory may not include every file from a manifest. The
139 matcher obtained by this property will match a path if it is to be
140 matcher obtained by this property will match a path if it is to be
140 included in the working directory.
141 included in the working directory.
141 """
142 """
142 # TODO there is potential to cache this property. For now, the matcher
143 # TODO there is potential to cache this property. For now, the matcher
143 # is resolved on every access. (But the called function does use a
144 # is resolved on every access. (But the called function does use a
144 # cache to keep the lookup fast.)
145 # cache to keep the lookup fast.)
145 return self._sparsematchfn()
146 return self._sparsematchfn()
146
147
147 @repocache(b'branch')
148 @repocache(b'branch')
148 def _branch(self):
149 def _branch(self):
149 try:
150 try:
150 return self._opener.read(b"branch").strip() or b"default"
151 return self._opener.read(b"branch").strip() or b"default"
151 except IOError as inst:
152 except IOError as inst:
152 if inst.errno != errno.ENOENT:
153 if inst.errno != errno.ENOENT:
153 raise
154 raise
154 return b"default"
155 return b"default"
155
156
156 @property
157 @property
157 def _pl(self):
158 def _pl(self):
158 return self._map.parents()
159 return self._map.parents()
159
160
160 def hasdir(self, d):
161 def hasdir(self, d):
161 return self._map.hastrackeddir(d)
162 return self._map.hastrackeddir(d)
162
163
163 @rootcache(b'.hgignore')
164 @rootcache(b'.hgignore')
164 def _ignore(self):
165 def _ignore(self):
165 files = self._ignorefiles()
166 files = self._ignorefiles()
166 if not files:
167 if not files:
167 return matchmod.never()
168 return matchmod.never()
168
169
169 pats = [b'include:%s' % f for f in files]
170 pats = [b'include:%s' % f for f in files]
170 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
171 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
171
172
172 @propertycache
173 @propertycache
173 def _slash(self):
174 def _slash(self):
174 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
175 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
175
176
176 @propertycache
177 @propertycache
177 def _checklink(self):
178 def _checklink(self):
178 return util.checklink(self._root)
179 return util.checklink(self._root)
179
180
180 @propertycache
181 @propertycache
181 def _checkexec(self):
182 def _checkexec(self):
182 return util.checkexec(self._root)
183 return util.checkexec(self._root)
183
184
184 @propertycache
185 @propertycache
185 def _checkcase(self):
186 def _checkcase(self):
186 return not util.fscasesensitive(self._join(b'.hg'))
187 return not util.fscasesensitive(self._join(b'.hg'))
187
188
188 def _join(self, f):
189 def _join(self, f):
189 # much faster than os.path.join()
190 # much faster than os.path.join()
190 # it's safe because f is always a relative path
191 # it's safe because f is always a relative path
191 return self._rootdir + f
192 return self._rootdir + f
192
193
193 def flagfunc(self, buildfallback):
194 def flagfunc(self, buildfallback):
194 if self._checklink and self._checkexec:
195 if self._checklink and self._checkexec:
195
196
196 def f(x):
197 def f(x):
197 try:
198 try:
198 st = os.lstat(self._join(x))
199 st = os.lstat(self._join(x))
199 if util.statislink(st):
200 if util.statislink(st):
200 return b'l'
201 return b'l'
201 if util.statisexec(st):
202 if util.statisexec(st):
202 return b'x'
203 return b'x'
203 except OSError:
204 except OSError:
204 pass
205 pass
205 return b''
206 return b''
206
207
207 return f
208 return f
208
209
209 fallback = buildfallback()
210 fallback = buildfallback()
210 if self._checklink:
211 if self._checklink:
211
212
212 def f(x):
213 def f(x):
213 if os.path.islink(self._join(x)):
214 if os.path.islink(self._join(x)):
214 return b'l'
215 return b'l'
215 if b'x' in fallback(x):
216 if b'x' in fallback(x):
216 return b'x'
217 return b'x'
217 return b''
218 return b''
218
219
219 return f
220 return f
220 if self._checkexec:
221 if self._checkexec:
221
222
222 def f(x):
223 def f(x):
223 if b'l' in fallback(x):
224 if b'l' in fallback(x):
224 return b'l'
225 return b'l'
225 if util.isexec(self._join(x)):
226 if util.isexec(self._join(x)):
226 return b'x'
227 return b'x'
227 return b''
228 return b''
228
229
229 return f
230 return f
230 else:
231 else:
231 return fallback
232 return fallback
232
233
233 @propertycache
234 @propertycache
234 def _cwd(self):
235 def _cwd(self):
235 # internal config: ui.forcecwd
236 # internal config: ui.forcecwd
236 forcecwd = self._ui.config(b'ui', b'forcecwd')
237 forcecwd = self._ui.config(b'ui', b'forcecwd')
237 if forcecwd:
238 if forcecwd:
238 return forcecwd
239 return forcecwd
239 return encoding.getcwd()
240 return encoding.getcwd()
240
241
241 def getcwd(self):
242 def getcwd(self):
242 '''Return the path from which a canonical path is calculated.
243 '''Return the path from which a canonical path is calculated.
243
244
244 This path should be used to resolve file patterns or to convert
245 This path should be used to resolve file patterns or to convert
245 canonical paths back to file paths for display. It shouldn't be
246 canonical paths back to file paths for display. It shouldn't be
246 used to get real file paths. Use vfs functions instead.
247 used to get real file paths. Use vfs functions instead.
247 '''
248 '''
248 cwd = self._cwd
249 cwd = self._cwd
249 if cwd == self._root:
250 if cwd == self._root:
250 return b''
251 return b''
251 # self._root ends with a path separator if self._root is '/' or 'C:\'
252 # self._root ends with a path separator if self._root is '/' or 'C:\'
252 rootsep = self._root
253 rootsep = self._root
253 if not util.endswithsep(rootsep):
254 if not util.endswithsep(rootsep):
254 rootsep += pycompat.ossep
255 rootsep += pycompat.ossep
255 if cwd.startswith(rootsep):
256 if cwd.startswith(rootsep):
256 return cwd[len(rootsep) :]
257 return cwd[len(rootsep) :]
257 else:
258 else:
258 # we're outside the repo. return an absolute path.
259 # we're outside the repo. return an absolute path.
259 return cwd
260 return cwd
260
261
261 def pathto(self, f, cwd=None):
262 def pathto(self, f, cwd=None):
262 if cwd is None:
263 if cwd is None:
263 cwd = self.getcwd()
264 cwd = self.getcwd()
264 path = util.pathto(self._root, cwd, f)
265 path = util.pathto(self._root, cwd, f)
265 if self._slash:
266 if self._slash:
266 return util.pconvert(path)
267 return util.pconvert(path)
267 return path
268 return path
268
269
269 def __getitem__(self, key):
270 def __getitem__(self, key):
270 '''Return the current state of key (a filename) in the dirstate.
271 '''Return the current state of key (a filename) in the dirstate.
271
272
272 States are:
273 States are:
273 n normal
274 n normal
274 m needs merging
275 m needs merging
275 r marked for removal
276 r marked for removal
276 a marked for addition
277 a marked for addition
277 ? not tracked
278 ? not tracked
278 '''
279 '''
279 return self._map.get(key, (b"?",))[0]
280 return self._map.get(key, (b"?",))[0]
280
281
281 def __contains__(self, key):
282 def __contains__(self, key):
282 return key in self._map
283 return key in self._map
283
284
284 def __iter__(self):
285 def __iter__(self):
285 return iter(sorted(self._map))
286 return iter(sorted(self._map))
286
287
287 def items(self):
288 def items(self):
288 return pycompat.iteritems(self._map)
289 return pycompat.iteritems(self._map)
289
290
290 iteritems = items
291 iteritems = items
291
292
292 def parents(self):
293 def parents(self):
293 return [self._validate(p) for p in self._pl]
294 return [self._validate(p) for p in self._pl]
294
295
295 def p1(self):
296 def p1(self):
296 return self._validate(self._pl[0])
297 return self._validate(self._pl[0])
297
298
298 def p2(self):
299 def p2(self):
299 return self._validate(self._pl[1])
300 return self._validate(self._pl[1])
300
301
301 def branch(self):
302 def branch(self):
302 return encoding.tolocal(self._branch)
303 return encoding.tolocal(self._branch)
303
304
304 def setparents(self, p1, p2=nullid):
305 def setparents(self, p1, p2=nullid):
305 """Set dirstate parents to p1 and p2.
306 """Set dirstate parents to p1 and p2.
306
307
307 When moving from two parents to one, 'm' merged entries a
308 When moving from two parents to one, 'm' merged entries a
308 adjusted to normal and previous copy records discarded and
309 adjusted to normal and previous copy records discarded and
309 returned by the call.
310 returned by the call.
310
311
311 See localrepo.setparents()
312 See localrepo.setparents()
312 """
313 """
313 if self._parentwriters == 0:
314 if self._parentwriters == 0:
314 raise ValueError(
315 raise ValueError(
315 b"cannot set dirstate parent outside of "
316 b"cannot set dirstate parent outside of "
316 b"dirstate.parentchange context manager"
317 b"dirstate.parentchange context manager"
317 )
318 )
318
319
319 self._dirty = True
320 self._dirty = True
320 oldp2 = self._pl[1]
321 oldp2 = self._pl[1]
321 if self._origpl is None:
322 if self._origpl is None:
322 self._origpl = self._pl
323 self._origpl = self._pl
323 self._map.setparents(p1, p2)
324 self._map.setparents(p1, p2)
324 copies = {}
325 copies = {}
325 if oldp2 != nullid and p2 == nullid:
326 if oldp2 != nullid and p2 == nullid:
326 candidatefiles = self._map.nonnormalset.union(
327 candidatefiles = self._map.nonnormalset.union(
327 self._map.otherparentset
328 self._map.otherparentset
328 )
329 )
329 for f in candidatefiles:
330 for f in candidatefiles:
330 s = self._map.get(f)
331 s = self._map.get(f)
331 if s is None:
332 if s is None:
332 continue
333 continue
333
334
334 # Discard 'm' markers when moving away from a merge state
335 # Discard 'm' markers when moving away from a merge state
335 if s[0] == b'm':
336 if s[0] == b'm':
336 source = self._map.copymap.get(f)
337 source = self._map.copymap.get(f)
337 if source:
338 if source:
338 copies[f] = source
339 copies[f] = source
339 self.normallookup(f)
340 self.normallookup(f)
340 # Also fix up otherparent markers
341 # Also fix up otherparent markers
341 elif s[0] == b'n' and s[2] == -2:
342 elif s[0] == b'n' and s[2] == -2:
342 source = self._map.copymap.get(f)
343 source = self._map.copymap.get(f)
343 if source:
344 if source:
344 copies[f] = source
345 copies[f] = source
345 self.add(f)
346 self.add(f)
346 return copies
347 return copies
347
348
348 def setbranch(self, branch):
349 def setbranch(self, branch):
349 self.__class__._branch.set(self, encoding.fromlocal(branch))
350 self.__class__._branch.set(self, encoding.fromlocal(branch))
350 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
351 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
351 try:
352 try:
352 f.write(self._branch + b'\n')
353 f.write(self._branch + b'\n')
353 f.close()
354 f.close()
354
355
355 # make sure filecache has the correct stat info for _branch after
356 # make sure filecache has the correct stat info for _branch after
356 # replacing the underlying file
357 # replacing the underlying file
357 ce = self._filecache[b'_branch']
358 ce = self._filecache[b'_branch']
358 if ce:
359 if ce:
359 ce.refresh()
360 ce.refresh()
360 except: # re-raises
361 except: # re-raises
361 f.discard()
362 f.discard()
362 raise
363 raise
363
364
364 def invalidate(self):
365 def invalidate(self):
365 '''Causes the next access to reread the dirstate.
366 '''Causes the next access to reread the dirstate.
366
367
367 This is different from localrepo.invalidatedirstate() because it always
368 This is different from localrepo.invalidatedirstate() because it always
368 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
369 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
369 check whether the dirstate has changed before rereading it.'''
370 check whether the dirstate has changed before rereading it.'''
370
371
371 for a in (r"_map", r"_branch", r"_ignore"):
372 for a in (r"_map", r"_branch", r"_ignore"):
372 if a in self.__dict__:
373 if a in self.__dict__:
373 delattr(self, a)
374 delattr(self, a)
374 self._lastnormaltime = 0
375 self._lastnormaltime = 0
375 self._dirty = False
376 self._dirty = False
376 self._updatedfiles.clear()
377 self._updatedfiles.clear()
377 self._parentwriters = 0
378 self._parentwriters = 0
378 self._origpl = None
379 self._origpl = None
379
380
380 def copy(self, source, dest):
381 def copy(self, source, dest):
381 """Mark dest as a copy of source. Unmark dest if source is None."""
382 """Mark dest as a copy of source. Unmark dest if source is None."""
382 if source == dest:
383 if source == dest:
383 return
384 return
384 self._dirty = True
385 self._dirty = True
385 if source is not None:
386 if source is not None:
386 self._map.copymap[dest] = source
387 self._map.copymap[dest] = source
387 self._updatedfiles.add(source)
388 self._updatedfiles.add(source)
388 self._updatedfiles.add(dest)
389 self._updatedfiles.add(dest)
389 elif self._map.copymap.pop(dest, None):
390 elif self._map.copymap.pop(dest, None):
390 self._updatedfiles.add(dest)
391 self._updatedfiles.add(dest)
391
392
392 def copied(self, file):
393 def copied(self, file):
393 return self._map.copymap.get(file, None)
394 return self._map.copymap.get(file, None)
394
395
395 def copies(self):
396 def copies(self):
396 return self._map.copymap
397 return self._map.copymap
397
398
398 def _addpath(self, f, state, mode, size, mtime):
399 def _addpath(self, f, state, mode, size, mtime):
399 oldstate = self[f]
400 oldstate = self[f]
400 if state == b'a' or oldstate == b'r':
401 if state == b'a' or oldstate == b'r':
401 scmutil.checkfilename(f)
402 scmutil.checkfilename(f)
402 if self._map.hastrackeddir(f):
403 if self._map.hastrackeddir(f):
403 raise error.Abort(
404 raise error.Abort(
404 _(b'directory %r already in dirstate') % pycompat.bytestr(f)
405 _(b'directory %r already in dirstate') % pycompat.bytestr(f)
405 )
406 )
406 # shadows
407 # shadows
407 for d in util.finddirs(f):
408 for d in util.finddirs(f):
408 if self._map.hastrackeddir(d):
409 if self._map.hastrackeddir(d):
409 break
410 break
410 entry = self._map.get(d)
411 entry = self._map.get(d)
411 if entry is not None and entry[0] != b'r':
412 if entry is not None and entry[0] != b'r':
412 raise error.Abort(
413 raise error.Abort(
413 _(b'file %r in dirstate clashes with %r')
414 _(b'file %r in dirstate clashes with %r')
414 % (pycompat.bytestr(d), pycompat.bytestr(f))
415 % (pycompat.bytestr(d), pycompat.bytestr(f))
415 )
416 )
416 self._dirty = True
417 self._dirty = True
417 self._updatedfiles.add(f)
418 self._updatedfiles.add(f)
418 self._map.addfile(f, oldstate, state, mode, size, mtime)
419 self._map.addfile(f, oldstate, state, mode, size, mtime)
419
420
420 def normal(self, f, parentfiledata=None):
421 def normal(self, f, parentfiledata=None):
421 '''Mark a file normal and clean.
422 '''Mark a file normal and clean.
422
423
423 parentfiledata: (mode, size, mtime) of the clean file
424 parentfiledata: (mode, size, mtime) of the clean file
424
425
425 parentfiledata should be computed from memory (for mode,
426 parentfiledata should be computed from memory (for mode,
426 size), as or close as possible from the point where we
427 size), as or close as possible from the point where we
427 determined the file was clean, to limit the risk of the
428 determined the file was clean, to limit the risk of the
428 file having been changed by an external process between the
429 file having been changed by an external process between the
429 moment where the file was determined to be clean and now.'''
430 moment where the file was determined to be clean and now.'''
430 if parentfiledata:
431 if parentfiledata:
431 (mode, size, mtime) = parentfiledata
432 (mode, size, mtime) = parentfiledata
432 else:
433 else:
433 s = os.lstat(self._join(f))
434 s = os.lstat(self._join(f))
434 mode = s.st_mode
435 mode = s.st_mode
435 size = s.st_size
436 size = s.st_size
436 mtime = s[stat.ST_MTIME]
437 mtime = s[stat.ST_MTIME]
437 self._addpath(f, b'n', mode, size & _rangemask, mtime & _rangemask)
438 self._addpath(f, b'n', mode, size & _rangemask, mtime & _rangemask)
438 self._map.copymap.pop(f, None)
439 self._map.copymap.pop(f, None)
439 if f in self._map.nonnormalset:
440 if f in self._map.nonnormalset:
440 self._map.nonnormalset.remove(f)
441 self._map.nonnormalset.remove(f)
441 if mtime > self._lastnormaltime:
442 if mtime > self._lastnormaltime:
442 # Remember the most recent modification timeslot for status(),
443 # Remember the most recent modification timeslot for status(),
443 # to make sure we won't miss future size-preserving file content
444 # to make sure we won't miss future size-preserving file content
444 # modifications that happen within the same timeslot.
445 # modifications that happen within the same timeslot.
445 self._lastnormaltime = mtime
446 self._lastnormaltime = mtime
446
447
447 def normallookup(self, f):
448 def normallookup(self, f):
448 '''Mark a file normal, but possibly dirty.'''
449 '''Mark a file normal, but possibly dirty.'''
449 if self._pl[1] != nullid:
450 if self._pl[1] != nullid:
450 # if there is a merge going on and the file was either
451 # if there is a merge going on and the file was either
451 # in state 'm' (-1) or coming from other parent (-2) before
452 # in state 'm' (-1) or coming from other parent (-2) before
452 # being removed, restore that state.
453 # being removed, restore that state.
453 entry = self._map.get(f)
454 entry = self._map.get(f)
454 if entry is not None:
455 if entry is not None:
455 if entry[0] == b'r' and entry[2] in (-1, -2):
456 if entry[0] == b'r' and entry[2] in (-1, -2):
456 source = self._map.copymap.get(f)
457 source = self._map.copymap.get(f)
457 if entry[2] == -1:
458 if entry[2] == -1:
458 self.merge(f)
459 self.merge(f)
459 elif entry[2] == -2:
460 elif entry[2] == -2:
460 self.otherparent(f)
461 self.otherparent(f)
461 if source:
462 if source:
462 self.copy(source, f)
463 self.copy(source, f)
463 return
464 return
464 if entry[0] == b'm' or entry[0] == b'n' and entry[2] == -2:
465 if entry[0] == b'm' or entry[0] == b'n' and entry[2] == -2:
465 return
466 return
466 self._addpath(f, b'n', 0, -1, -1)
467 self._addpath(f, b'n', 0, -1, -1)
467 self._map.copymap.pop(f, None)
468 self._map.copymap.pop(f, None)
468
469
469 def otherparent(self, f):
470 def otherparent(self, f):
470 '''Mark as coming from the other parent, always dirty.'''
471 '''Mark as coming from the other parent, always dirty.'''
471 if self._pl[1] == nullid:
472 if self._pl[1] == nullid:
472 raise error.Abort(
473 raise error.Abort(
473 _(b"setting %r to other parent only allowed in merges") % f
474 _(b"setting %r to other parent only allowed in merges") % f
474 )
475 )
475 if f in self and self[f] == b'n':
476 if f in self and self[f] == b'n':
476 # merge-like
477 # merge-like
477 self._addpath(f, b'm', 0, -2, -1)
478 self._addpath(f, b'm', 0, -2, -1)
478 else:
479 else:
479 # add-like
480 # add-like
480 self._addpath(f, b'n', 0, -2, -1)
481 self._addpath(f, b'n', 0, -2, -1)
481 self._map.copymap.pop(f, None)
482 self._map.copymap.pop(f, None)
482
483
483 def add(self, f):
484 def add(self, f):
484 '''Mark a file added.'''
485 '''Mark a file added.'''
485 self._addpath(f, b'a', 0, -1, -1)
486 self._addpath(f, b'a', 0, -1, -1)
486 self._map.copymap.pop(f, None)
487 self._map.copymap.pop(f, None)
487
488
488 def remove(self, f):
489 def remove(self, f):
489 '''Mark a file removed.'''
490 '''Mark a file removed.'''
490 self._dirty = True
491 self._dirty = True
491 oldstate = self[f]
492 oldstate = self[f]
492 size = 0
493 size = 0
493 if self._pl[1] != nullid:
494 if self._pl[1] != nullid:
494 entry = self._map.get(f)
495 entry = self._map.get(f)
495 if entry is not None:
496 if entry is not None:
496 # backup the previous state
497 # backup the previous state
497 if entry[0] == b'm': # merge
498 if entry[0] == b'm': # merge
498 size = -1
499 size = -1
499 elif entry[0] == b'n' and entry[2] == -2: # other parent
500 elif entry[0] == b'n' and entry[2] == -2: # other parent
500 size = -2
501 size = -2
501 self._map.otherparentset.add(f)
502 self._map.otherparentset.add(f)
502 self._updatedfiles.add(f)
503 self._updatedfiles.add(f)
503 self._map.removefile(f, oldstate, size)
504 self._map.removefile(f, oldstate, size)
504 if size == 0:
505 if size == 0:
505 self._map.copymap.pop(f, None)
506 self._map.copymap.pop(f, None)
506
507
507 def merge(self, f):
508 def merge(self, f):
508 '''Mark a file merged.'''
509 '''Mark a file merged.'''
509 if self._pl[1] == nullid:
510 if self._pl[1] == nullid:
510 return self.normallookup(f)
511 return self.normallookup(f)
511 return self.otherparent(f)
512 return self.otherparent(f)
512
513
513 def drop(self, f):
514 def drop(self, f):
514 '''Drop a file from the dirstate'''
515 '''Drop a file from the dirstate'''
515 oldstate = self[f]
516 oldstate = self[f]
516 if self._map.dropfile(f, oldstate):
517 if self._map.dropfile(f, oldstate):
517 self._dirty = True
518 self._dirty = True
518 self._updatedfiles.add(f)
519 self._updatedfiles.add(f)
519 self._map.copymap.pop(f, None)
520 self._map.copymap.pop(f, None)
520
521
521 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
522 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
522 if exists is None:
523 if exists is None:
523 exists = os.path.lexists(os.path.join(self._root, path))
524 exists = os.path.lexists(os.path.join(self._root, path))
524 if not exists:
525 if not exists:
525 # Maybe a path component exists
526 # Maybe a path component exists
526 if not ignoremissing and b'/' in path:
527 if not ignoremissing and b'/' in path:
527 d, f = path.rsplit(b'/', 1)
528 d, f = path.rsplit(b'/', 1)
528 d = self._normalize(d, False, ignoremissing, None)
529 d = self._normalize(d, False, ignoremissing, None)
529 folded = d + b"/" + f
530 folded = d + b"/" + f
530 else:
531 else:
531 # No path components, preserve original case
532 # No path components, preserve original case
532 folded = path
533 folded = path
533 else:
534 else:
534 # recursively normalize leading directory components
535 # recursively normalize leading directory components
535 # against dirstate
536 # against dirstate
536 if b'/' in normed:
537 if b'/' in normed:
537 d, f = normed.rsplit(b'/', 1)
538 d, f = normed.rsplit(b'/', 1)
538 d = self._normalize(d, False, ignoremissing, True)
539 d = self._normalize(d, False, ignoremissing, True)
539 r = self._root + b"/" + d
540 r = self._root + b"/" + d
540 folded = d + b"/" + util.fspath(f, r)
541 folded = d + b"/" + util.fspath(f, r)
541 else:
542 else:
542 folded = util.fspath(normed, self._root)
543 folded = util.fspath(normed, self._root)
543 storemap[normed] = folded
544 storemap[normed] = folded
544
545
545 return folded
546 return folded
546
547
547 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
548 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
548 normed = util.normcase(path)
549 normed = util.normcase(path)
549 folded = self._map.filefoldmap.get(normed, None)
550 folded = self._map.filefoldmap.get(normed, None)
550 if folded is None:
551 if folded is None:
551 if isknown:
552 if isknown:
552 folded = path
553 folded = path
553 else:
554 else:
554 folded = self._discoverpath(
555 folded = self._discoverpath(
555 path, normed, ignoremissing, exists, self._map.filefoldmap
556 path, normed, ignoremissing, exists, self._map.filefoldmap
556 )
557 )
557 return folded
558 return folded
558
559
def _normalize(self, path, isknown, ignoremissing=False, exists=None):
    """Return the case-normalized form of a file *or directory* path.

    Checks the file fold map first, then the directory fold map; on a
    miss, either trusts the caller-supplied case (``isknown``) or runs
    on-disk discovery, caching the result in ``dirfoldmap``.
    """
    normed = util.normcase(path)
    folded = self._map.filefoldmap.get(normed, None)
    if folded is None:
        folded = self._map.dirfoldmap.get(normed, None)
    if folded is None:
        if isknown:
            folded = path
        else:
            # store discovered result in dirfoldmap so that future
            # normalizefile calls don't start matching directories
            folded = self._discoverpath(
                path, normed, ignoremissing, exists, self._map.dirfoldmap
            )
    return folded
574
575
def normalize(self, path, isknown=False, ignoremissing=False):
    '''
    normalize the case of a pathname when on a casefolding filesystem

    isknown specifies whether the filename came from walking the
    disk, to avoid extra filesystem access.

    If ignoremissing is True, missing paths are returned
    unchanged. Otherwise, we try harder to normalize possibly
    existing path components.

    The normalized case is determined based on the following precedence:

    - version of name already stored in the dirstate
    - version of name stored on disk
    - version provided via command arguments
    '''
    # On case-sensitive filesystems there is nothing to normalize.
    if self._checkcase:
        return self._normalize(path, isknown, ignoremissing)
    return path
596
597
def clear(self):
    """Forget all in-memory dirstate content and mark the dirstate dirty."""
    self._map.clear()
    # reset the "last normal mtime" watermark used for ambiguity detection
    self._lastnormaltime = 0
    self._updatedfiles.clear()
    self._dirty = True
602
603
def rebuild(self, parent, allfiles, changedfiles=None):
    """Rebuild the dirstate against ``parent``.

    ``allfiles`` is the set of files tracked in the target revision;
    ``changedfiles`` restricts the rebuild to those entries (``None``
    means rebuild everything). Files in ``changedfiles`` but not in
    ``allfiles`` are dropped from tracking.
    """
    if changedfiles is None:
        # Rebuild entire dirstate
        changedfiles = allfiles
    # clear() resets the watermark; preserve it across the rebuild
    lastnormaltime = self._lastnormaltime
    self.clear()
    self._lastnormaltime = lastnormaltime

    if self._origpl is None:
        self._origpl = self._pl
    self._map.setparents(parent, nullid)
    for f in changedfiles:
        if f in allfiles:
            self.normallookup(f)
        else:
            self.drop(f)

    self._dirty = True
621
622
def identity(self):
    '''Return identity of dirstate itself to detect changing in storage

    If identity of previous dirstate is equal to this, writing
    changes based on the former dirstate out can keep consistency.
    '''
    return self._map.identity
629
630
def write(self, tr):
    """Write in-memory dirstate changes out, or schedule them on ``tr``.

    With a transaction, the actual write is delayed via a file
    generator; without one, the dirstate file is written immediately
    (atomically, with ambiguity checking).  A no-op when not dirty.
    """
    if not self._dirty:
        return

    filename = self._filename
    if tr:
        # 'dirstate.write()' is not only for writing in-memory
        # changes out, but also for dropping ambiguous timestamp.
        # delayed writing re-raise "ambiguous timestamp issue".
        # See also the wiki page below for detail:
        # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan

        # emulate dropping timestamp in 'parsers.pack_dirstate'
        now = _getfsnow(self._opener)
        self._map.clearambiguoustimes(self._updatedfiles, now)

        # emulate that all 'dirstate.normal' results are written out
        self._lastnormaltime = 0
        self._updatedfiles.clear()

        # delay writing in-memory changes out
        tr.addfilegenerator(
            b'dirstate',
            (self._filename,),
            self._writedirstate,
            location=b'plain',
        )
        return

    st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
    self._writedirstate(st)
661
662
def addparentchangecallback(self, category, callback):
    """add a callback to be called when the wd parents are changed

    Callback will be called with the following arguments:
        dirstate, (oldp1, oldp2), (newp1, newp2)

    Category is a unique identifier to allow overwriting an old callback
    with a newer callback.
    """
    self._plchangecallbacks[category] = callback
672
673
def _writedirstate(self, st):
    """Serialize the dirstate map into the open file object ``st``.

    Also fires parent-change callbacks and, when configured via
    ``debug.dirstate.delaywrite``, sleeps so that no entry's mtime
    equals 'now' (which would make it ambiguous).
    """
    # notify callbacks about parents change
    if self._origpl is not None and self._origpl != self._pl:
        for c, callback in sorted(
            pycompat.iteritems(self._plchangecallbacks)
        ):
            callback(self, self._origpl, self._pl)
        self._origpl = None
    # use the modification time of the newly created temporary file as the
    # filesystem's notion of 'now'
    now = util.fstat(st)[stat.ST_MTIME] & _rangemask

    # enough 'delaywrite' prevents 'pack_dirstate' from dropping
    # timestamp of each entries in dirstate, because of 'now > mtime'
    delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
    if delaywrite > 0:
        # do we have any files to delay for?
        items = pycompat.iteritems(self._map)
        for f, e in items:
            if e[0] == b'n' and e[3] == now:
                import time  # to avoid useless import

                # rather than sleep n seconds, sleep until the next
                # multiple of n seconds
                clock = time.time()
                start = int(clock) - (int(clock) % delaywrite)
                end = start + delaywrite
                time.sleep(end - clock)
                now = end  # trust our estimate that the end is near now
                break
        # since the iterator is potentially not deleted,
        # delete the iterator to release the reference for the Rust
        # implementation.
        # TODO make the Rust implementation behave like Python
        # since this would not work with a non ref-counting GC.
        del items

    self._map.write(st, now)
    self._lastnormaltime = 0
    self._dirty = False
713
714
def _dirignore(self, f):
    """Return True if ``f`` or any of its parent directories is ignored."""
    if self._ignore(f):
        return True
    # also ignored if any leading directory component is ignored
    for p in util.finddirs(f):
        if self._ignore(p):
            return True
    return False
721
722
def _ignorefiles(self):
    """Return the list of ignore-pattern file paths in effect.

    Includes the repository's ``.hgignore`` (if present) plus any
    ``ui.ignore`` / ``ui.ignore.*`` configuration entries.
    """
    files = []
    if os.path.exists(self._join(b'.hgignore')):
        files.append(self._join(b'.hgignore'))
    for name, path in self._ui.configitems(b"ui"):
        if name == b'ignore' or name.startswith(b'ignore.'):
            # we need to use os.path.join here rather than self._join
            # because path is arbitrary and user-specified
            files.append(os.path.join(self._rootdir, util.expandpath(path)))
    return files
732
733
def _ignorefileandline(self, f):
    """Return (file, lineno, line) of the first ignore rule matching ``f``.

    Walks all ignore files breadth-first, following ``subinclude``
    patterns (each visited at most once). Returns ``(None, -1, b"")``
    when no pattern matches.
    """
    files = collections.deque(self._ignorefiles())
    visited = set()
    while files:
        i = files.popleft()
        patterns = matchmod.readpatternfile(
            i, self._ui.warn, sourceinfo=True
        )
        for pattern, lineno, line in patterns:
            kind, p = matchmod._patsplit(pattern, b'glob')
            if kind == b"subinclude":
                if p not in visited:
                    files.append(p)
                continue
            m = matchmod.match(
                self._root, b'', [], [pattern], warn=self._ui.warn
            )
            if m(f):
                return (i, lineno, line)
        visited.add(i)
    return (None, -1, b"")
754
755
def _walkexplicit(self, match, subrepos):
    '''Get stat data about the files explicitly specified by match.

    Return a triple (results, dirsfound, dirsnotfound).
    - results is a mapping from filename to stat result. It also contains
      listings mapping subrepos and .hg to None.
    - dirsfound is a list of files found to be directories.
    - dirsnotfound is a list of files that the dirstate thinks are
      directories and that were not found.'''

    def badtype(mode):
        # translate an unsupported st_mode into a human-readable message
        kind = _(b'unknown')
        if stat.S_ISCHR(mode):
            kind = _(b'character device')
        elif stat.S_ISBLK(mode):
            kind = _(b'block device')
        elif stat.S_ISFIFO(mode):
            kind = _(b'fifo')
        elif stat.S_ISSOCK(mode):
            kind = _(b'socket')
        elif stat.S_ISDIR(mode):
            kind = _(b'directory')
        return _(b'unsupported file type (type is %s)') % kind

    matchedir = match.explicitdir
    badfn = match.bad
    dmap = self._map
    lstat = os.lstat
    getkind = stat.S_IFMT
    dirkind = stat.S_IFDIR
    regkind = stat.S_IFREG
    lnkkind = stat.S_IFLNK
    join = self._join
    dirsfound = []
    foundadd = dirsfound.append
    dirsnotfound = []
    notfoundadd = dirsnotfound.append

    if not match.isexact() and self._checkcase:
        normalize = self._normalize
    else:
        normalize = None

    # Drop explicit files that live inside a subrepo; both lists are
    # sorted so a single merge-style pass suffices.
    files = sorted(match.files())
    subrepos.sort()
    i, j = 0, 0
    while i < len(files) and j < len(subrepos):
        subpath = subrepos[j] + b"/"
        if files[i] < subpath:
            i += 1
            continue
        while i < len(files) and files[i].startswith(subpath):
            del files[i]
        j += 1

    if not files or b'' in files:
        files = [b'']
        # constructing the foldmap is expensive, so don't do it for the
        # common case where files is ['']
        normalize = None
    results = dict.fromkeys(subrepos)
    results[b'.hg'] = None

    for ff in files:
        if normalize:
            nf = normalize(ff, False, True)
        else:
            nf = ff
        if nf in results:
            continue

        try:
            st = lstat(join(nf))
            kind = getkind(st.st_mode)
            if kind == dirkind:
                if nf in dmap:
                    # file replaced by dir on disk but still in dirstate
                    results[nf] = None
                if matchedir:
                    matchedir(nf)
                foundadd((nf, ff))
            elif kind == regkind or kind == lnkkind:
                results[nf] = st
            else:
                badfn(ff, badtype(kind))
                if nf in dmap:
                    results[nf] = None
        except OSError as inst:  # nf not found on disk - it is dirstate only
            if nf in dmap:  # does it exactly match a missing file?
                results[nf] = None
            else:  # does it match a missing directory?
                if self._map.hasdir(nf):
                    if matchedir:
                        matchedir(nf)
                    notfoundadd(nf)
                else:
                    badfn(ff, encoding.strtolocal(inst.strerror))

    # match.files() may contain explicitly-specified paths that shouldn't
    # be taken; drop them from the list of files found. dirsfound/notfound
    # aren't filtered here because they will be tested later.
    if match.anypats():
        for f in list(results):
            if f == b'.hg' or f in subrepos:
                # keep sentinel to disable further out-of-repo walks
                continue
            if not match(f):
                del results[f]

    # Case insensitive filesystems cannot rely on lstat() failing to detect
    # a case-only rename. Prune the stat object for any file that does not
    # match the case in the filesystem, if there are multiple files that
    # normalize to the same path.
    if match.isexact() and self._checkcase:
        normed = {}

        for f, st in pycompat.iteritems(results):
            if st is None:
                continue

            nc = util.normcase(f)
            paths = normed.get(nc)

            if paths is None:
                paths = set()
                normed[nc] = paths

            paths.add(f)

        for norm, paths in pycompat.iteritems(normed):
            if len(paths) > 1:
                for path in paths:
                    folded = self._discoverpath(
                        path, norm, True, None, self._map.dirfoldmap
                    )
                    if path != folded:
                        results[path] = None

    return results, dirsfound, dirsnotfound
894
895
def walk(self, match, subrepos, unknown, ignored, full=True):
    '''
    Walk recursively through the directory tree, finding all files
    matched by match.

    If full is False, maybe skip some known-clean files.

    Return a dict mapping filename to stat-like object (either
    mercurial.osutil.stat instance or return value of os.stat()).

    '''
    # full is a flag that extensions that hook into walk can use -- this
    # implementation doesn't use it at all. This satisfies the contract
    # because we only guarantee a "maybe".

    if ignored:
        ignore = util.never
        dirignore = util.never
    elif unknown:
        ignore = self._ignore
        dirignore = self._dirignore
    else:
        # if not unknown and not ignored, drop dir recursion and step 2
        ignore = util.always
        dirignore = util.always

    matchfn = match.matchfn
    matchalways = match.always()
    matchtdir = match.traversedir
    dmap = self._map
    listdir = util.listdir
    lstat = os.lstat
    dirkind = stat.S_IFDIR
    regkind = stat.S_IFREG
    lnkkind = stat.S_IFLNK
    join = self._join

    exact = skipstep3 = False
    if match.isexact():  # match.exact
        exact = True
        dirignore = util.always  # skip step 2
    elif match.prefix():  # match.match, no patterns
        skipstep3 = True

    if not exact and self._checkcase:
        normalize = self._normalize
        normalizefile = self._normalizefile
        skipstep3 = False
    else:
        normalize = self._normalize
        normalizefile = None

    # step 1: find all explicit files
    results, work, dirsnotfound = self._walkexplicit(match, subrepos)

    skipstep3 = skipstep3 and not (work or dirsnotfound)
    work = [d for d in work if not dirignore(d[0])]

    # step 2: visit subdirectories
    def traverse(work, alreadynormed):
        wadd = work.append
        while work:
            tracing.counter('dirstate.walk work', len(work))
            nd = work.pop()
            visitentries = match.visitchildrenset(nd)
            if not visitentries:
                continue
            if visitentries == b'this' or visitentries == b'all':
                visitentries = None
            skip = None
            if nd != b'':
                skip = b'.hg'
            try:
                with tracing.log('dirstate.walk.traverse listdir %s', nd):
                    entries = listdir(join(nd), stat=True, skip=skip)
            except OSError as inst:
                if inst.errno in (errno.EACCES, errno.ENOENT):
                    match.bad(
                        self.pathto(nd), encoding.strtolocal(inst.strerror)
                    )
                    continue
                raise
            for f, kind, st in entries:
                # Some matchers may return files in the visitentries set,
                # instead of 'this', if the matcher explicitly mentions them
                # and is not an exactmatcher. This is acceptable; we do not
                # make any hard assumptions about file-or-directory below
                # based on the presence of `f` in visitentries. If
                # visitchildrenset returned a set, we can always skip the
                # entries *not* in the set it provided regardless of whether
                # they're actually a file or a directory.
                if visitentries and f not in visitentries:
                    continue
                if normalizefile:
                    # even though f might be a directory, we're only
                    # interested in comparing it to files currently in the
                    # dmap -- therefore normalizefile is enough
                    nf = normalizefile(
                        nd and (nd + b"/" + f) or f, True, True
                    )
                else:
                    nf = nd and (nd + b"/" + f) or f
                if nf not in results:
                    if kind == dirkind:
                        if not ignore(nf):
                            if matchtdir:
                                matchtdir(nf)
                            wadd(nf)
                        if nf in dmap and (matchalways or matchfn(nf)):
                            results[nf] = None
                    elif kind == regkind or kind == lnkkind:
                        if nf in dmap:
                            if matchalways or matchfn(nf):
                                results[nf] = st
                        elif (matchalways or matchfn(nf)) and not ignore(
                            nf
                        ):
                            # unknown file -- normalize if necessary
                            if not alreadynormed:
                                nf = normalize(nf, False, True)
                            results[nf] = st
                    elif nf in dmap and (matchalways or matchfn(nf)):
                        results[nf] = None

    for nd, d in work:
        # alreadynormed means that processwork doesn't have to do any
        # expensive directory normalization
        alreadynormed = not normalize or nd == d
        traverse([d], alreadynormed)

    for s in subrepos:
        del results[s]
    del results[b'.hg']

    # step 3: visit remaining files from dmap
    if not skipstep3 and not exact:
        # If a dmap file is not in results yet, it was either
        # a) not matching matchfn b) ignored, c) missing, or d) under a
        # symlink directory.
        if not results and matchalways:
            visit = [f for f in dmap]
        else:
            visit = [f for f in dmap if f not in results and matchfn(f)]
        visit.sort()

        if unknown:
            # unknown == True means we walked all dirs under the roots
            # that wasn't ignored, and everything that matched was stat'ed
            # and is already in results.
            # The rest must thus be ignored or under a symlink.
            audit_path = pathutil.pathauditor(self._root, cached=True)

            for nf in iter(visit):
                # If a stat for the same file was already added with a
                # different case, don't add one for this, since that would
                # make it appear as if the file exists under both names
                # on disk.
                if (
                    normalizefile
                    and normalizefile(nf, True, True) in results
                ):
                    results[nf] = None
                # Report ignored items in the dmap as long as they are not
                # under a symlink directory.
                elif audit_path.check(nf):
                    try:
                        results[nf] = lstat(join(nf))
                        # file was just ignored, no links, and exists
                    except OSError:
                        # file doesn't exist
                        results[nf] = None
                else:
                    # It's either missing or under a symlink directory
                    # which we in this case report as missing
                    results[nf] = None
        else:
            # We may not have walked the full directory tree above,
            # so stat and check everything we missed.
            iv = iter(visit)
            for st in util.statfiles([join(i) for i in visit]):
                results[next(iv)] = st
    return results
1077
1078
1078 def status(self, match, subrepos, ignored, clean, unknown):
1079 def status(self, match, subrepos, ignored, clean, unknown):
1079 '''Determine the status of the working copy relative to the
1080 '''Determine the status of the working copy relative to the
1080 dirstate and return a pair of (unsure, status), where status is of type
1081 dirstate and return a pair of (unsure, status), where status is of type
1081 scmutil.status and:
1082 scmutil.status and:
1082
1083
1083 unsure:
1084 unsure:
1084 files that might have been modified since the dirstate was
1085 files that might have been modified since the dirstate was
1085 written, but need to be read to be sure (size is the same
1086 written, but need to be read to be sure (size is the same
1086 but mtime differs)
1087 but mtime differs)
1087 status.modified:
1088 status.modified:
1088 files that have definitely been modified since the dirstate
1089 files that have definitely been modified since the dirstate
1089 was written (different size or mode)
1090 was written (different size or mode)
1090 status.clean:
1091 status.clean:
1091 files that have definitely not been modified since the
1092 files that have definitely not been modified since the
1092 dirstate was written
1093 dirstate was written
1093 '''
1094 '''
1094 listignored, listclean, listunknown = ignored, clean, unknown
1095 listignored, listclean, listunknown = ignored, clean, unknown
1095 lookup, modified, added, unknown, ignored = [], [], [], [], []
1096 lookup, modified, added, unknown, ignored = [], [], [], [], []
1096 removed, deleted, clean = [], [], []
1097 removed, deleted, clean = [], [], []
1097
1098
1098 dmap = self._map
1099 dmap = self._map
1099 dmap.preload()
1100 dmap.preload()
1101
1102 use_rust = True
1103 if rustmod is None:
1104 use_rust = False
1105 elif subrepos:
1106 use_rust = False
1107 if bool(listunknown):
1108 # Pathauditor does not exist yet in Rust, unknown files
1109 # can't be trusted.
1110 use_rust = False
1111 elif self._ignorefiles() and listignored:
1112 # Rust has no ignore mechanism yet, so don't use Rust for
1113 # commands that need ignore.
1114 use_rust = False
1115 elif not match.always():
1116 # Matchers have yet to be implemented
1117 use_rust = False
1118 # We don't yet have a mechanism for extensions
1119 elif sparse.enabled:
1120 use_rust = False
1121 elif not getattr(self, "_fsmonitordisable", True):
1122 use_rust = False
1123
1124 if use_rust:
1125 (
1126 lookup,
1127 modified,
1128 added,
1129 removed,
1130 deleted,
1131 unknown,
1132 clean,
1133 ) = rustmod.status(
1134 dmap._rustmap,
1135 self._rootdir,
1136 match.files(),
1137 bool(listclean),
1138 self._lastnormaltime,
1139 self._checkexec,
1140 )
1141
1142 status = scmutil.status(
1143 modified=modified,
1144 added=added,
1145 removed=removed,
1146 deleted=deleted,
1147 unknown=unknown,
1148 ignored=ignored,
1149 clean=clean,
1150 )
1151 return (lookup, status)
1152
1100 dcontains = dmap.__contains__
1153 dcontains = dmap.__contains__
1101 dget = dmap.__getitem__
1154 dget = dmap.__getitem__
1102 ladd = lookup.append # aka "unsure"
1155 ladd = lookup.append # aka "unsure"
1103 madd = modified.append
1156 madd = modified.append
1104 aadd = added.append
1157 aadd = added.append
1105 uadd = unknown.append
1158 uadd = unknown.append
1106 iadd = ignored.append
1159 iadd = ignored.append
1107 radd = removed.append
1160 radd = removed.append
1108 dadd = deleted.append
1161 dadd = deleted.append
1109 cadd = clean.append
1162 cadd = clean.append
1110 mexact = match.exact
1163 mexact = match.exact
1111 dirignore = self._dirignore
1164 dirignore = self._dirignore
1112 checkexec = self._checkexec
1165 checkexec = self._checkexec
1113 copymap = self._map.copymap
1166 copymap = self._map.copymap
1114 lastnormaltime = self._lastnormaltime
1167 lastnormaltime = self._lastnormaltime
1115
1168
1116 # We need to do full walks when either
1169 # We need to do full walks when either
1117 # - we're listing all clean files, or
1170 # - we're listing all clean files, or
1118 # - match.traversedir does something, because match.traversedir should
1171 # - match.traversedir does something, because match.traversedir should
1119 # be called for every dir in the working dir
1172 # be called for every dir in the working dir
1120 full = listclean or match.traversedir is not None
1173 full = listclean or match.traversedir is not None
1121 for fn, st in pycompat.iteritems(
1174 for fn, st in pycompat.iteritems(
1122 self.walk(match, subrepos, listunknown, listignored, full=full)
1175 self.walk(match, subrepos, listunknown, listignored, full=full)
1123 ):
1176 ):
1124 if not dcontains(fn):
1177 if not dcontains(fn):
1125 if (listignored or mexact(fn)) and dirignore(fn):
1178 if (listignored or mexact(fn)) and dirignore(fn):
1126 if listignored:
1179 if listignored:
1127 iadd(fn)
1180 iadd(fn)
1128 else:
1181 else:
1129 uadd(fn)
1182 uadd(fn)
1130 continue
1183 continue
1131
1184
1132 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1185 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1133 # written like that for performance reasons. dmap[fn] is not a
1186 # written like that for performance reasons. dmap[fn] is not a
1134 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1187 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1135 # opcode has fast paths when the value to be unpacked is a tuple or
1188 # opcode has fast paths when the value to be unpacked is a tuple or
1136 # a list, but falls back to creating a full-fledged iterator in
1189 # a list, but falls back to creating a full-fledged iterator in
1137 # general. That is much slower than simply accessing and storing the
1190 # general. That is much slower than simply accessing and storing the
1138 # tuple members one by one.
1191 # tuple members one by one.
1139 t = dget(fn)
1192 t = dget(fn)
1140 state = t[0]
1193 state = t[0]
1141 mode = t[1]
1194 mode = t[1]
1142 size = t[2]
1195 size = t[2]
1143 time = t[3]
1196 time = t[3]
1144
1197
1145 if not st and state in b"nma":
1198 if not st and state in b"nma":
1146 dadd(fn)
1199 dadd(fn)
1147 elif state == b'n':
1200 elif state == b'n':
1148 if (
1201 if (
1149 size >= 0
1202 size >= 0
1150 and (
1203 and (
1151 (size != st.st_size and size != st.st_size & _rangemask)
1204 (size != st.st_size and size != st.st_size & _rangemask)
1152 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1205 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1153 )
1206 )
1154 or size == -2 # other parent
1207 or size == -2 # other parent
1155 or fn in copymap
1208 or fn in copymap
1156 ):
1209 ):
1157 madd(fn)
1210 madd(fn)
1158 elif (
1211 elif (
1159 time != st[stat.ST_MTIME]
1212 time != st[stat.ST_MTIME]
1160 and time != st[stat.ST_MTIME] & _rangemask
1213 and time != st[stat.ST_MTIME] & _rangemask
1161 ):
1214 ):
1162 ladd(fn)
1215 ladd(fn)
1163 elif st[stat.ST_MTIME] == lastnormaltime:
1216 elif st[stat.ST_MTIME] == lastnormaltime:
1164 # fn may have just been marked as normal and it may have
1217 # fn may have just been marked as normal and it may have
1165 # changed in the same second without changing its size.
1218 # changed in the same second without changing its size.
1166 # This can happen if we quickly do multiple commits.
1219 # This can happen if we quickly do multiple commits.
1167 # Force lookup, so we don't miss such a racy file change.
1220 # Force lookup, so we don't miss such a racy file change.
1168 ladd(fn)
1221 ladd(fn)
1169 elif listclean:
1222 elif listclean:
1170 cadd(fn)
1223 cadd(fn)
1171 elif state == b'm':
1224 elif state == b'm':
1172 madd(fn)
1225 madd(fn)
1173 elif state == b'a':
1226 elif state == b'a':
1174 aadd(fn)
1227 aadd(fn)
1175 elif state == b'r':
1228 elif state == b'r':
1176 radd(fn)
1229 radd(fn)
1177
1230
1178 return (
1231 return (
1179 lookup,
1232 lookup,
1180 scmutil.status(
1233 scmutil.status(
1181 modified, added, removed, deleted, unknown, ignored, clean
1234 modified, added, removed, deleted, unknown, ignored, clean
1182 ),
1235 ),
1183 )
1236 )
1184
1237
1185 def matches(self, match):
1238 def matches(self, match):
1186 '''
1239 '''
1187 return files in the dirstate (in whatever state) filtered by match
1240 return files in the dirstate (in whatever state) filtered by match
1188 '''
1241 '''
1189 dmap = self._map
1242 dmap = self._map
1190 if match.always():
1243 if match.always():
1191 return dmap.keys()
1244 return dmap.keys()
1192 files = match.files()
1245 files = match.files()
1193 if match.isexact():
1246 if match.isexact():
1194 # fast path -- filter the other way around, since typically files is
1247 # fast path -- filter the other way around, since typically files is
1195 # much smaller than dmap
1248 # much smaller than dmap
1196 return [f for f in files if f in dmap]
1249 return [f for f in files if f in dmap]
1197 if match.prefix() and all(fn in dmap for fn in files):
1250 if match.prefix() and all(fn in dmap for fn in files):
1198 # fast path -- all the values are known to be files, so just return
1251 # fast path -- all the values are known to be files, so just return
1199 # that
1252 # that
1200 return list(files)
1253 return list(files)
1201 return [f for f in dmap if match(f)]
1254 return [f for f in dmap if match(f)]
1202
1255
1203 def _actualfilename(self, tr):
1256 def _actualfilename(self, tr):
1204 if tr:
1257 if tr:
1205 return self._pendingfilename
1258 return self._pendingfilename
1206 else:
1259 else:
1207 return self._filename
1260 return self._filename
1208
1261
1209 def savebackup(self, tr, backupname):
1262 def savebackup(self, tr, backupname):
1210 '''Save current dirstate into backup file'''
1263 '''Save current dirstate into backup file'''
1211 filename = self._actualfilename(tr)
1264 filename = self._actualfilename(tr)
1212 assert backupname != filename
1265 assert backupname != filename
1213
1266
1214 # use '_writedirstate' instead of 'write' to write changes certainly,
1267 # use '_writedirstate' instead of 'write' to write changes certainly,
1215 # because the latter omits writing out if transaction is running.
1268 # because the latter omits writing out if transaction is running.
1216 # output file will be used to create backup of dirstate at this point.
1269 # output file will be used to create backup of dirstate at this point.
1217 if self._dirty or not self._opener.exists(filename):
1270 if self._dirty or not self._opener.exists(filename):
1218 self._writedirstate(
1271 self._writedirstate(
1219 self._opener(filename, b"w", atomictemp=True, checkambig=True)
1272 self._opener(filename, b"w", atomictemp=True, checkambig=True)
1220 )
1273 )
1221
1274
1222 if tr:
1275 if tr:
1223 # ensure that subsequent tr.writepending returns True for
1276 # ensure that subsequent tr.writepending returns True for
1224 # changes written out above, even if dirstate is never
1277 # changes written out above, even if dirstate is never
1225 # changed after this
1278 # changed after this
1226 tr.addfilegenerator(
1279 tr.addfilegenerator(
1227 b'dirstate',
1280 b'dirstate',
1228 (self._filename,),
1281 (self._filename,),
1229 self._writedirstate,
1282 self._writedirstate,
1230 location=b'plain',
1283 location=b'plain',
1231 )
1284 )
1232
1285
1233 # ensure that pending file written above is unlinked at
1286 # ensure that pending file written above is unlinked at
1234 # failure, even if tr.writepending isn't invoked until the
1287 # failure, even if tr.writepending isn't invoked until the
1235 # end of this transaction
1288 # end of this transaction
1236 tr.registertmp(filename, location=b'plain')
1289 tr.registertmp(filename, location=b'plain')
1237
1290
1238 self._opener.tryunlink(backupname)
1291 self._opener.tryunlink(backupname)
1239 # hardlink backup is okay because _writedirstate is always called
1292 # hardlink backup is okay because _writedirstate is always called
1240 # with an "atomictemp=True" file.
1293 # with an "atomictemp=True" file.
1241 util.copyfile(
1294 util.copyfile(
1242 self._opener.join(filename),
1295 self._opener.join(filename),
1243 self._opener.join(backupname),
1296 self._opener.join(backupname),
1244 hardlink=True,
1297 hardlink=True,
1245 )
1298 )
1246
1299
1247 def restorebackup(self, tr, backupname):
1300 def restorebackup(self, tr, backupname):
1248 '''Restore dirstate by backup file'''
1301 '''Restore dirstate by backup file'''
1249 # this "invalidate()" prevents "wlock.release()" from writing
1302 # this "invalidate()" prevents "wlock.release()" from writing
1250 # changes of dirstate out after restoring from backup file
1303 # changes of dirstate out after restoring from backup file
1251 self.invalidate()
1304 self.invalidate()
1252 filename = self._actualfilename(tr)
1305 filename = self._actualfilename(tr)
1253 o = self._opener
1306 o = self._opener
1254 if util.samefile(o.join(backupname), o.join(filename)):
1307 if util.samefile(o.join(backupname), o.join(filename)):
1255 o.unlink(backupname)
1308 o.unlink(backupname)
1256 else:
1309 else:
1257 o.rename(backupname, filename, checkambig=True)
1310 o.rename(backupname, filename, checkambig=True)
1258
1311
1259 def clearbackup(self, tr, backupname):
1312 def clearbackup(self, tr, backupname):
1260 '''Clear backup file'''
1313 '''Clear backup file'''
1261 self._opener.unlink(backupname)
1314 self._opener.unlink(backupname)
1262
1315
1263
1316
1264 class dirstatemap(object):
1317 class dirstatemap(object):
1265 """Map encapsulating the dirstate's contents.
1318 """Map encapsulating the dirstate's contents.
1266
1319
1267 The dirstate contains the following state:
1320 The dirstate contains the following state:
1268
1321
1269 - `identity` is the identity of the dirstate file, which can be used to
1322 - `identity` is the identity of the dirstate file, which can be used to
1270 detect when changes have occurred to the dirstate file.
1323 detect when changes have occurred to the dirstate file.
1271
1324
1272 - `parents` is a pair containing the parents of the working copy. The
1325 - `parents` is a pair containing the parents of the working copy. The
1273 parents are updated by calling `setparents`.
1326 parents are updated by calling `setparents`.
1274
1327
1275 - the state map maps filenames to tuples of (state, mode, size, mtime),
1328 - the state map maps filenames to tuples of (state, mode, size, mtime),
1276 where state is a single character representing 'normal', 'added',
1329 where state is a single character representing 'normal', 'added',
1277 'removed', or 'merged'. It is read by treating the dirstate as a
1330 'removed', or 'merged'. It is read by treating the dirstate as a
1278 dict. File state is updated by calling the `addfile`, `removefile` and
1331 dict. File state is updated by calling the `addfile`, `removefile` and
1279 `dropfile` methods.
1332 `dropfile` methods.
1280
1333
1281 - `copymap` maps destination filenames to their source filename.
1334 - `copymap` maps destination filenames to their source filename.
1282
1335
1283 The dirstate also provides the following views onto the state:
1336 The dirstate also provides the following views onto the state:
1284
1337
1285 - `nonnormalset` is a set of the filenames that have state other
1338 - `nonnormalset` is a set of the filenames that have state other
1286 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1339 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1287
1340
1288 - `otherparentset` is a set of the filenames that are marked as coming
1341 - `otherparentset` is a set of the filenames that are marked as coming
1289 from the second parent when the dirstate is currently being merged.
1342 from the second parent when the dirstate is currently being merged.
1290
1343
1291 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1344 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1292 form that they appear as in the dirstate.
1345 form that they appear as in the dirstate.
1293
1346
1294 - `dirfoldmap` is a dict mapping normalized directory names to the
1347 - `dirfoldmap` is a dict mapping normalized directory names to the
1295 denormalized form that they appear as in the dirstate.
1348 denormalized form that they appear as in the dirstate.
1296 """
1349 """
1297
1350
1298 def __init__(self, ui, opener, root):
1351 def __init__(self, ui, opener, root):
1299 self._ui = ui
1352 self._ui = ui
1300 self._opener = opener
1353 self._opener = opener
1301 self._root = root
1354 self._root = root
1302 self._filename = b'dirstate'
1355 self._filename = b'dirstate'
1303
1356
1304 self._parents = None
1357 self._parents = None
1305 self._dirtyparents = False
1358 self._dirtyparents = False
1306
1359
1307 # for consistent view between _pl() and _read() invocations
1360 # for consistent view between _pl() and _read() invocations
1308 self._pendingmode = None
1361 self._pendingmode = None
1309
1362
1310 @propertycache
1363 @propertycache
1311 def _map(self):
1364 def _map(self):
1312 self._map = {}
1365 self._map = {}
1313 self.read()
1366 self.read()
1314 return self._map
1367 return self._map
1315
1368
1316 @propertycache
1369 @propertycache
1317 def copymap(self):
1370 def copymap(self):
1318 self.copymap = {}
1371 self.copymap = {}
1319 self._map
1372 self._map
1320 return self.copymap
1373 return self.copymap
1321
1374
1322 def clear(self):
1375 def clear(self):
1323 self._map.clear()
1376 self._map.clear()
1324 self.copymap.clear()
1377 self.copymap.clear()
1325 self.setparents(nullid, nullid)
1378 self.setparents(nullid, nullid)
1326 util.clearcachedproperty(self, b"_dirs")
1379 util.clearcachedproperty(self, b"_dirs")
1327 util.clearcachedproperty(self, b"_alldirs")
1380 util.clearcachedproperty(self, b"_alldirs")
1328 util.clearcachedproperty(self, b"filefoldmap")
1381 util.clearcachedproperty(self, b"filefoldmap")
1329 util.clearcachedproperty(self, b"dirfoldmap")
1382 util.clearcachedproperty(self, b"dirfoldmap")
1330 util.clearcachedproperty(self, b"nonnormalset")
1383 util.clearcachedproperty(self, b"nonnormalset")
1331 util.clearcachedproperty(self, b"otherparentset")
1384 util.clearcachedproperty(self, b"otherparentset")
1332
1385
1333 def items(self):
1386 def items(self):
1334 return pycompat.iteritems(self._map)
1387 return pycompat.iteritems(self._map)
1335
1388
1336 # forward for python2,3 compat
1389 # forward for python2,3 compat
1337 iteritems = items
1390 iteritems = items
1338
1391
1339 def __len__(self):
1392 def __len__(self):
1340 return len(self._map)
1393 return len(self._map)
1341
1394
1342 def __iter__(self):
1395 def __iter__(self):
1343 return iter(self._map)
1396 return iter(self._map)
1344
1397
1345 def get(self, key, default=None):
1398 def get(self, key, default=None):
1346 return self._map.get(key, default)
1399 return self._map.get(key, default)
1347
1400
1348 def __contains__(self, key):
1401 def __contains__(self, key):
1349 return key in self._map
1402 return key in self._map
1350
1403
1351 def __getitem__(self, key):
1404 def __getitem__(self, key):
1352 return self._map[key]
1405 return self._map[key]
1353
1406
1354 def keys(self):
1407 def keys(self):
1355 return self._map.keys()
1408 return self._map.keys()
1356
1409
1357 def preload(self):
1410 def preload(self):
1358 """Loads the underlying data, if it's not already loaded"""
1411 """Loads the underlying data, if it's not already loaded"""
1359 self._map
1412 self._map
1360
1413
1361 def addfile(self, f, oldstate, state, mode, size, mtime):
1414 def addfile(self, f, oldstate, state, mode, size, mtime):
1362 """Add a tracked file to the dirstate."""
1415 """Add a tracked file to the dirstate."""
1363 if oldstate in b"?r" and r"_dirs" in self.__dict__:
1416 if oldstate in b"?r" and r"_dirs" in self.__dict__:
1364 self._dirs.addpath(f)
1417 self._dirs.addpath(f)
1365 if oldstate == b"?" and r"_alldirs" in self.__dict__:
1418 if oldstate == b"?" and r"_alldirs" in self.__dict__:
1366 self._alldirs.addpath(f)
1419 self._alldirs.addpath(f)
1367 self._map[f] = dirstatetuple(state, mode, size, mtime)
1420 self._map[f] = dirstatetuple(state, mode, size, mtime)
1368 if state != b'n' or mtime == -1:
1421 if state != b'n' or mtime == -1:
1369 self.nonnormalset.add(f)
1422 self.nonnormalset.add(f)
1370 if size == -2:
1423 if size == -2:
1371 self.otherparentset.add(f)
1424 self.otherparentset.add(f)
1372
1425
1373 def removefile(self, f, oldstate, size):
1426 def removefile(self, f, oldstate, size):
1374 """
1427 """
1375 Mark a file as removed in the dirstate.
1428 Mark a file as removed in the dirstate.
1376
1429
1377 The `size` parameter is used to store sentinel values that indicate
1430 The `size` parameter is used to store sentinel values that indicate
1378 the file's previous state. In the future, we should refactor this
1431 the file's previous state. In the future, we should refactor this
1379 to be more explicit about what that state is.
1432 to be more explicit about what that state is.
1380 """
1433 """
1381 if oldstate not in b"?r" and r"_dirs" in self.__dict__:
1434 if oldstate not in b"?r" and r"_dirs" in self.__dict__:
1382 self._dirs.delpath(f)
1435 self._dirs.delpath(f)
1383 if oldstate == b"?" and r"_alldirs" in self.__dict__:
1436 if oldstate == b"?" and r"_alldirs" in self.__dict__:
1384 self._alldirs.addpath(f)
1437 self._alldirs.addpath(f)
1385 if r"filefoldmap" in self.__dict__:
1438 if r"filefoldmap" in self.__dict__:
1386 normed = util.normcase(f)
1439 normed = util.normcase(f)
1387 self.filefoldmap.pop(normed, None)
1440 self.filefoldmap.pop(normed, None)
1388 self._map[f] = dirstatetuple(b'r', 0, size, 0)
1441 self._map[f] = dirstatetuple(b'r', 0, size, 0)
1389 self.nonnormalset.add(f)
1442 self.nonnormalset.add(f)
1390
1443
1391 def dropfile(self, f, oldstate):
1444 def dropfile(self, f, oldstate):
1392 """
1445 """
1393 Remove a file from the dirstate. Returns True if the file was
1446 Remove a file from the dirstate. Returns True if the file was
1394 previously recorded.
1447 previously recorded.
1395 """
1448 """
1396 exists = self._map.pop(f, None) is not None
1449 exists = self._map.pop(f, None) is not None
1397 if exists:
1450 if exists:
1398 if oldstate != b"r" and r"_dirs" in self.__dict__:
1451 if oldstate != b"r" and r"_dirs" in self.__dict__:
1399 self._dirs.delpath(f)
1452 self._dirs.delpath(f)
1400 if r"_alldirs" in self.__dict__:
1453 if r"_alldirs" in self.__dict__:
1401 self._alldirs.delpath(f)
1454 self._alldirs.delpath(f)
1402 if r"filefoldmap" in self.__dict__:
1455 if r"filefoldmap" in self.__dict__:
1403 normed = util.normcase(f)
1456 normed = util.normcase(f)
1404 self.filefoldmap.pop(normed, None)
1457 self.filefoldmap.pop(normed, None)
1405 self.nonnormalset.discard(f)
1458 self.nonnormalset.discard(f)
1406 return exists
1459 return exists
1407
1460
1408 def clearambiguoustimes(self, files, now):
1461 def clearambiguoustimes(self, files, now):
1409 for f in files:
1462 for f in files:
1410 e = self.get(f)
1463 e = self.get(f)
1411 if e is not None and e[0] == b'n' and e[3] == now:
1464 if e is not None and e[0] == b'n' and e[3] == now:
1412 self._map[f] = dirstatetuple(e[0], e[1], e[2], -1)
1465 self._map[f] = dirstatetuple(e[0], e[1], e[2], -1)
1413 self.nonnormalset.add(f)
1466 self.nonnormalset.add(f)
1414
1467
1415 def nonnormalentries(self):
1468 def nonnormalentries(self):
1416 '''Compute the nonnormal dirstate entries from the dmap'''
1469 '''Compute the nonnormal dirstate entries from the dmap'''
1417 try:
1470 try:
1418 return parsers.nonnormalotherparententries(self._map)
1471 return parsers.nonnormalotherparententries(self._map)
1419 except AttributeError:
1472 except AttributeError:
1420 nonnorm = set()
1473 nonnorm = set()
1421 otherparent = set()
1474 otherparent = set()
1422 for fname, e in pycompat.iteritems(self._map):
1475 for fname, e in pycompat.iteritems(self._map):
1423 if e[0] != b'n' or e[3] == -1:
1476 if e[0] != b'n' or e[3] == -1:
1424 nonnorm.add(fname)
1477 nonnorm.add(fname)
1425 if e[0] == b'n' and e[2] == -2:
1478 if e[0] == b'n' and e[2] == -2:
1426 otherparent.add(fname)
1479 otherparent.add(fname)
1427 return nonnorm, otherparent
1480 return nonnorm, otherparent
1428
1481
1429 @propertycache
1482 @propertycache
1430 def filefoldmap(self):
1483 def filefoldmap(self):
1431 """Returns a dictionary mapping normalized case paths to their
1484 """Returns a dictionary mapping normalized case paths to their
1432 non-normalized versions.
1485 non-normalized versions.
1433 """
1486 """
1434 try:
1487 try:
1435 makefilefoldmap = parsers.make_file_foldmap
1488 makefilefoldmap = parsers.make_file_foldmap
1436 except AttributeError:
1489 except AttributeError:
1437 pass
1490 pass
1438 else:
1491 else:
1439 return makefilefoldmap(
1492 return makefilefoldmap(
1440 self._map, util.normcasespec, util.normcasefallback
1493 self._map, util.normcasespec, util.normcasefallback
1441 )
1494 )
1442
1495
1443 f = {}
1496 f = {}
1444 normcase = util.normcase
1497 normcase = util.normcase
1445 for name, s in pycompat.iteritems(self._map):
1498 for name, s in pycompat.iteritems(self._map):
1446 if s[0] != b'r':
1499 if s[0] != b'r':
1447 f[normcase(name)] = name
1500 f[normcase(name)] = name
1448 f[b'.'] = b'.' # prevents useless util.fspath() invocation
1501 f[b'.'] = b'.' # prevents useless util.fspath() invocation
1449 return f
1502 return f
1450
1503
1451 def hastrackeddir(self, d):
1504 def hastrackeddir(self, d):
1452 """
1505 """
1453 Returns True if the dirstate contains a tracked (not removed) file
1506 Returns True if the dirstate contains a tracked (not removed) file
1454 in this directory.
1507 in this directory.
1455 """
1508 """
1456 return d in self._dirs
1509 return d in self._dirs
1457
1510
1458 def hasdir(self, d):
1511 def hasdir(self, d):
1459 """
1512 """
1460 Returns True if the dirstate contains a file (tracked or removed)
1513 Returns True if the dirstate contains a file (tracked or removed)
1461 in this directory.
1514 in this directory.
1462 """
1515 """
1463 return d in self._alldirs
1516 return d in self._alldirs
1464
1517
1465 @propertycache
1518 @propertycache
1466 def _dirs(self):
1519 def _dirs(self):
1467 return util.dirs(self._map, b'r')
1520 return util.dirs(self._map, b'r')
1468
1521
1469 @propertycache
1522 @propertycache
1470 def _alldirs(self):
1523 def _alldirs(self):
1471 return util.dirs(self._map)
1524 return util.dirs(self._map)
1472
1525
1473 def _opendirstatefile(self):
1526 def _opendirstatefile(self):
1474 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1527 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1475 if self._pendingmode is not None and self._pendingmode != mode:
1528 if self._pendingmode is not None and self._pendingmode != mode:
1476 fp.close()
1529 fp.close()
1477 raise error.Abort(
1530 raise error.Abort(
1478 _(b'working directory state may be changed parallelly')
1531 _(b'working directory state may be changed parallelly')
1479 )
1532 )
1480 self._pendingmode = mode
1533 self._pendingmode = mode
1481 return fp
1534 return fp
1482
1535
1483 def parents(self):
1536 def parents(self):
1484 if not self._parents:
1537 if not self._parents:
1485 try:
1538 try:
1486 fp = self._opendirstatefile()
1539 fp = self._opendirstatefile()
1487 st = fp.read(40)
1540 st = fp.read(40)
1488 fp.close()
1541 fp.close()
1489 except IOError as err:
1542 except IOError as err:
1490 if err.errno != errno.ENOENT:
1543 if err.errno != errno.ENOENT:
1491 raise
1544 raise
1492 # File doesn't exist, so the current state is empty
1545 # File doesn't exist, so the current state is empty
1493 st = b''
1546 st = b''
1494
1547
1495 l = len(st)
1548 l = len(st)
1496 if l == 40:
1549 if l == 40:
1497 self._parents = (st[:20], st[20:40])
1550 self._parents = (st[:20], st[20:40])
1498 elif l == 0:
1551 elif l == 0:
1499 self._parents = (nullid, nullid)
1552 self._parents = (nullid, nullid)
1500 else:
1553 else:
1501 raise error.Abort(
1554 raise error.Abort(
1502 _(b'working directory state appears damaged!')
1555 _(b'working directory state appears damaged!')
1503 )
1556 )
1504
1557
1505 return self._parents
1558 return self._parents
1506
1559
1507 def setparents(self, p1, p2):
1560 def setparents(self, p1, p2):
1508 self._parents = (p1, p2)
1561 self._parents = (p1, p2)
1509 self._dirtyparents = True
1562 self._dirtyparents = True
1510
1563
1511 def read(self):
1564 def read(self):
1512 # ignore HG_PENDING because identity is used only for writing
1565 # ignore HG_PENDING because identity is used only for writing
1513 self.identity = util.filestat.frompath(
1566 self.identity = util.filestat.frompath(
1514 self._opener.join(self._filename)
1567 self._opener.join(self._filename)
1515 )
1568 )
1516
1569
1517 try:
1570 try:
1518 fp = self._opendirstatefile()
1571 fp = self._opendirstatefile()
1519 try:
1572 try:
1520 st = fp.read()
1573 st = fp.read()
1521 finally:
1574 finally:
1522 fp.close()
1575 fp.close()
1523 except IOError as err:
1576 except IOError as err:
1524 if err.errno != errno.ENOENT:
1577 if err.errno != errno.ENOENT:
1525 raise
1578 raise
1526 return
1579 return
1527 if not st:
1580 if not st:
1528 return
1581 return
1529
1582
1530 if util.safehasattr(parsers, b'dict_new_presized'):
1583 if util.safehasattr(parsers, b'dict_new_presized'):
1531 # Make an estimate of the number of files in the dirstate based on
1584 # Make an estimate of the number of files in the dirstate based on
1532 # its size. From a linear regression on a set of real-world repos,
1585 # its size. From a linear regression on a set of real-world repos,
1533 # all over 10,000 files, the size of a dirstate entry is 85
1586 # all over 10,000 files, the size of a dirstate entry is 85
1534 # bytes. The cost of resizing is significantly higher than the cost
1587 # bytes. The cost of resizing is significantly higher than the cost
1535 # of filling in a larger presized dict, so subtract 20% from the
1588 # of filling in a larger presized dict, so subtract 20% from the
1536 # size.
1589 # size.
1537 #
1590 #
1538 # This heuristic is imperfect in many ways, so in a future dirstate
1591 # This heuristic is imperfect in many ways, so in a future dirstate
1539 # format update it makes sense to just record the number of entries
1592 # format update it makes sense to just record the number of entries
1540 # on write.
1593 # on write.
1541 self._map = parsers.dict_new_presized(len(st) // 71)
1594 self._map = parsers.dict_new_presized(len(st) // 71)
1542
1595
1543 # Python's garbage collector triggers a GC each time a certain number
1596 # Python's garbage collector triggers a GC each time a certain number
1544 # of container objects (the number being defined by
1597 # of container objects (the number being defined by
1545 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1598 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1546 # for each file in the dirstate. The C version then immediately marks
1599 # for each file in the dirstate. The C version then immediately marks
1547 # them as not to be tracked by the collector. However, this has no
1600 # them as not to be tracked by the collector. However, this has no
1548 # effect on when GCs are triggered, only on what objects the GC looks
1601 # effect on when GCs are triggered, only on what objects the GC looks
1549 # into. This means that O(number of files) GCs are unavoidable.
1602 # into. This means that O(number of files) GCs are unavoidable.
1550 # Depending on when in the process's lifetime the dirstate is parsed,
1603 # Depending on when in the process's lifetime the dirstate is parsed,
1551 # this can get very expensive. As a workaround, disable GC while
1604 # this can get very expensive. As a workaround, disable GC while
1552 # parsing the dirstate.
1605 # parsing the dirstate.
1553 #
1606 #
1554 # (we cannot decorate the function directly since it is in a C module)
1607 # (we cannot decorate the function directly since it is in a C module)
1555 parse_dirstate = util.nogc(parsers.parse_dirstate)
1608 parse_dirstate = util.nogc(parsers.parse_dirstate)
1556 p = parse_dirstate(self._map, self.copymap, st)
1609 p = parse_dirstate(self._map, self.copymap, st)
1557 if not self._dirtyparents:
1610 if not self._dirtyparents:
1558 self.setparents(*p)
1611 self.setparents(*p)
1559
1612
1560 # Avoid excess attribute lookups by fast pathing certain checks
1613 # Avoid excess attribute lookups by fast pathing certain checks
1561 self.__contains__ = self._map.__contains__
1614 self.__contains__ = self._map.__contains__
1562 self.__getitem__ = self._map.__getitem__
1615 self.__getitem__ = self._map.__getitem__
1563 self.get = self._map.get
1616 self.get = self._map.get
1564
1617
1565 def write(self, st, now):
1618 def write(self, st, now):
1566 st.write(
1619 st.write(
1567 parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
1620 parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
1568 )
1621 )
1569 st.close()
1622 st.close()
1570 self._dirtyparents = False
1623 self._dirtyparents = False
1571 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1624 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1572
1625
1573 @propertycache
1626 @propertycache
1574 def nonnormalset(self):
1627 def nonnormalset(self):
1575 nonnorm, otherparents = self.nonnormalentries()
1628 nonnorm, otherparents = self.nonnormalentries()
1576 self.otherparentset = otherparents
1629 self.otherparentset = otherparents
1577 return nonnorm
1630 return nonnorm
1578
1631
1579 @propertycache
1632 @propertycache
1580 def otherparentset(self):
1633 def otherparentset(self):
1581 nonnorm, otherparents = self.nonnormalentries()
1634 nonnorm, otherparents = self.nonnormalentries()
1582 self.nonnormalset = nonnorm
1635 self.nonnormalset = nonnorm
1583 return otherparents
1636 return otherparents
1584
1637
1585 @propertycache
1638 @propertycache
1586 def identity(self):
1639 def identity(self):
1587 self._map
1640 self._map
1588 return self.identity
1641 return self.identity
1589
1642
1590 @propertycache
1643 @propertycache
1591 def dirfoldmap(self):
1644 def dirfoldmap(self):
1592 f = {}
1645 f = {}
1593 normcase = util.normcase
1646 normcase = util.normcase
1594 for name in self._dirs:
1647 for name in self._dirs:
1595 f[normcase(name)] = name
1648 f[normcase(name)] = name
1596 return f
1649 return f
1597
1650
1598
1651
1599 if rustmod is not None:
1652 if rustmod is not None:
1600
1653
1601 class dirstatemap(object):
1654 class dirstatemap(object):
1602 def __init__(self, ui, opener, root):
1655 def __init__(self, ui, opener, root):
1603 self._ui = ui
1656 self._ui = ui
1604 self._opener = opener
1657 self._opener = opener
1605 self._root = root
1658 self._root = root
1606 self._filename = b'dirstate'
1659 self._filename = b'dirstate'
1607 self._parents = None
1660 self._parents = None
1608 self._dirtyparents = False
1661 self._dirtyparents = False
1609
1662
1610 # for consistent view between _pl() and _read() invocations
1663 # for consistent view between _pl() and _read() invocations
1611 self._pendingmode = None
1664 self._pendingmode = None
1612
1665
1613 def addfile(self, *args, **kwargs):
1666 def addfile(self, *args, **kwargs):
1614 return self._rustmap.addfile(*args, **kwargs)
1667 return self._rustmap.addfile(*args, **kwargs)
1615
1668
1616 def removefile(self, *args, **kwargs):
1669 def removefile(self, *args, **kwargs):
1617 return self._rustmap.removefile(*args, **kwargs)
1670 return self._rustmap.removefile(*args, **kwargs)
1618
1671
1619 def dropfile(self, *args, **kwargs):
1672 def dropfile(self, *args, **kwargs):
1620 return self._rustmap.dropfile(*args, **kwargs)
1673 return self._rustmap.dropfile(*args, **kwargs)
1621
1674
1622 def clearambiguoustimes(self, *args, **kwargs):
1675 def clearambiguoustimes(self, *args, **kwargs):
1623 return self._rustmap.clearambiguoustimes(*args, **kwargs)
1676 return self._rustmap.clearambiguoustimes(*args, **kwargs)
1624
1677
1625 def nonnormalentries(self):
1678 def nonnormalentries(self):
1626 return self._rustmap.nonnormalentries()
1679 return self._rustmap.nonnormalentries()
1627
1680
1628 def get(self, *args, **kwargs):
1681 def get(self, *args, **kwargs):
1629 return self._rustmap.get(*args, **kwargs)
1682 return self._rustmap.get(*args, **kwargs)
1630
1683
1631 @propertycache
1684 @propertycache
1632 def _rustmap(self):
1685 def _rustmap(self):
1633 self._rustmap = rustmod.DirstateMap(self._root)
1686 self._rustmap = rustmod.DirstateMap(self._root)
1634 self.read()
1687 self.read()
1635 return self._rustmap
1688 return self._rustmap
1636
1689
1637 @property
1690 @property
1638 def copymap(self):
1691 def copymap(self):
1639 return self._rustmap.copymap()
1692 return self._rustmap.copymap()
1640
1693
1641 def preload(self):
1694 def preload(self):
1642 self._rustmap
1695 self._rustmap
1643
1696
1644 def clear(self):
1697 def clear(self):
1645 self._rustmap.clear()
1698 self._rustmap.clear()
1646 self.setparents(nullid, nullid)
1699 self.setparents(nullid, nullid)
1647 util.clearcachedproperty(self, b"_dirs")
1700 util.clearcachedproperty(self, b"_dirs")
1648 util.clearcachedproperty(self, b"_alldirs")
1701 util.clearcachedproperty(self, b"_alldirs")
1649 util.clearcachedproperty(self, b"dirfoldmap")
1702 util.clearcachedproperty(self, b"dirfoldmap")
1650
1703
1651 def items(self):
1704 def items(self):
1652 return self._rustmap.items()
1705 return self._rustmap.items()
1653
1706
1654 def keys(self):
1707 def keys(self):
1655 return iter(self._rustmap)
1708 return iter(self._rustmap)
1656
1709
1657 def __contains__(self, key):
1710 def __contains__(self, key):
1658 return key in self._rustmap
1711 return key in self._rustmap
1659
1712
1660 def __getitem__(self, item):
1713 def __getitem__(self, item):
1661 return self._rustmap[item]
1714 return self._rustmap[item]
1662
1715
1663 def __len__(self):
1716 def __len__(self):
1664 return len(self._rustmap)
1717 return len(self._rustmap)
1665
1718
1666 def __iter__(self):
1719 def __iter__(self):
1667 return iter(self._rustmap)
1720 return iter(self._rustmap)
1668
1721
1669 # forward for python2,3 compat
1722 # forward for python2,3 compat
1670 iteritems = items
1723 iteritems = items
1671
1724
1672 def _opendirstatefile(self):
1725 def _opendirstatefile(self):
1673 fp, mode = txnutil.trypending(
1726 fp, mode = txnutil.trypending(
1674 self._root, self._opener, self._filename
1727 self._root, self._opener, self._filename
1675 )
1728 )
1676 if self._pendingmode is not None and self._pendingmode != mode:
1729 if self._pendingmode is not None and self._pendingmode != mode:
1677 fp.close()
1730 fp.close()
1678 raise error.Abort(
1731 raise error.Abort(
1679 _(b'working directory state may be changed parallelly')
1732 _(b'working directory state may be changed parallelly')
1680 )
1733 )
1681 self._pendingmode = mode
1734 self._pendingmode = mode
1682 return fp
1735 return fp
1683
1736
1684 def setparents(self, p1, p2):
1737 def setparents(self, p1, p2):
1685 self._rustmap.setparents(p1, p2)
1738 self._rustmap.setparents(p1, p2)
1686 self._parents = (p1, p2)
1739 self._parents = (p1, p2)
1687 self._dirtyparents = True
1740 self._dirtyparents = True
1688
1741
1689 def parents(self):
1742 def parents(self):
1690 if not self._parents:
1743 if not self._parents:
1691 try:
1744 try:
1692 fp = self._opendirstatefile()
1745 fp = self._opendirstatefile()
1693 st = fp.read(40)
1746 st = fp.read(40)
1694 fp.close()
1747 fp.close()
1695 except IOError as err:
1748 except IOError as err:
1696 if err.errno != errno.ENOENT:
1749 if err.errno != errno.ENOENT:
1697 raise
1750 raise
1698 # File doesn't exist, so the current state is empty
1751 # File doesn't exist, so the current state is empty
1699 st = b''
1752 st = b''
1700
1753
1701 try:
1754 try:
1702 self._parents = self._rustmap.parents(st)
1755 self._parents = self._rustmap.parents(st)
1703 except ValueError:
1756 except ValueError:
1704 raise error.Abort(
1757 raise error.Abort(
1705 _(b'working directory state appears damaged!')
1758 _(b'working directory state appears damaged!')
1706 )
1759 )
1707
1760
1708 return self._parents
1761 return self._parents
1709
1762
1710 def read(self):
1763 def read(self):
1711 # ignore HG_PENDING because identity is used only for writing
1764 # ignore HG_PENDING because identity is used only for writing
1712 self.identity = util.filestat.frompath(
1765 self.identity = util.filestat.frompath(
1713 self._opener.join(self._filename)
1766 self._opener.join(self._filename)
1714 )
1767 )
1715
1768
1716 try:
1769 try:
1717 fp = self._opendirstatefile()
1770 fp = self._opendirstatefile()
1718 try:
1771 try:
1719 st = fp.read()
1772 st = fp.read()
1720 finally:
1773 finally:
1721 fp.close()
1774 fp.close()
1722 except IOError as err:
1775 except IOError as err:
1723 if err.errno != errno.ENOENT:
1776 if err.errno != errno.ENOENT:
1724 raise
1777 raise
1725 return
1778 return
1726 if not st:
1779 if not st:
1727 return
1780 return
1728
1781
1729 parse_dirstate = util.nogc(self._rustmap.read)
1782 parse_dirstate = util.nogc(self._rustmap.read)
1730 parents = parse_dirstate(st)
1783 parents = parse_dirstate(st)
1731 if parents and not self._dirtyparents:
1784 if parents and not self._dirtyparents:
1732 self.setparents(*parents)
1785 self.setparents(*parents)
1733
1786
1734 def write(self, st, now):
1787 def write(self, st, now):
1735 parents = self.parents()
1788 parents = self.parents()
1736 st.write(self._rustmap.write(parents[0], parents[1], now))
1789 st.write(self._rustmap.write(parents[0], parents[1], now))
1737 st.close()
1790 st.close()
1738 self._dirtyparents = False
1791 self._dirtyparents = False
1739
1792
1740 @propertycache
1793 @propertycache
1741 def filefoldmap(self):
1794 def filefoldmap(self):
1742 """Returns a dictionary mapping normalized case paths to their
1795 """Returns a dictionary mapping normalized case paths to their
1743 non-normalized versions.
1796 non-normalized versions.
1744 """
1797 """
1745 return self._rustmap.filefoldmapasdict()
1798 return self._rustmap.filefoldmapasdict()
1746
1799
1747 def hastrackeddir(self, d):
1800 def hastrackeddir(self, d):
1748 self._dirs # Trigger Python's propertycache
1801 self._dirs # Trigger Python's propertycache
1749 return self._rustmap.hastrackeddir(d)
1802 return self._rustmap.hastrackeddir(d)
1750
1803
1751 def hasdir(self, d):
1804 def hasdir(self, d):
1752 self._dirs # Trigger Python's propertycache
1805 self._dirs # Trigger Python's propertycache
1753 return self._rustmap.hasdir(d)
1806 return self._rustmap.hasdir(d)
1754
1807
1755 @propertycache
1808 @propertycache
1756 def _dirs(self):
1809 def _dirs(self):
1757 return self._rustmap.getdirs()
1810 return self._rustmap.getdirs()
1758
1811
1759 @propertycache
1812 @propertycache
1760 def _alldirs(self):
1813 def _alldirs(self):
1761 return self._rustmap.getalldirs()
1814 return self._rustmap.getalldirs()
1762
1815
1763 @propertycache
1816 @propertycache
1764 def identity(self):
1817 def identity(self):
1765 self._rustmap
1818 self._rustmap
1766 return self.identity
1819 return self.identity
1767
1820
1768 @property
1821 @property
1769 def nonnormalset(self):
1822 def nonnormalset(self):
1770 nonnorm, otherparents = self._rustmap.nonnormalentries()
1823 nonnorm, otherparents = self._rustmap.nonnormalentries()
1771 return nonnorm
1824 return nonnorm
1772
1825
1773 @property
1826 @property
1774 def otherparentset(self):
1827 def otherparentset(self):
1775 nonnorm, otherparents = self._rustmap.nonnormalentries()
1828 nonnorm, otherparents = self._rustmap.nonnormalentries()
1776 return otherparents
1829 return otherparents
1777
1830
1778 @propertycache
1831 @propertycache
1779 def dirfoldmap(self):
1832 def dirfoldmap(self):
1780 f = {}
1833 f = {}
1781 normcase = util.normcase
1834 normcase = util.normcase
1782 for name in self._dirs:
1835 for name in self._dirs:
1783 f[normcase(name)] = name
1836 f[normcase(name)] = name
1784 return f
1837 return f
General Comments 0
You need to be logged in to leave comments. Login now