##// END OF EJS Templates
dirstate: move management of the dirstate dirs into the dirstatemap...
Mark Thomas -
r35080:a947cf87 default
parent child Browse files
Show More
@@ -1,1479 +1,1483 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import nullid
17 from .node import nullid
18 from . import (
18 from . import (
19 encoding,
19 encoding,
20 error,
20 error,
21 match as matchmod,
21 match as matchmod,
22 pathutil,
22 pathutil,
23 policy,
23 policy,
24 pycompat,
24 pycompat,
25 scmutil,
25 scmutil,
26 txnutil,
26 txnutil,
27 util,
27 util,
28 )
28 )
29
29
30 parsers = policy.importmod(r'parsers')
30 parsers = policy.importmod(r'parsers')
31
31
32 propertycache = util.propertycache
32 propertycache = util.propertycache
33 filecache = scmutil.filecache
33 filecache = scmutil.filecache
34 _rangemask = 0x7fffffff
34 _rangemask = 0x7fffffff
35
35
36 dirstatetuple = parsers.dirstatetuple
36 dirstatetuple = parsers.dirstatetuple
37
37
38 class repocache(filecache):
38 class repocache(filecache):
39 """filecache for files in .hg/"""
39 """filecache for files in .hg/"""
40 def join(self, obj, fname):
40 def join(self, obj, fname):
41 return obj._opener.join(fname)
41 return obj._opener.join(fname)
42
42
43 class rootcache(filecache):
43 class rootcache(filecache):
44 """filecache for files in the repository root"""
44 """filecache for files in the repository root"""
45 def join(self, obj, fname):
45 def join(self, obj, fname):
46 return obj._join(fname)
46 return obj._join(fname)
47
47
48 def _getfsnow(vfs):
48 def _getfsnow(vfs):
49 '''Get "now" timestamp on filesystem'''
49 '''Get "now" timestamp on filesystem'''
50 tmpfd, tmpname = vfs.mkstemp()
50 tmpfd, tmpname = vfs.mkstemp()
51 try:
51 try:
52 return os.fstat(tmpfd).st_mtime
52 return os.fstat(tmpfd).st_mtime
53 finally:
53 finally:
54 os.close(tmpfd)
54 os.close(tmpfd)
55 vfs.unlink(tmpname)
55 vfs.unlink(tmpname)
56
56
57 class dirstate(object):
57 class dirstate(object):
58
58
59 def __init__(self, opener, ui, root, validate, sparsematchfn):
59 def __init__(self, opener, ui, root, validate, sparsematchfn):
60 '''Create a new dirstate object.
60 '''Create a new dirstate object.
61
61
62 opener is an open()-like callable that can be used to open the
62 opener is an open()-like callable that can be used to open the
63 dirstate file; root is the root of the directory tracked by
63 dirstate file; root is the root of the directory tracked by
64 the dirstate.
64 the dirstate.
65 '''
65 '''
66 self._opener = opener
66 self._opener = opener
67 self._validate = validate
67 self._validate = validate
68 self._root = root
68 self._root = root
69 self._sparsematchfn = sparsematchfn
69 self._sparsematchfn = sparsematchfn
70 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
70 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
71 # UNC path pointing to root share (issue4557)
71 # UNC path pointing to root share (issue4557)
72 self._rootdir = pathutil.normasprefix(root)
72 self._rootdir = pathutil.normasprefix(root)
73 self._dirty = False
73 self._dirty = False
74 self._lastnormaltime = 0
74 self._lastnormaltime = 0
75 self._ui = ui
75 self._ui = ui
76 self._filecache = {}
76 self._filecache = {}
77 self._parentwriters = 0
77 self._parentwriters = 0
78 self._filename = 'dirstate'
78 self._filename = 'dirstate'
79 self._pendingfilename = '%s.pending' % self._filename
79 self._pendingfilename = '%s.pending' % self._filename
80 self._plchangecallbacks = {}
80 self._plchangecallbacks = {}
81 self._origpl = None
81 self._origpl = None
82 self._updatedfiles = set()
82 self._updatedfiles = set()
83
83
84 @contextlib.contextmanager
84 @contextlib.contextmanager
85 def parentchange(self):
85 def parentchange(self):
86 '''Context manager for handling dirstate parents.
86 '''Context manager for handling dirstate parents.
87
87
88 If an exception occurs in the scope of the context manager,
88 If an exception occurs in the scope of the context manager,
89 the incoherent dirstate won't be written when wlock is
89 the incoherent dirstate won't be written when wlock is
90 released.
90 released.
91 '''
91 '''
92 self._parentwriters += 1
92 self._parentwriters += 1
93 yield
93 yield
94 # Typically we want the "undo" step of a context manager in a
94 # Typically we want the "undo" step of a context manager in a
95 # finally block so it happens even when an exception
95 # finally block so it happens even when an exception
96 # occurs. In this case, however, we only want to decrement
96 # occurs. In this case, however, we only want to decrement
97 # parentwriters if the code in the with statement exits
97 # parentwriters if the code in the with statement exits
98 # normally, so we don't have a try/finally here on purpose.
98 # normally, so we don't have a try/finally here on purpose.
99 self._parentwriters -= 1
99 self._parentwriters -= 1
100
100
101 def beginparentchange(self):
101 def beginparentchange(self):
102 '''Marks the beginning of a set of changes that involve changing
102 '''Marks the beginning of a set of changes that involve changing
103 the dirstate parents. If there is an exception during this time,
103 the dirstate parents. If there is an exception during this time,
104 the dirstate will not be written when the wlock is released. This
104 the dirstate will not be written when the wlock is released. This
105 prevents writing an incoherent dirstate where the parent doesn't
105 prevents writing an incoherent dirstate where the parent doesn't
106 match the contents.
106 match the contents.
107 '''
107 '''
108 self._ui.deprecwarn('beginparentchange is obsoleted by the '
108 self._ui.deprecwarn('beginparentchange is obsoleted by the '
109 'parentchange context manager.', '4.3')
109 'parentchange context manager.', '4.3')
110 self._parentwriters += 1
110 self._parentwriters += 1
111
111
112 def endparentchange(self):
112 def endparentchange(self):
113 '''Marks the end of a set of changes that involve changing the
113 '''Marks the end of a set of changes that involve changing the
114 dirstate parents. Once all parent changes have been marked done,
114 dirstate parents. Once all parent changes have been marked done,
115 the wlock will be free to write the dirstate on release.
115 the wlock will be free to write the dirstate on release.
116 '''
116 '''
117 self._ui.deprecwarn('endparentchange is obsoleted by the '
117 self._ui.deprecwarn('endparentchange is obsoleted by the '
118 'parentchange context manager.', '4.3')
118 'parentchange context manager.', '4.3')
119 if self._parentwriters > 0:
119 if self._parentwriters > 0:
120 self._parentwriters -= 1
120 self._parentwriters -= 1
121
121
122 def pendingparentchange(self):
122 def pendingparentchange(self):
123 '''Returns true if the dirstate is in the middle of a set of changes
123 '''Returns true if the dirstate is in the middle of a set of changes
124 that modify the dirstate parent.
124 that modify the dirstate parent.
125 '''
125 '''
126 return self._parentwriters > 0
126 return self._parentwriters > 0
127
127
128 @propertycache
128 @propertycache
129 def _map(self):
129 def _map(self):
130 """Return the dirstate contents (see documentation for dirstatemap)."""
130 """Return the dirstate contents (see documentation for dirstatemap)."""
131 self._map = dirstatemap(self._ui, self._opener, self._root)
131 self._map = dirstatemap(self._ui, self._opener, self._root)
132 return self._map
132 return self._map
133
133
134 @property
134 @property
135 def _sparsematcher(self):
135 def _sparsematcher(self):
136 """The matcher for the sparse checkout.
136 """The matcher for the sparse checkout.
137
137
138 The working directory may not include every file from a manifest. The
138 The working directory may not include every file from a manifest. The
139 matcher obtained by this property will match a path if it is to be
139 matcher obtained by this property will match a path if it is to be
140 included in the working directory.
140 included in the working directory.
141 """
141 """
142 # TODO there is potential to cache this property. For now, the matcher
142 # TODO there is potential to cache this property. For now, the matcher
143 # is resolved on every access. (But the called function does use a
143 # is resolved on every access. (But the called function does use a
144 # cache to keep the lookup fast.)
144 # cache to keep the lookup fast.)
145 return self._sparsematchfn()
145 return self._sparsematchfn()
146
146
147 @repocache('branch')
147 @repocache('branch')
148 def _branch(self):
148 def _branch(self):
149 try:
149 try:
150 return self._opener.read("branch").strip() or "default"
150 return self._opener.read("branch").strip() or "default"
151 except IOError as inst:
151 except IOError as inst:
152 if inst.errno != errno.ENOENT:
152 if inst.errno != errno.ENOENT:
153 raise
153 raise
154 return "default"
154 return "default"
155
155
156 @property
156 @property
157 def _pl(self):
157 def _pl(self):
158 return self._map.parents()
158 return self._map.parents()
159
159
160 def dirs(self):
160 def dirs(self):
161 return self._map.dirs
161 return self._map.dirs
162
162
163 @rootcache('.hgignore')
163 @rootcache('.hgignore')
164 def _ignore(self):
164 def _ignore(self):
165 files = self._ignorefiles()
165 files = self._ignorefiles()
166 if not files:
166 if not files:
167 return matchmod.never(self._root, '')
167 return matchmod.never(self._root, '')
168
168
169 pats = ['include:%s' % f for f in files]
169 pats = ['include:%s' % f for f in files]
170 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
170 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
171
171
172 @propertycache
172 @propertycache
173 def _slash(self):
173 def _slash(self):
174 return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'
174 return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'
175
175
176 @propertycache
176 @propertycache
177 def _checklink(self):
177 def _checklink(self):
178 return util.checklink(self._root)
178 return util.checklink(self._root)
179
179
180 @propertycache
180 @propertycache
181 def _checkexec(self):
181 def _checkexec(self):
182 return util.checkexec(self._root)
182 return util.checkexec(self._root)
183
183
184 @propertycache
184 @propertycache
185 def _checkcase(self):
185 def _checkcase(self):
186 return not util.fscasesensitive(self._join('.hg'))
186 return not util.fscasesensitive(self._join('.hg'))
187
187
188 def _join(self, f):
188 def _join(self, f):
189 # much faster than os.path.join()
189 # much faster than os.path.join()
190 # it's safe because f is always a relative path
190 # it's safe because f is always a relative path
191 return self._rootdir + f
191 return self._rootdir + f
192
192
193 def flagfunc(self, buildfallback):
193 def flagfunc(self, buildfallback):
194 if self._checklink and self._checkexec:
194 if self._checklink and self._checkexec:
195 def f(x):
195 def f(x):
196 try:
196 try:
197 st = os.lstat(self._join(x))
197 st = os.lstat(self._join(x))
198 if util.statislink(st):
198 if util.statislink(st):
199 return 'l'
199 return 'l'
200 if util.statisexec(st):
200 if util.statisexec(st):
201 return 'x'
201 return 'x'
202 except OSError:
202 except OSError:
203 pass
203 pass
204 return ''
204 return ''
205 return f
205 return f
206
206
207 fallback = buildfallback()
207 fallback = buildfallback()
208 if self._checklink:
208 if self._checklink:
209 def f(x):
209 def f(x):
210 if os.path.islink(self._join(x)):
210 if os.path.islink(self._join(x)):
211 return 'l'
211 return 'l'
212 if 'x' in fallback(x):
212 if 'x' in fallback(x):
213 return 'x'
213 return 'x'
214 return ''
214 return ''
215 return f
215 return f
216 if self._checkexec:
216 if self._checkexec:
217 def f(x):
217 def f(x):
218 if 'l' in fallback(x):
218 if 'l' in fallback(x):
219 return 'l'
219 return 'l'
220 if util.isexec(self._join(x)):
220 if util.isexec(self._join(x)):
221 return 'x'
221 return 'x'
222 return ''
222 return ''
223 return f
223 return f
224 else:
224 else:
225 return fallback
225 return fallback
226
226
227 @propertycache
227 @propertycache
228 def _cwd(self):
228 def _cwd(self):
229 # internal config: ui.forcecwd
229 # internal config: ui.forcecwd
230 forcecwd = self._ui.config('ui', 'forcecwd')
230 forcecwd = self._ui.config('ui', 'forcecwd')
231 if forcecwd:
231 if forcecwd:
232 return forcecwd
232 return forcecwd
233 return pycompat.getcwd()
233 return pycompat.getcwd()
234
234
235 def getcwd(self):
235 def getcwd(self):
236 '''Return the path from which a canonical path is calculated.
236 '''Return the path from which a canonical path is calculated.
237
237
238 This path should be used to resolve file patterns or to convert
238 This path should be used to resolve file patterns or to convert
239 canonical paths back to file paths for display. It shouldn't be
239 canonical paths back to file paths for display. It shouldn't be
240 used to get real file paths. Use vfs functions instead.
240 used to get real file paths. Use vfs functions instead.
241 '''
241 '''
242 cwd = self._cwd
242 cwd = self._cwd
243 if cwd == self._root:
243 if cwd == self._root:
244 return ''
244 return ''
245 # self._root ends with a path separator if self._root is '/' or 'C:\'
245 # self._root ends with a path separator if self._root is '/' or 'C:\'
246 rootsep = self._root
246 rootsep = self._root
247 if not util.endswithsep(rootsep):
247 if not util.endswithsep(rootsep):
248 rootsep += pycompat.ossep
248 rootsep += pycompat.ossep
249 if cwd.startswith(rootsep):
249 if cwd.startswith(rootsep):
250 return cwd[len(rootsep):]
250 return cwd[len(rootsep):]
251 else:
251 else:
252 # we're outside the repo. return an absolute path.
252 # we're outside the repo. return an absolute path.
253 return cwd
253 return cwd
254
254
255 def pathto(self, f, cwd=None):
255 def pathto(self, f, cwd=None):
256 if cwd is None:
256 if cwd is None:
257 cwd = self.getcwd()
257 cwd = self.getcwd()
258 path = util.pathto(self._root, cwd, f)
258 path = util.pathto(self._root, cwd, f)
259 if self._slash:
259 if self._slash:
260 return util.pconvert(path)
260 return util.pconvert(path)
261 return path
261 return path
262
262
263 def __getitem__(self, key):
263 def __getitem__(self, key):
264 '''Return the current state of key (a filename) in the dirstate.
264 '''Return the current state of key (a filename) in the dirstate.
265
265
266 States are:
266 States are:
267 n normal
267 n normal
268 m needs merging
268 m needs merging
269 r marked for removal
269 r marked for removal
270 a marked for addition
270 a marked for addition
271 ? not tracked
271 ? not tracked
272 '''
272 '''
273 return self._map.get(key, ("?",))[0]
273 return self._map.get(key, ("?",))[0]
274
274
275 def __contains__(self, key):
275 def __contains__(self, key):
276 return key in self._map
276 return key in self._map
277
277
278 def __iter__(self):
278 def __iter__(self):
279 return iter(sorted(self._map))
279 return iter(sorted(self._map))
280
280
281 def items(self):
281 def items(self):
282 return self._map.iteritems()
282 return self._map.iteritems()
283
283
284 iteritems = items
284 iteritems = items
285
285
286 def parents(self):
286 def parents(self):
287 return [self._validate(p) for p in self._pl]
287 return [self._validate(p) for p in self._pl]
288
288
289 def p1(self):
289 def p1(self):
290 return self._validate(self._pl[0])
290 return self._validate(self._pl[0])
291
291
292 def p2(self):
292 def p2(self):
293 return self._validate(self._pl[1])
293 return self._validate(self._pl[1])
294
294
295 def branch(self):
295 def branch(self):
296 return encoding.tolocal(self._branch)
296 return encoding.tolocal(self._branch)
297
297
298 def setparents(self, p1, p2=nullid):
298 def setparents(self, p1, p2=nullid):
299 """Set dirstate parents to p1 and p2.
299 """Set dirstate parents to p1 and p2.
300
300
301 When moving from two parents to one, 'm' merged entries a
301 When moving from two parents to one, 'm' merged entries a
302 adjusted to normal and previous copy records discarded and
302 adjusted to normal and previous copy records discarded and
303 returned by the call.
303 returned by the call.
304
304
305 See localrepo.setparents()
305 See localrepo.setparents()
306 """
306 """
307 if self._parentwriters == 0:
307 if self._parentwriters == 0:
308 raise ValueError("cannot set dirstate parent without "
308 raise ValueError("cannot set dirstate parent without "
309 "calling dirstate.beginparentchange")
309 "calling dirstate.beginparentchange")
310
310
311 self._dirty = True
311 self._dirty = True
312 oldp2 = self._pl[1]
312 oldp2 = self._pl[1]
313 if self._origpl is None:
313 if self._origpl is None:
314 self._origpl = self._pl
314 self._origpl = self._pl
315 self._map.setparents(p1, p2)
315 self._map.setparents(p1, p2)
316 copies = {}
316 copies = {}
317 if oldp2 != nullid and p2 == nullid:
317 if oldp2 != nullid and p2 == nullid:
318 candidatefiles = self._map.nonnormalset.union(
318 candidatefiles = self._map.nonnormalset.union(
319 self._map.otherparentset)
319 self._map.otherparentset)
320 for f in candidatefiles:
320 for f in candidatefiles:
321 s = self._map.get(f)
321 s = self._map.get(f)
322 if s is None:
322 if s is None:
323 continue
323 continue
324
324
325 # Discard 'm' markers when moving away from a merge state
325 # Discard 'm' markers when moving away from a merge state
326 if s[0] == 'm':
326 if s[0] == 'm':
327 source = self._map.copymap.get(f)
327 source = self._map.copymap.get(f)
328 if source:
328 if source:
329 copies[f] = source
329 copies[f] = source
330 self.normallookup(f)
330 self.normallookup(f)
331 # Also fix up otherparent markers
331 # Also fix up otherparent markers
332 elif s[0] == 'n' and s[2] == -2:
332 elif s[0] == 'n' and s[2] == -2:
333 source = self._map.copymap.get(f)
333 source = self._map.copymap.get(f)
334 if source:
334 if source:
335 copies[f] = source
335 copies[f] = source
336 self.add(f)
336 self.add(f)
337 return copies
337 return copies
338
338
339 def setbranch(self, branch):
339 def setbranch(self, branch):
340 self._branch = encoding.fromlocal(branch)
340 self._branch = encoding.fromlocal(branch)
341 f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
341 f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
342 try:
342 try:
343 f.write(self._branch + '\n')
343 f.write(self._branch + '\n')
344 f.close()
344 f.close()
345
345
346 # make sure filecache has the correct stat info for _branch after
346 # make sure filecache has the correct stat info for _branch after
347 # replacing the underlying file
347 # replacing the underlying file
348 ce = self._filecache['_branch']
348 ce = self._filecache['_branch']
349 if ce:
349 if ce:
350 ce.refresh()
350 ce.refresh()
351 except: # re-raises
351 except: # re-raises
352 f.discard()
352 f.discard()
353 raise
353 raise
354
354
355 def invalidate(self):
355 def invalidate(self):
356 '''Causes the next access to reread the dirstate.
356 '''Causes the next access to reread the dirstate.
357
357
358 This is different from localrepo.invalidatedirstate() because it always
358 This is different from localrepo.invalidatedirstate() because it always
359 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
359 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
360 check whether the dirstate has changed before rereading it.'''
360 check whether the dirstate has changed before rereading it.'''
361
361
362 for a in ("_map", "_branch", "_ignore"):
362 for a in ("_map", "_branch", "_ignore"):
363 if a in self.__dict__:
363 if a in self.__dict__:
364 delattr(self, a)
364 delattr(self, a)
365 self._lastnormaltime = 0
365 self._lastnormaltime = 0
366 self._dirty = False
366 self._dirty = False
367 self._updatedfiles.clear()
367 self._updatedfiles.clear()
368 self._parentwriters = 0
368 self._parentwriters = 0
369 self._origpl = None
369 self._origpl = None
370
370
371 def copy(self, source, dest):
371 def copy(self, source, dest):
372 """Mark dest as a copy of source. Unmark dest if source is None."""
372 """Mark dest as a copy of source. Unmark dest if source is None."""
373 if source == dest:
373 if source == dest:
374 return
374 return
375 self._dirty = True
375 self._dirty = True
376 if source is not None:
376 if source is not None:
377 self._map.copymap[dest] = source
377 self._map.copymap[dest] = source
378 self._updatedfiles.add(source)
378 self._updatedfiles.add(source)
379 self._updatedfiles.add(dest)
379 self._updatedfiles.add(dest)
380 elif self._map.copymap.pop(dest, None):
380 elif self._map.copymap.pop(dest, None):
381 self._updatedfiles.add(dest)
381 self._updatedfiles.add(dest)
382
382
383 def copied(self, file):
383 def copied(self, file):
384 return self._map.copymap.get(file, None)
384 return self._map.copymap.get(file, None)
385
385
386 def copies(self):
386 def copies(self):
387 return self._map.copymap
387 return self._map.copymap
388
388
389 def _droppath(self, f):
389 def _droppath(self, f):
390 if self[f] not in "?r" and "dirs" in self._map.__dict__:
391 self._map.dirs.delpath(f)
392
393 if "filefoldmap" in self._map.__dict__:
390 if "filefoldmap" in self._map.__dict__:
394 normed = util.normcase(f)
391 normed = util.normcase(f)
395 if normed in self._map.filefoldmap:
392 if normed in self._map.filefoldmap:
396 del self._map.filefoldmap[normed]
393 del self._map.filefoldmap[normed]
397
394
398 self._updatedfiles.add(f)
395 self._updatedfiles.add(f)
399
396
400 def _addpath(self, f, state, mode, size, mtime):
397 def _addpath(self, f, state, mode, size, mtime):
401 oldstate = self[f]
398 oldstate = self[f]
402 if state == 'a' or oldstate == 'r':
399 if state == 'a' or oldstate == 'r':
403 scmutil.checkfilename(f)
400 scmutil.checkfilename(f)
404 if f in self._map.dirs:
401 if f in self._map.dirs:
405 raise error.Abort(_('directory %r already in dirstate') % f)
402 raise error.Abort(_('directory %r already in dirstate') % f)
406 # shadows
403 # shadows
407 for d in util.finddirs(f):
404 for d in util.finddirs(f):
408 if d in self._map.dirs:
405 if d in self._map.dirs:
409 break
406 break
410 entry = self._map.get(d)
407 entry = self._map.get(d)
411 if entry is not None and entry[0] != 'r':
408 if entry is not None and entry[0] != 'r':
412 raise error.Abort(
409 raise error.Abort(
413 _('file %r in dirstate clashes with %r') % (d, f))
410 _('file %r in dirstate clashes with %r') % (d, f))
414 if oldstate in "?r" and "dirs" in self._map.__dict__:
415 self._map.dirs.addpath(f)
416 self._dirty = True
411 self._dirty = True
417 self._updatedfiles.add(f)
412 self._updatedfiles.add(f)
418 self._map.addfile(f, state, mode, size, mtime)
413 self._map.addfile(f, oldstate, state, mode, size, mtime)
419
414
420 def normal(self, f):
415 def normal(self, f):
421 '''Mark a file normal and clean.'''
416 '''Mark a file normal and clean.'''
422 s = os.lstat(self._join(f))
417 s = os.lstat(self._join(f))
423 mtime = s.st_mtime
418 mtime = s.st_mtime
424 self._addpath(f, 'n', s.st_mode,
419 self._addpath(f, 'n', s.st_mode,
425 s.st_size & _rangemask, mtime & _rangemask)
420 s.st_size & _rangemask, mtime & _rangemask)
426 self._map.copymap.pop(f, None)
421 self._map.copymap.pop(f, None)
427 if f in self._map.nonnormalset:
422 if f in self._map.nonnormalset:
428 self._map.nonnormalset.remove(f)
423 self._map.nonnormalset.remove(f)
429 if mtime > self._lastnormaltime:
424 if mtime > self._lastnormaltime:
430 # Remember the most recent modification timeslot for status(),
425 # Remember the most recent modification timeslot for status(),
431 # to make sure we won't miss future size-preserving file content
426 # to make sure we won't miss future size-preserving file content
432 # modifications that happen within the same timeslot.
427 # modifications that happen within the same timeslot.
433 self._lastnormaltime = mtime
428 self._lastnormaltime = mtime
434
429
435 def normallookup(self, f):
430 def normallookup(self, f):
436 '''Mark a file normal, but possibly dirty.'''
431 '''Mark a file normal, but possibly dirty.'''
437 if self._pl[1] != nullid:
432 if self._pl[1] != nullid:
438 # if there is a merge going on and the file was either
433 # if there is a merge going on and the file was either
439 # in state 'm' (-1) or coming from other parent (-2) before
434 # in state 'm' (-1) or coming from other parent (-2) before
440 # being removed, restore that state.
435 # being removed, restore that state.
441 entry = self._map.get(f)
436 entry = self._map.get(f)
442 if entry is not None:
437 if entry is not None:
443 if entry[0] == 'r' and entry[2] in (-1, -2):
438 if entry[0] == 'r' and entry[2] in (-1, -2):
444 source = self._map.copymap.get(f)
439 source = self._map.copymap.get(f)
445 if entry[2] == -1:
440 if entry[2] == -1:
446 self.merge(f)
441 self.merge(f)
447 elif entry[2] == -2:
442 elif entry[2] == -2:
448 self.otherparent(f)
443 self.otherparent(f)
449 if source:
444 if source:
450 self.copy(source, f)
445 self.copy(source, f)
451 return
446 return
452 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
447 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
453 return
448 return
454 self._addpath(f, 'n', 0, -1, -1)
449 self._addpath(f, 'n', 0, -1, -1)
455 self._map.copymap.pop(f, None)
450 self._map.copymap.pop(f, None)
456
451
457 def otherparent(self, f):
452 def otherparent(self, f):
458 '''Mark as coming from the other parent, always dirty.'''
453 '''Mark as coming from the other parent, always dirty.'''
459 if self._pl[1] == nullid:
454 if self._pl[1] == nullid:
460 raise error.Abort(_("setting %r to other parent "
455 raise error.Abort(_("setting %r to other parent "
461 "only allowed in merges") % f)
456 "only allowed in merges") % f)
462 if f in self and self[f] == 'n':
457 if f in self and self[f] == 'n':
463 # merge-like
458 # merge-like
464 self._addpath(f, 'm', 0, -2, -1)
459 self._addpath(f, 'm', 0, -2, -1)
465 else:
460 else:
466 # add-like
461 # add-like
467 self._addpath(f, 'n', 0, -2, -1)
462 self._addpath(f, 'n', 0, -2, -1)
468 self._map.copymap.pop(f, None)
463 self._map.copymap.pop(f, None)
469
464
470 def add(self, f):
465 def add(self, f):
471 '''Mark a file added.'''
466 '''Mark a file added.'''
472 self._addpath(f, 'a', 0, -1, -1)
467 self._addpath(f, 'a', 0, -1, -1)
473 self._map.copymap.pop(f, None)
468 self._map.copymap.pop(f, None)
474
469
475 def remove(self, f):
470 def remove(self, f):
476 '''Mark a file removed.'''
471 '''Mark a file removed.'''
477 self._dirty = True
472 self._dirty = True
478 self._droppath(f)
473 self._droppath(f)
474 oldstate = self[f]
479 size = 0
475 size = 0
480 if self._pl[1] != nullid:
476 if self._pl[1] != nullid:
481 entry = self._map.get(f)
477 entry = self._map.get(f)
482 if entry is not None:
478 if entry is not None:
483 # backup the previous state
479 # backup the previous state
484 if entry[0] == 'm': # merge
480 if entry[0] == 'm': # merge
485 size = -1
481 size = -1
486 elif entry[0] == 'n' and entry[2] == -2: # other parent
482 elif entry[0] == 'n' and entry[2] == -2: # other parent
487 size = -2
483 size = -2
488 self._map.otherparentset.add(f)
484 self._map.otherparentset.add(f)
489 self._map.removefile(f, size)
485 self._map.removefile(f, oldstate, size)
490 if size == 0:
486 if size == 0:
491 self._map.copymap.pop(f, None)
487 self._map.copymap.pop(f, None)
492
488
493 def merge(self, f):
489 def merge(self, f):
494 '''Mark a file merged.'''
490 '''Mark a file merged.'''
495 if self._pl[1] == nullid:
491 if self._pl[1] == nullid:
496 return self.normallookup(f)
492 return self.normallookup(f)
497 return self.otherparent(f)
493 return self.otherparent(f)
498
494
499 def drop(self, f):
495 def drop(self, f):
500 '''Drop a file from the dirstate'''
496 '''Drop a file from the dirstate'''
501 if self._map.dropfile(f):
497 oldstate = self[f]
498 if self._map.dropfile(f, oldstate):
502 self._dirty = True
499 self._dirty = True
503 self._droppath(f)
500 self._droppath(f)
504 self._map.copymap.pop(f, None)
501 self._map.copymap.pop(f, None)
505
502
506 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
503 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
507 if exists is None:
504 if exists is None:
508 exists = os.path.lexists(os.path.join(self._root, path))
505 exists = os.path.lexists(os.path.join(self._root, path))
509 if not exists:
506 if not exists:
510 # Maybe a path component exists
507 # Maybe a path component exists
511 if not ignoremissing and '/' in path:
508 if not ignoremissing and '/' in path:
512 d, f = path.rsplit('/', 1)
509 d, f = path.rsplit('/', 1)
513 d = self._normalize(d, False, ignoremissing, None)
510 d = self._normalize(d, False, ignoremissing, None)
514 folded = d + "/" + f
511 folded = d + "/" + f
515 else:
512 else:
516 # No path components, preserve original case
513 # No path components, preserve original case
517 folded = path
514 folded = path
518 else:
515 else:
519 # recursively normalize leading directory components
516 # recursively normalize leading directory components
520 # against dirstate
517 # against dirstate
521 if '/' in normed:
518 if '/' in normed:
522 d, f = normed.rsplit('/', 1)
519 d, f = normed.rsplit('/', 1)
523 d = self._normalize(d, False, ignoremissing, True)
520 d = self._normalize(d, False, ignoremissing, True)
524 r = self._root + "/" + d
521 r = self._root + "/" + d
525 folded = d + "/" + util.fspath(f, r)
522 folded = d + "/" + util.fspath(f, r)
526 else:
523 else:
527 folded = util.fspath(normed, self._root)
524 folded = util.fspath(normed, self._root)
528 storemap[normed] = folded
525 storemap[normed] = folded
529
526
530 return folded
527 return folded
531
528
532 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
529 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
533 normed = util.normcase(path)
530 normed = util.normcase(path)
534 folded = self._map.filefoldmap.get(normed, None)
531 folded = self._map.filefoldmap.get(normed, None)
535 if folded is None:
532 if folded is None:
536 if isknown:
533 if isknown:
537 folded = path
534 folded = path
538 else:
535 else:
539 folded = self._discoverpath(path, normed, ignoremissing, exists,
536 folded = self._discoverpath(path, normed, ignoremissing, exists,
540 self._map.filefoldmap)
537 self._map.filefoldmap)
541 return folded
538 return folded
542
539
543 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
540 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
544 normed = util.normcase(path)
541 normed = util.normcase(path)
545 folded = self._map.filefoldmap.get(normed, None)
542 folded = self._map.filefoldmap.get(normed, None)
546 if folded is None:
543 if folded is None:
547 folded = self._map.dirfoldmap.get(normed, None)
544 folded = self._map.dirfoldmap.get(normed, None)
548 if folded is None:
545 if folded is None:
549 if isknown:
546 if isknown:
550 folded = path
547 folded = path
551 else:
548 else:
552 # store discovered result in dirfoldmap so that future
549 # store discovered result in dirfoldmap so that future
553 # normalizefile calls don't start matching directories
550 # normalizefile calls don't start matching directories
554 folded = self._discoverpath(path, normed, ignoremissing, exists,
551 folded = self._discoverpath(path, normed, ignoremissing, exists,
555 self._map.dirfoldmap)
552 self._map.dirfoldmap)
556 return folded
553 return folded
557
554
558 def normalize(self, path, isknown=False, ignoremissing=False):
555 def normalize(self, path, isknown=False, ignoremissing=False):
559 '''
556 '''
560 normalize the case of a pathname when on a casefolding filesystem
557 normalize the case of a pathname when on a casefolding filesystem
561
558
562 isknown specifies whether the filename came from walking the
559 isknown specifies whether the filename came from walking the
563 disk, to avoid extra filesystem access.
560 disk, to avoid extra filesystem access.
564
561
565 If ignoremissing is True, missing path are returned
562 If ignoremissing is True, missing path are returned
566 unchanged. Otherwise, we try harder to normalize possibly
563 unchanged. Otherwise, we try harder to normalize possibly
567 existing path components.
564 existing path components.
568
565
569 The normalized case is determined based on the following precedence:
566 The normalized case is determined based on the following precedence:
570
567
571 - version of name already stored in the dirstate
568 - version of name already stored in the dirstate
572 - version of name stored on disk
569 - version of name stored on disk
573 - version provided via command arguments
570 - version provided via command arguments
574 '''
571 '''
575
572
576 if self._checkcase:
573 if self._checkcase:
577 return self._normalize(path, isknown, ignoremissing)
574 return self._normalize(path, isknown, ignoremissing)
578 return path
575 return path
579
576
580 def clear(self):
577 def clear(self):
581 self._map.clear()
578 self._map.clear()
582 self._lastnormaltime = 0
579 self._lastnormaltime = 0
583 self._updatedfiles.clear()
580 self._updatedfiles.clear()
584 self._dirty = True
581 self._dirty = True
585
582
586 def rebuild(self, parent, allfiles, changedfiles=None):
583 def rebuild(self, parent, allfiles, changedfiles=None):
587 if changedfiles is None:
584 if changedfiles is None:
588 # Rebuild entire dirstate
585 # Rebuild entire dirstate
589 changedfiles = allfiles
586 changedfiles = allfiles
590 lastnormaltime = self._lastnormaltime
587 lastnormaltime = self._lastnormaltime
591 self.clear()
588 self.clear()
592 self._lastnormaltime = lastnormaltime
589 self._lastnormaltime = lastnormaltime
593
590
594 if self._origpl is None:
591 if self._origpl is None:
595 self._origpl = self._pl
592 self._origpl = self._pl
596 self._map.setparents(parent, nullid)
593 self._map.setparents(parent, nullid)
597 for f in changedfiles:
594 for f in changedfiles:
598 if f in allfiles:
595 if f in allfiles:
599 self.normallookup(f)
596 self.normallookup(f)
600 else:
597 else:
601 self.drop(f)
598 self.drop(f)
602
599
603 self._dirty = True
600 self._dirty = True
604
601
605 def identity(self):
602 def identity(self):
606 '''Return identity of dirstate itself to detect changing in storage
603 '''Return identity of dirstate itself to detect changing in storage
607
604
608 If identity of previous dirstate is equal to this, writing
605 If identity of previous dirstate is equal to this, writing
609 changes based on the former dirstate out can keep consistency.
606 changes based on the former dirstate out can keep consistency.
610 '''
607 '''
611 return self._map.identity
608 return self._map.identity
612
609
613 def write(self, tr):
610 def write(self, tr):
614 if not self._dirty:
611 if not self._dirty:
615 return
612 return
616
613
617 filename = self._filename
614 filename = self._filename
618 if tr:
615 if tr:
619 # 'dirstate.write()' is not only for writing in-memory
616 # 'dirstate.write()' is not only for writing in-memory
620 # changes out, but also for dropping ambiguous timestamp.
617 # changes out, but also for dropping ambiguous timestamp.
621 # delayed writing re-raise "ambiguous timestamp issue".
618 # delayed writing re-raise "ambiguous timestamp issue".
622 # See also the wiki page below for detail:
619 # See also the wiki page below for detail:
623 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
620 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
624
621
625 # emulate dropping timestamp in 'parsers.pack_dirstate'
622 # emulate dropping timestamp in 'parsers.pack_dirstate'
626 now = _getfsnow(self._opener)
623 now = _getfsnow(self._opener)
627 self._map.clearambiguoustimes(self._updatedfiles, now)
624 self._map.clearambiguoustimes(self._updatedfiles, now)
628
625
629 # emulate that all 'dirstate.normal' results are written out
626 # emulate that all 'dirstate.normal' results are written out
630 self._lastnormaltime = 0
627 self._lastnormaltime = 0
631 self._updatedfiles.clear()
628 self._updatedfiles.clear()
632
629
633 # delay writing in-memory changes out
630 # delay writing in-memory changes out
634 tr.addfilegenerator('dirstate', (self._filename,),
631 tr.addfilegenerator('dirstate', (self._filename,),
635 self._writedirstate, location='plain')
632 self._writedirstate, location='plain')
636 return
633 return
637
634
638 st = self._opener(filename, "w", atomictemp=True, checkambig=True)
635 st = self._opener(filename, "w", atomictemp=True, checkambig=True)
639 self._writedirstate(st)
636 self._writedirstate(st)
640
637
641 def addparentchangecallback(self, category, callback):
638 def addparentchangecallback(self, category, callback):
642 """add a callback to be called when the wd parents are changed
639 """add a callback to be called when the wd parents are changed
643
640
644 Callback will be called with the following arguments:
641 Callback will be called with the following arguments:
645 dirstate, (oldp1, oldp2), (newp1, newp2)
642 dirstate, (oldp1, oldp2), (newp1, newp2)
646
643
647 Category is a unique identifier to allow overwriting an old callback
644 Category is a unique identifier to allow overwriting an old callback
648 with a newer callback.
645 with a newer callback.
649 """
646 """
650 self._plchangecallbacks[category] = callback
647 self._plchangecallbacks[category] = callback
651
648
652 def _writedirstate(self, st):
649 def _writedirstate(self, st):
653 # notify callbacks about parents change
650 # notify callbacks about parents change
654 if self._origpl is not None and self._origpl != self._pl:
651 if self._origpl is not None and self._origpl != self._pl:
655 for c, callback in sorted(self._plchangecallbacks.iteritems()):
652 for c, callback in sorted(self._plchangecallbacks.iteritems()):
656 callback(self, self._origpl, self._pl)
653 callback(self, self._origpl, self._pl)
657 self._origpl = None
654 self._origpl = None
658 # use the modification time of the newly created temporary file as the
655 # use the modification time of the newly created temporary file as the
659 # filesystem's notion of 'now'
656 # filesystem's notion of 'now'
660 now = util.fstat(st).st_mtime & _rangemask
657 now = util.fstat(st).st_mtime & _rangemask
661
658
662 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
659 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
663 # timestamp of each entries in dirstate, because of 'now > mtime'
660 # timestamp of each entries in dirstate, because of 'now > mtime'
664 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite')
661 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite')
665 if delaywrite > 0:
662 if delaywrite > 0:
666 # do we have any files to delay for?
663 # do we have any files to delay for?
667 for f, e in self._map.iteritems():
664 for f, e in self._map.iteritems():
668 if e[0] == 'n' and e[3] == now:
665 if e[0] == 'n' and e[3] == now:
669 import time # to avoid useless import
666 import time # to avoid useless import
670 # rather than sleep n seconds, sleep until the next
667 # rather than sleep n seconds, sleep until the next
671 # multiple of n seconds
668 # multiple of n seconds
672 clock = time.time()
669 clock = time.time()
673 start = int(clock) - (int(clock) % delaywrite)
670 start = int(clock) - (int(clock) % delaywrite)
674 end = start + delaywrite
671 end = start + delaywrite
675 time.sleep(end - clock)
672 time.sleep(end - clock)
676 now = end # trust our estimate that the end is near now
673 now = end # trust our estimate that the end is near now
677 break
674 break
678
675
679 self._map.write(st, now)
676 self._map.write(st, now)
680 self._lastnormaltime = 0
677 self._lastnormaltime = 0
681 self._dirty = False
678 self._dirty = False
682
679
683 def _dirignore(self, f):
680 def _dirignore(self, f):
684 if f == '.':
681 if f == '.':
685 return False
682 return False
686 if self._ignore(f):
683 if self._ignore(f):
687 return True
684 return True
688 for p in util.finddirs(f):
685 for p in util.finddirs(f):
689 if self._ignore(p):
686 if self._ignore(p):
690 return True
687 return True
691 return False
688 return False
692
689
693 def _ignorefiles(self):
690 def _ignorefiles(self):
694 files = []
691 files = []
695 if os.path.exists(self._join('.hgignore')):
692 if os.path.exists(self._join('.hgignore')):
696 files.append(self._join('.hgignore'))
693 files.append(self._join('.hgignore'))
697 for name, path in self._ui.configitems("ui"):
694 for name, path in self._ui.configitems("ui"):
698 if name == 'ignore' or name.startswith('ignore.'):
695 if name == 'ignore' or name.startswith('ignore.'):
699 # we need to use os.path.join here rather than self._join
696 # we need to use os.path.join here rather than self._join
700 # because path is arbitrary and user-specified
697 # because path is arbitrary and user-specified
701 files.append(os.path.join(self._rootdir, util.expandpath(path)))
698 files.append(os.path.join(self._rootdir, util.expandpath(path)))
702 return files
699 return files
703
700
704 def _ignorefileandline(self, f):
701 def _ignorefileandline(self, f):
705 files = collections.deque(self._ignorefiles())
702 files = collections.deque(self._ignorefiles())
706 visited = set()
703 visited = set()
707 while files:
704 while files:
708 i = files.popleft()
705 i = files.popleft()
709 patterns = matchmod.readpatternfile(i, self._ui.warn,
706 patterns = matchmod.readpatternfile(i, self._ui.warn,
710 sourceinfo=True)
707 sourceinfo=True)
711 for pattern, lineno, line in patterns:
708 for pattern, lineno, line in patterns:
712 kind, p = matchmod._patsplit(pattern, 'glob')
709 kind, p = matchmod._patsplit(pattern, 'glob')
713 if kind == "subinclude":
710 if kind == "subinclude":
714 if p not in visited:
711 if p not in visited:
715 files.append(p)
712 files.append(p)
716 continue
713 continue
717 m = matchmod.match(self._root, '', [], [pattern],
714 m = matchmod.match(self._root, '', [], [pattern],
718 warn=self._ui.warn)
715 warn=self._ui.warn)
719 if m(f):
716 if m(f):
720 return (i, lineno, line)
717 return (i, lineno, line)
721 visited.add(i)
718 visited.add(i)
722 return (None, -1, "")
719 return (None, -1, "")
723
720
724 def _walkexplicit(self, match, subrepos):
721 def _walkexplicit(self, match, subrepos):
725 '''Get stat data about the files explicitly specified by match.
722 '''Get stat data about the files explicitly specified by match.
726
723
727 Return a triple (results, dirsfound, dirsnotfound).
724 Return a triple (results, dirsfound, dirsnotfound).
728 - results is a mapping from filename to stat result. It also contains
725 - results is a mapping from filename to stat result. It also contains
729 listings mapping subrepos and .hg to None.
726 listings mapping subrepos and .hg to None.
730 - dirsfound is a list of files found to be directories.
727 - dirsfound is a list of files found to be directories.
731 - dirsnotfound is a list of files that the dirstate thinks are
728 - dirsnotfound is a list of files that the dirstate thinks are
732 directories and that were not found.'''
729 directories and that were not found.'''
733
730
734 def badtype(mode):
731 def badtype(mode):
735 kind = _('unknown')
732 kind = _('unknown')
736 if stat.S_ISCHR(mode):
733 if stat.S_ISCHR(mode):
737 kind = _('character device')
734 kind = _('character device')
738 elif stat.S_ISBLK(mode):
735 elif stat.S_ISBLK(mode):
739 kind = _('block device')
736 kind = _('block device')
740 elif stat.S_ISFIFO(mode):
737 elif stat.S_ISFIFO(mode):
741 kind = _('fifo')
738 kind = _('fifo')
742 elif stat.S_ISSOCK(mode):
739 elif stat.S_ISSOCK(mode):
743 kind = _('socket')
740 kind = _('socket')
744 elif stat.S_ISDIR(mode):
741 elif stat.S_ISDIR(mode):
745 kind = _('directory')
742 kind = _('directory')
746 return _('unsupported file type (type is %s)') % kind
743 return _('unsupported file type (type is %s)') % kind
747
744
748 matchedir = match.explicitdir
745 matchedir = match.explicitdir
749 badfn = match.bad
746 badfn = match.bad
750 dmap = self._map
747 dmap = self._map
751 lstat = os.lstat
748 lstat = os.lstat
752 getkind = stat.S_IFMT
749 getkind = stat.S_IFMT
753 dirkind = stat.S_IFDIR
750 dirkind = stat.S_IFDIR
754 regkind = stat.S_IFREG
751 regkind = stat.S_IFREG
755 lnkkind = stat.S_IFLNK
752 lnkkind = stat.S_IFLNK
756 join = self._join
753 join = self._join
757 dirsfound = []
754 dirsfound = []
758 foundadd = dirsfound.append
755 foundadd = dirsfound.append
759 dirsnotfound = []
756 dirsnotfound = []
760 notfoundadd = dirsnotfound.append
757 notfoundadd = dirsnotfound.append
761
758
762 if not match.isexact() and self._checkcase:
759 if not match.isexact() and self._checkcase:
763 normalize = self._normalize
760 normalize = self._normalize
764 else:
761 else:
765 normalize = None
762 normalize = None
766
763
767 files = sorted(match.files())
764 files = sorted(match.files())
768 subrepos.sort()
765 subrepos.sort()
769 i, j = 0, 0
766 i, j = 0, 0
770 while i < len(files) and j < len(subrepos):
767 while i < len(files) and j < len(subrepos):
771 subpath = subrepos[j] + "/"
768 subpath = subrepos[j] + "/"
772 if files[i] < subpath:
769 if files[i] < subpath:
773 i += 1
770 i += 1
774 continue
771 continue
775 while i < len(files) and files[i].startswith(subpath):
772 while i < len(files) and files[i].startswith(subpath):
776 del files[i]
773 del files[i]
777 j += 1
774 j += 1
778
775
779 if not files or '.' in files:
776 if not files or '.' in files:
780 files = ['.']
777 files = ['.']
781 results = dict.fromkeys(subrepos)
778 results = dict.fromkeys(subrepos)
782 results['.hg'] = None
779 results['.hg'] = None
783
780
784 alldirs = None
781 alldirs = None
785 for ff in files:
782 for ff in files:
786 # constructing the foldmap is expensive, so don't do it for the
783 # constructing the foldmap is expensive, so don't do it for the
787 # common case where files is ['.']
784 # common case where files is ['.']
788 if normalize and ff != '.':
785 if normalize and ff != '.':
789 nf = normalize(ff, False, True)
786 nf = normalize(ff, False, True)
790 else:
787 else:
791 nf = ff
788 nf = ff
792 if nf in results:
789 if nf in results:
793 continue
790 continue
794
791
795 try:
792 try:
796 st = lstat(join(nf))
793 st = lstat(join(nf))
797 kind = getkind(st.st_mode)
794 kind = getkind(st.st_mode)
798 if kind == dirkind:
795 if kind == dirkind:
799 if nf in dmap:
796 if nf in dmap:
800 # file replaced by dir on disk but still in dirstate
797 # file replaced by dir on disk but still in dirstate
801 results[nf] = None
798 results[nf] = None
802 if matchedir:
799 if matchedir:
803 matchedir(nf)
800 matchedir(nf)
804 foundadd((nf, ff))
801 foundadd((nf, ff))
805 elif kind == regkind or kind == lnkkind:
802 elif kind == regkind or kind == lnkkind:
806 results[nf] = st
803 results[nf] = st
807 else:
804 else:
808 badfn(ff, badtype(kind))
805 badfn(ff, badtype(kind))
809 if nf in dmap:
806 if nf in dmap:
810 results[nf] = None
807 results[nf] = None
811 except OSError as inst: # nf not found on disk - it is dirstate only
808 except OSError as inst: # nf not found on disk - it is dirstate only
812 if nf in dmap: # does it exactly match a missing file?
809 if nf in dmap: # does it exactly match a missing file?
813 results[nf] = None
810 results[nf] = None
814 else: # does it match a missing directory?
811 else: # does it match a missing directory?
815 if alldirs is None:
812 if alldirs is None:
816 alldirs = util.dirs(dmap._map)
813 alldirs = util.dirs(dmap._map)
817 if nf in alldirs:
814 if nf in alldirs:
818 if matchedir:
815 if matchedir:
819 matchedir(nf)
816 matchedir(nf)
820 notfoundadd(nf)
817 notfoundadd(nf)
821 else:
818 else:
822 badfn(ff, encoding.strtolocal(inst.strerror))
819 badfn(ff, encoding.strtolocal(inst.strerror))
823
820
824 # Case insensitive filesystems cannot rely on lstat() failing to detect
821 # Case insensitive filesystems cannot rely on lstat() failing to detect
825 # a case-only rename. Prune the stat object for any file that does not
822 # a case-only rename. Prune the stat object for any file that does not
826 # match the case in the filesystem, if there are multiple files that
823 # match the case in the filesystem, if there are multiple files that
827 # normalize to the same path.
824 # normalize to the same path.
828 if match.isexact() and self._checkcase:
825 if match.isexact() and self._checkcase:
829 normed = {}
826 normed = {}
830
827
831 for f, st in results.iteritems():
828 for f, st in results.iteritems():
832 if st is None:
829 if st is None:
833 continue
830 continue
834
831
835 nc = util.normcase(f)
832 nc = util.normcase(f)
836 paths = normed.get(nc)
833 paths = normed.get(nc)
837
834
838 if paths is None:
835 if paths is None:
839 paths = set()
836 paths = set()
840 normed[nc] = paths
837 normed[nc] = paths
841
838
842 paths.add(f)
839 paths.add(f)
843
840
844 for norm, paths in normed.iteritems():
841 for norm, paths in normed.iteritems():
845 if len(paths) > 1:
842 if len(paths) > 1:
846 for path in paths:
843 for path in paths:
847 folded = self._discoverpath(path, norm, True, None,
844 folded = self._discoverpath(path, norm, True, None,
848 self._map.dirfoldmap)
845 self._map.dirfoldmap)
849 if path != folded:
846 if path != folded:
850 results[path] = None
847 results[path] = None
851
848
852 return results, dirsfound, dirsnotfound
849 return results, dirsfound, dirsnotfound
853
850
854 def walk(self, match, subrepos, unknown, ignored, full=True):
851 def walk(self, match, subrepos, unknown, ignored, full=True):
855 '''
852 '''
856 Walk recursively through the directory tree, finding all files
853 Walk recursively through the directory tree, finding all files
857 matched by match.
854 matched by match.
858
855
859 If full is False, maybe skip some known-clean files.
856 If full is False, maybe skip some known-clean files.
860
857
861 Return a dict mapping filename to stat-like object (either
858 Return a dict mapping filename to stat-like object (either
862 mercurial.osutil.stat instance or return value of os.stat()).
859 mercurial.osutil.stat instance or return value of os.stat()).
863
860
864 '''
861 '''
865 # full is a flag that extensions that hook into walk can use -- this
862 # full is a flag that extensions that hook into walk can use -- this
866 # implementation doesn't use it at all. This satisfies the contract
863 # implementation doesn't use it at all. This satisfies the contract
867 # because we only guarantee a "maybe".
864 # because we only guarantee a "maybe".
868
865
869 if ignored:
866 if ignored:
870 ignore = util.never
867 ignore = util.never
871 dirignore = util.never
868 dirignore = util.never
872 elif unknown:
869 elif unknown:
873 ignore = self._ignore
870 ignore = self._ignore
874 dirignore = self._dirignore
871 dirignore = self._dirignore
875 else:
872 else:
876 # if not unknown and not ignored, drop dir recursion and step 2
873 # if not unknown and not ignored, drop dir recursion and step 2
877 ignore = util.always
874 ignore = util.always
878 dirignore = util.always
875 dirignore = util.always
879
876
880 matchfn = match.matchfn
877 matchfn = match.matchfn
881 matchalways = match.always()
878 matchalways = match.always()
882 matchtdir = match.traversedir
879 matchtdir = match.traversedir
883 dmap = self._map
880 dmap = self._map
884 listdir = util.listdir
881 listdir = util.listdir
885 lstat = os.lstat
882 lstat = os.lstat
886 dirkind = stat.S_IFDIR
883 dirkind = stat.S_IFDIR
887 regkind = stat.S_IFREG
884 regkind = stat.S_IFREG
888 lnkkind = stat.S_IFLNK
885 lnkkind = stat.S_IFLNK
889 join = self._join
886 join = self._join
890
887
891 exact = skipstep3 = False
888 exact = skipstep3 = False
892 if match.isexact(): # match.exact
889 if match.isexact(): # match.exact
893 exact = True
890 exact = True
894 dirignore = util.always # skip step 2
891 dirignore = util.always # skip step 2
895 elif match.prefix(): # match.match, no patterns
892 elif match.prefix(): # match.match, no patterns
896 skipstep3 = True
893 skipstep3 = True
897
894
898 if not exact and self._checkcase:
895 if not exact and self._checkcase:
899 normalize = self._normalize
896 normalize = self._normalize
900 normalizefile = self._normalizefile
897 normalizefile = self._normalizefile
901 skipstep3 = False
898 skipstep3 = False
902 else:
899 else:
903 normalize = self._normalize
900 normalize = self._normalize
904 normalizefile = None
901 normalizefile = None
905
902
906 # step 1: find all explicit files
903 # step 1: find all explicit files
907 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
904 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
908
905
909 skipstep3 = skipstep3 and not (work or dirsnotfound)
906 skipstep3 = skipstep3 and not (work or dirsnotfound)
910 work = [d for d in work if not dirignore(d[0])]
907 work = [d for d in work if not dirignore(d[0])]
911
908
912 # step 2: visit subdirectories
909 # step 2: visit subdirectories
913 def traverse(work, alreadynormed):
910 def traverse(work, alreadynormed):
914 wadd = work.append
911 wadd = work.append
915 while work:
912 while work:
916 nd = work.pop()
913 nd = work.pop()
917 if not match.visitdir(nd):
914 if not match.visitdir(nd):
918 continue
915 continue
919 skip = None
916 skip = None
920 if nd == '.':
917 if nd == '.':
921 nd = ''
918 nd = ''
922 else:
919 else:
923 skip = '.hg'
920 skip = '.hg'
924 try:
921 try:
925 entries = listdir(join(nd), stat=True, skip=skip)
922 entries = listdir(join(nd), stat=True, skip=skip)
926 except OSError as inst:
923 except OSError as inst:
927 if inst.errno in (errno.EACCES, errno.ENOENT):
924 if inst.errno in (errno.EACCES, errno.ENOENT):
928 match.bad(self.pathto(nd),
925 match.bad(self.pathto(nd),
929 encoding.strtolocal(inst.strerror))
926 encoding.strtolocal(inst.strerror))
930 continue
927 continue
931 raise
928 raise
932 for f, kind, st in entries:
929 for f, kind, st in entries:
933 if normalizefile:
930 if normalizefile:
934 # even though f might be a directory, we're only
931 # even though f might be a directory, we're only
935 # interested in comparing it to files currently in the
932 # interested in comparing it to files currently in the
936 # dmap -- therefore normalizefile is enough
933 # dmap -- therefore normalizefile is enough
937 nf = normalizefile(nd and (nd + "/" + f) or f, True,
934 nf = normalizefile(nd and (nd + "/" + f) or f, True,
938 True)
935 True)
939 else:
936 else:
940 nf = nd and (nd + "/" + f) or f
937 nf = nd and (nd + "/" + f) or f
941 if nf not in results:
938 if nf not in results:
942 if kind == dirkind:
939 if kind == dirkind:
943 if not ignore(nf):
940 if not ignore(nf):
944 if matchtdir:
941 if matchtdir:
945 matchtdir(nf)
942 matchtdir(nf)
946 wadd(nf)
943 wadd(nf)
947 if nf in dmap and (matchalways or matchfn(nf)):
944 if nf in dmap and (matchalways or matchfn(nf)):
948 results[nf] = None
945 results[nf] = None
949 elif kind == regkind or kind == lnkkind:
946 elif kind == regkind or kind == lnkkind:
950 if nf in dmap:
947 if nf in dmap:
951 if matchalways or matchfn(nf):
948 if matchalways or matchfn(nf):
952 results[nf] = st
949 results[nf] = st
953 elif ((matchalways or matchfn(nf))
950 elif ((matchalways or matchfn(nf))
954 and not ignore(nf)):
951 and not ignore(nf)):
955 # unknown file -- normalize if necessary
952 # unknown file -- normalize if necessary
956 if not alreadynormed:
953 if not alreadynormed:
957 nf = normalize(nf, False, True)
954 nf = normalize(nf, False, True)
958 results[nf] = st
955 results[nf] = st
959 elif nf in dmap and (matchalways or matchfn(nf)):
956 elif nf in dmap and (matchalways or matchfn(nf)):
960 results[nf] = None
957 results[nf] = None
961
958
962 for nd, d in work:
959 for nd, d in work:
963 # alreadynormed means that processwork doesn't have to do any
960 # alreadynormed means that processwork doesn't have to do any
964 # expensive directory normalization
961 # expensive directory normalization
965 alreadynormed = not normalize or nd == d
962 alreadynormed = not normalize or nd == d
966 traverse([d], alreadynormed)
963 traverse([d], alreadynormed)
967
964
968 for s in subrepos:
965 for s in subrepos:
969 del results[s]
966 del results[s]
970 del results['.hg']
967 del results['.hg']
971
968
972 # step 3: visit remaining files from dmap
969 # step 3: visit remaining files from dmap
973 if not skipstep3 and not exact:
970 if not skipstep3 and not exact:
974 # If a dmap file is not in results yet, it was either
971 # If a dmap file is not in results yet, it was either
975 # a) not matching matchfn b) ignored, c) missing, or d) under a
972 # a) not matching matchfn b) ignored, c) missing, or d) under a
976 # symlink directory.
973 # symlink directory.
977 if not results and matchalways:
974 if not results and matchalways:
978 visit = [f for f in dmap]
975 visit = [f for f in dmap]
979 else:
976 else:
980 visit = [f for f in dmap if f not in results and matchfn(f)]
977 visit = [f for f in dmap if f not in results and matchfn(f)]
981 visit.sort()
978 visit.sort()
982
979
983 if unknown:
980 if unknown:
984 # unknown == True means we walked all dirs under the roots
981 # unknown == True means we walked all dirs under the roots
985 # that wasn't ignored, and everything that matched was stat'ed
982 # that wasn't ignored, and everything that matched was stat'ed
986 # and is already in results.
983 # and is already in results.
987 # The rest must thus be ignored or under a symlink.
984 # The rest must thus be ignored or under a symlink.
988 audit_path = pathutil.pathauditor(self._root, cached=True)
985 audit_path = pathutil.pathauditor(self._root, cached=True)
989
986
990 for nf in iter(visit):
987 for nf in iter(visit):
991 # If a stat for the same file was already added with a
988 # If a stat for the same file was already added with a
992 # different case, don't add one for this, since that would
989 # different case, don't add one for this, since that would
993 # make it appear as if the file exists under both names
990 # make it appear as if the file exists under both names
994 # on disk.
991 # on disk.
995 if (normalizefile and
992 if (normalizefile and
996 normalizefile(nf, True, True) in results):
993 normalizefile(nf, True, True) in results):
997 results[nf] = None
994 results[nf] = None
998 # Report ignored items in the dmap as long as they are not
995 # Report ignored items in the dmap as long as they are not
999 # under a symlink directory.
996 # under a symlink directory.
1000 elif audit_path.check(nf):
997 elif audit_path.check(nf):
1001 try:
998 try:
1002 results[nf] = lstat(join(nf))
999 results[nf] = lstat(join(nf))
1003 # file was just ignored, no links, and exists
1000 # file was just ignored, no links, and exists
1004 except OSError:
1001 except OSError:
1005 # file doesn't exist
1002 # file doesn't exist
1006 results[nf] = None
1003 results[nf] = None
1007 else:
1004 else:
1008 # It's either missing or under a symlink directory
1005 # It's either missing or under a symlink directory
1009 # which we in this case report as missing
1006 # which we in this case report as missing
1010 results[nf] = None
1007 results[nf] = None
1011 else:
1008 else:
1012 # We may not have walked the full directory tree above,
1009 # We may not have walked the full directory tree above,
1013 # so stat and check everything we missed.
1010 # so stat and check everything we missed.
1014 iv = iter(visit)
1011 iv = iter(visit)
1015 for st in util.statfiles([join(i) for i in visit]):
1012 for st in util.statfiles([join(i) for i in visit]):
1016 results[next(iv)] = st
1013 results[next(iv)] = st
1017 return results
1014 return results
1018
1015
1019 def status(self, match, subrepos, ignored, clean, unknown):
1016 def status(self, match, subrepos, ignored, clean, unknown):
1020 '''Determine the status of the working copy relative to the
1017 '''Determine the status of the working copy relative to the
1021 dirstate and return a pair of (unsure, status), where status is of type
1018 dirstate and return a pair of (unsure, status), where status is of type
1022 scmutil.status and:
1019 scmutil.status and:
1023
1020
1024 unsure:
1021 unsure:
1025 files that might have been modified since the dirstate was
1022 files that might have been modified since the dirstate was
1026 written, but need to be read to be sure (size is the same
1023 written, but need to be read to be sure (size is the same
1027 but mtime differs)
1024 but mtime differs)
1028 status.modified:
1025 status.modified:
1029 files that have definitely been modified since the dirstate
1026 files that have definitely been modified since the dirstate
1030 was written (different size or mode)
1027 was written (different size or mode)
1031 status.clean:
1028 status.clean:
1032 files that have definitely not been modified since the
1029 files that have definitely not been modified since the
1033 dirstate was written
1030 dirstate was written
1034 '''
1031 '''
1035 listignored, listclean, listunknown = ignored, clean, unknown
1032 listignored, listclean, listunknown = ignored, clean, unknown
1036 lookup, modified, added, unknown, ignored = [], [], [], [], []
1033 lookup, modified, added, unknown, ignored = [], [], [], [], []
1037 removed, deleted, clean = [], [], []
1034 removed, deleted, clean = [], [], []
1038
1035
1039 dmap = self._map
1036 dmap = self._map
1040 dmap.preload()
1037 dmap.preload()
1041 dcontains = dmap.__contains__
1038 dcontains = dmap.__contains__
1042 dget = dmap.__getitem__
1039 dget = dmap.__getitem__
1043 ladd = lookup.append # aka "unsure"
1040 ladd = lookup.append # aka "unsure"
1044 madd = modified.append
1041 madd = modified.append
1045 aadd = added.append
1042 aadd = added.append
1046 uadd = unknown.append
1043 uadd = unknown.append
1047 iadd = ignored.append
1044 iadd = ignored.append
1048 radd = removed.append
1045 radd = removed.append
1049 dadd = deleted.append
1046 dadd = deleted.append
1050 cadd = clean.append
1047 cadd = clean.append
1051 mexact = match.exact
1048 mexact = match.exact
1052 dirignore = self._dirignore
1049 dirignore = self._dirignore
1053 checkexec = self._checkexec
1050 checkexec = self._checkexec
1054 copymap = self._map.copymap
1051 copymap = self._map.copymap
1055 lastnormaltime = self._lastnormaltime
1052 lastnormaltime = self._lastnormaltime
1056
1053
1057 # We need to do full walks when either
1054 # We need to do full walks when either
1058 # - we're listing all clean files, or
1055 # - we're listing all clean files, or
1059 # - match.traversedir does something, because match.traversedir should
1056 # - match.traversedir does something, because match.traversedir should
1060 # be called for every dir in the working dir
1057 # be called for every dir in the working dir
1061 full = listclean or match.traversedir is not None
1058 full = listclean or match.traversedir is not None
1062 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1059 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1063 full=full).iteritems():
1060 full=full).iteritems():
1064 if not dcontains(fn):
1061 if not dcontains(fn):
1065 if (listignored or mexact(fn)) and dirignore(fn):
1062 if (listignored or mexact(fn)) and dirignore(fn):
1066 if listignored:
1063 if listignored:
1067 iadd(fn)
1064 iadd(fn)
1068 else:
1065 else:
1069 uadd(fn)
1066 uadd(fn)
1070 continue
1067 continue
1071
1068
1072 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1069 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1073 # written like that for performance reasons. dmap[fn] is not a
1070 # written like that for performance reasons. dmap[fn] is not a
1074 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1071 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1075 # opcode has fast paths when the value to be unpacked is a tuple or
1072 # opcode has fast paths when the value to be unpacked is a tuple or
1076 # a list, but falls back to creating a full-fledged iterator in
1073 # a list, but falls back to creating a full-fledged iterator in
1077 # general. That is much slower than simply accessing and storing the
1074 # general. That is much slower than simply accessing and storing the
1078 # tuple members one by one.
1075 # tuple members one by one.
1079 t = dget(fn)
1076 t = dget(fn)
1080 state = t[0]
1077 state = t[0]
1081 mode = t[1]
1078 mode = t[1]
1082 size = t[2]
1079 size = t[2]
1083 time = t[3]
1080 time = t[3]
1084
1081
1085 if not st and state in "nma":
1082 if not st and state in "nma":
1086 dadd(fn)
1083 dadd(fn)
1087 elif state == 'n':
1084 elif state == 'n':
1088 if (size >= 0 and
1085 if (size >= 0 and
1089 ((size != st.st_size and size != st.st_size & _rangemask)
1086 ((size != st.st_size and size != st.st_size & _rangemask)
1090 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1087 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1091 or size == -2 # other parent
1088 or size == -2 # other parent
1092 or fn in copymap):
1089 or fn in copymap):
1093 madd(fn)
1090 madd(fn)
1094 elif time != st.st_mtime and time != st.st_mtime & _rangemask:
1091 elif time != st.st_mtime and time != st.st_mtime & _rangemask:
1095 ladd(fn)
1092 ladd(fn)
1096 elif st.st_mtime == lastnormaltime:
1093 elif st.st_mtime == lastnormaltime:
1097 # fn may have just been marked as normal and it may have
1094 # fn may have just been marked as normal and it may have
1098 # changed in the same second without changing its size.
1095 # changed in the same second without changing its size.
1099 # This can happen if we quickly do multiple commits.
1096 # This can happen if we quickly do multiple commits.
1100 # Force lookup, so we don't miss such a racy file change.
1097 # Force lookup, so we don't miss such a racy file change.
1101 ladd(fn)
1098 ladd(fn)
1102 elif listclean:
1099 elif listclean:
1103 cadd(fn)
1100 cadd(fn)
1104 elif state == 'm':
1101 elif state == 'm':
1105 madd(fn)
1102 madd(fn)
1106 elif state == 'a':
1103 elif state == 'a':
1107 aadd(fn)
1104 aadd(fn)
1108 elif state == 'r':
1105 elif state == 'r':
1109 radd(fn)
1106 radd(fn)
1110
1107
1111 return (lookup, scmutil.status(modified, added, removed, deleted,
1108 return (lookup, scmutil.status(modified, added, removed, deleted,
1112 unknown, ignored, clean))
1109 unknown, ignored, clean))
1113
1110
def matches(self, match):
    '''
    return files in the dirstate (in whatever state) filtered by match
    '''
    dmap = self._map
    if match.always():
        # Everything matches: the dirstate's own key set is the answer.
        return dmap.keys()
    candidates = match.files()
    if match.isexact():
        # fast path -- scan the (typically much smaller) pattern list
        # instead of the whole dirstate
        return [name for name in candidates if name in dmap]
    if match.prefix() and all(name in dmap for name in candidates):
        # fast path -- every root is itself a tracked file, so the roots
        # are the complete result
        return list(candidates)
    return [name for name in dmap if match(name)]
1131
1128
1132 def _actualfilename(self, tr):
1129 def _actualfilename(self, tr):
1133 if tr:
1130 if tr:
1134 return self._pendingfilename
1131 return self._pendingfilename
1135 else:
1132 else:
1136 return self._filename
1133 return self._filename
1137
1134
def savebackup(self, tr, backupname):
    '''Save current dirstate into backup file'''
    # While a transaction runs, write to the pending file so the backup
    # matches what tr.writepending would publish.
    filename = self._actualfilename(tr)
    assert backupname != filename

    # use '_writedirstate' instead of 'write' to write changes certainly,
    # because the latter omits writing out if transaction is running.
    # output file will be used to create backup of dirstate at this point.
    if self._dirty or not self._opener.exists(filename):
        self._writedirstate(self._opener(filename, "w", atomictemp=True,
                                         checkambig=True))

    if tr:
        # ensure that subsequent tr.writepending returns True for
        # changes written out above, even if dirstate is never
        # changed after this
        tr.addfilegenerator('dirstate', (self._filename,),
                            self._writedirstate, location='plain')

        # ensure that pending file written above is unlinked at
        # failure, even if tr.writepending isn't invoked until the
        # end of this transaction
        tr.registertmp(filename, location='plain')

    # Remove any stale backup before linking the fresh one into place.
    self._opener.tryunlink(backupname)
    # hardlink backup is okay because _writedirstate is always called
    # with an "atomictemp=True" file.
    util.copyfile(self._opener.join(filename),
                  self._opener.join(backupname), hardlink=True)
1167
1164
def restorebackup(self, tr, backupname):
    '''Restore dirstate by backup file'''
    # this "invalidate()" prevents "wlock.release()" from writing
    # changes of dirstate out after restoring from backup file
    self.invalidate()
    filename = self._actualfilename(tr)
    o = self._opener
    if util.samefile(o.join(backupname), o.join(filename)):
        # The backup was hardlinked to the live file (see savebackup);
        # dropping the extra name is enough.
        o.unlink(backupname)
    else:
        o.rename(backupname, filename, checkambig=True)
1179
1176
def clearbackup(self, tr, backupname):
    '''Clear backup file'''
    # NOTE(review): tr is unused here; kept for interface symmetry with
    # savebackup/restorebackup.
    opener = self._opener
    opener.unlink(backupname)
1183
1180
class dirstatemap(object):
    """Map encapsulating the dirstate's contents.

    The dirstate contains the following state:

    - `identity` is the identity of the dirstate file, which can be used to
      detect when changes have occurred to the dirstate file.

    - `parents` is a pair containing the parents of the working copy. The
      parents are updated by calling `setparents`.

    - the state map maps filenames to tuples of (state, mode, size, mtime),
      where state is a single character representing 'normal', 'added',
      'removed', or 'merged'. It is read by treating the dirstate as a
      dict.  File state is updated by calling the `addfile`, `removefile` and
      `dropfile` methods.

    - `copymap` maps destination filenames to their source filename.

    The dirstate also provides the following views onto the state:

    - `nonnormalset` is a set of the filenames that have state other
      than 'normal', or are normal but have an mtime of -1 ('normallookup').

    - `otherparentset` is a set of the filenames that are marked as coming
      from the second parent when the dirstate is currently being merged.

    - `dirs` is a set-like object containing all the directories that contain
      files in the dirstate, excluding any files that are marked as removed.

    - `filefoldmap` is a dict mapping normalized filenames to the denormalized
      form that they appear as in the dirstate.

    - `dirfoldmap` is a dict mapping normalized directory names to the
      denormalized form that they appear as in the dirstate.

    Once instantiated, the filefoldmap and dirfoldmap views must be maintained
    by the caller.
    """

    def __init__(self, ui, opener, root):
        self._ui = ui
        self._opener = opener
        self._root = root
        self._filename = 'dirstate'

        # parents are read lazily by parents(); setparents() marks them dirty
        self._parents = None
        self._dirtyparents = False

        # for consistent view between _pl() and _read() invocations
        self._pendingmode = None

    @propertycache
    def _map(self):
        # Replace the propertycache placeholder with a real dict *before*
        # read() runs, because read() populates self._map in place (and may
        # rebind it to a presized dict).
        self._map = {}
        self.read()
        return self._map

    @propertycache
    def copymap(self):
        self.copymap = {}
        # Force a dirstate read; parsing fills copymap as a side effect.
        self._map
        return self.copymap

    def clear(self):
        self._map.clear()
        self.copymap.clear()
        self.setparents(nullid, nullid)
        # Drop every derived view so it is recomputed from the (now empty)
        # state map on next access.
        util.clearcachedproperty(self, "dirs")
        util.clearcachedproperty(self, "filefoldmap")
        util.clearcachedproperty(self, "dirfoldmap")
        util.clearcachedproperty(self, "nonnormalset")
        util.clearcachedproperty(self, "otherparentset")

    def iteritems(self):
        return self._map.iteritems()

    def __len__(self):
        return len(self._map)

    def __iter__(self):
        return iter(self._map)

    def get(self, key, default=None):
        return self._map.get(key, default)

    def __contains__(self, key):
        return key in self._map

    def __getitem__(self, key):
        return self._map[key]

    def keys(self):
        return self._map.keys()

    def preload(self):
        """Loads the underlying data, if it's not already loaded"""
        self._map

    def addfile(self, f, oldstate, state, mode, size, mtime):
        """Add a tracked file to the dirstate."""
        # Keep the lazily-built `dirs` view in sync: the file contributes a
        # new path only if it was previously untracked ('?') or removed ('r').
        # Skip entirely if `dirs` has never been materialized.
        if oldstate in "?r" and "dirs" in self.__dict__:
            self.dirs.addpath(f)
        self._map[f] = dirstatetuple(state, mode, size, mtime)
        if state != 'n' or mtime == -1:
            self.nonnormalset.add(f)
        if size == -2:
            # size == -2 marks an entry coming from the other merge parent
            self.otherparentset.add(f)

    def removefile(self, f, oldstate, size):
        """
        Mark a file as removed in the dirstate.

        The `size` parameter is used to store sentinel values that indicate
        the file's previous state.  In the future, we should refactor this
        to be more explicit about what that state is.
        """
        # The file no longer counts toward `dirs` unless it already didn't
        # (untracked or already removed).
        if oldstate not in "?r" and "dirs" in self.__dict__:
            self.dirs.delpath(f)
        self._map[f] = dirstatetuple('r', 0, size, 0)
        self.nonnormalset.add(f)

    def dropfile(self, f, oldstate):
        """
        Remove a file from the dirstate.  Returns True if the file was
        previously recorded.
        """
        exists = self._map.pop(f, None) is not None
        if exists:
            # Removed entries never contributed to `dirs`, so only
            # non-removed states need the path dropped.
            if oldstate != "r" and "dirs" in self.__dict__:
                self.dirs.delpath(f)
            self.nonnormalset.discard(f)
        return exists

    def clearambiguoustimes(self, files, now):
        # An mtime equal to `now` is ambiguous (the file may still change
        # within the same second); force a later lookup by storing -1.
        for f in files:
            e = self.get(f)
            if e is not None and e[0] == 'n' and e[3] == now:
                self._map[f] = dirstatetuple(e[0], e[1], e[2], -1)
                self.nonnormalset.add(f)

    def nonnormalentries(self):
        '''Compute the nonnormal dirstate entries from the dmap'''
        try:
            # Fast path: C implementation computes both sets in one pass.
            return parsers.nonnormalotherparententries(self._map)
        except AttributeError:
            # Pure-Python fallback mirroring the C implementation.
            nonnorm = set()
            otherparent = set()
            for fname, e in self._map.iteritems():
                if e[0] != 'n' or e[3] == -1:
                    nonnorm.add(fname)
                if e[0] == 'n' and e[2] == -2:
                    otherparent.add(fname)
            return nonnorm, otherparent

    @propertycache
    def filefoldmap(self):
        """Returns a dictionary mapping normalized case paths to their
        non-normalized versions.
        """
        try:
            # Fast path: build the map in C when available.
            makefilefoldmap = parsers.make_file_foldmap
        except AttributeError:
            pass
        else:
            return makefilefoldmap(self._map, util.normcasespec,
                                   util.normcasefallback)

        f = {}
        normcase = util.normcase
        for name, s in self._map.iteritems():
            if s[0] != 'r':
                f[normcase(name)] = name
        f['.'] = '.' # prevents useless util.fspath() invocation
        return f

    @propertycache
    def dirs(self):
        """Returns a set-like object containing all the directories in the
        current dirstate.
        """
        # 'r' tells util.dirs to skip entries whose state is 'removed'.
        return util.dirs(self._map, 'r')

    def _opendirstatefile(self):
        fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
        if self._pendingmode is not None and self._pendingmode != mode:
            # Refuse to mix pending and non-pending reads within one
            # instance; that would yield an inconsistent view.
            fp.close()
            raise error.Abort(_('working directory state may be '
                                'changed parallelly'))
        self._pendingmode = mode
        return fp

    def parents(self):
        if not self._parents:
            try:
                fp = self._opendirstatefile()
                # The first 40 bytes of the dirstate are the two parent
                # nodeids (20 bytes each).
                st = fp.read(40)
                fp.close()
            except IOError as err:
                if err.errno != errno.ENOENT:
                    raise
                # File doesn't exist, so the current state is empty
                st = ''

            l = len(st)
            if l == 40:
                self._parents = st[:20], st[20:40]
            elif l == 0:
                self._parents = [nullid, nullid]
            else:
                raise error.Abort(_('working directory state appears '
                                    'damaged!'))

        return self._parents

    def setparents(self, p1, p2):
        self._parents = (p1, p2)
        self._dirtyparents = True

    def read(self):
        # ignore HG_PENDING because identity is used only for writing
        self.identity = util.filestat.frompath(
            self._opener.join(self._filename))

        try:
            fp = self._opendirstatefile()
            try:
                st = fp.read()
            finally:
                fp.close()
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            return
        if not st:
            return

        if util.safehasattr(parsers, 'dict_new_presized'):
            # Make an estimate of the number of files in the dirstate based on
            # its size. From a linear regression on a set of real-world repos,
            # all over 10,000 files, the size of a dirstate entry is 85
            # bytes. The cost of resizing is significantly higher than the cost
            # of filling in a larger presized dict, so subtract 20% from the
            # size.
            #
            # This heuristic is imperfect in many ways, so in a future dirstate
            # format update it makes sense to just record the number of entries
            # on write.
            self._map = parsers.dict_new_presized(len(st) / 71)

        # Python's garbage collector triggers a GC each time a certain number
        # of container objects (the number being defined by
        # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
        # for each file in the dirstate. The C version then immediately marks
        # them as not to be tracked by the collector. However, this has no
        # effect on when GCs are triggered, only on what objects the GC looks
        # into. This means that O(number of files) GCs are unavoidable.
        # Depending on when in the process's lifetime the dirstate is parsed,
        # this can get very expensive. As a workaround, disable GC while
        # parsing the dirstate.
        #
        # (we cannot decorate the function directly since it is in a C module)
        parse_dirstate = util.nogc(parsers.parse_dirstate)
        p = parse_dirstate(self._map, self.copymap, st)
        if not self._dirtyparents:
            self.setparents(*p)

        # Avoid excess attribute lookups by fast pathing certain checks
        self.__contains__ = self._map.__contains__
        self.__getitem__ = self._map.__getitem__
        self.get = self._map.get

    def write(self, st, now):
        st.write(parsers.pack_dirstate(self._map, self.copymap,
                                       self.parents(), now))
        st.close()
        self._dirtyparents = False
        # pack_dirstate may have mutated entries (ambiguous-time handling),
        # so recompute the derived sets from scratch.
        self.nonnormalset, self.otherparentset = self.nonnormalentries()

    @propertycache
    def nonnormalset(self):
        # Computing one set yields the other for free; cache both.
        nonnorm, otherparents = self.nonnormalentries()
        self.otherparentset = otherparents
        return nonnorm

    @propertycache
    def otherparentset(self):
        # Computing one set yields the other for free; cache both.
        nonnorm, otherparents = self.nonnormalentries()
        self.nonnormalset = nonnorm
        return otherparents

    @propertycache
    def identity(self):
        # read() assigns self.identity as a side effect, replacing this
        # propertycache placeholder; forcing _map triggers that read.
        self._map
        return self.identity

    @propertycache
    def dirfoldmap(self):
        f = {}
        normcase = util.normcase
        for name in self.dirs:
            f[normcase(name)] = name
        return f
General Comments 0
You need to be logged in to leave comments. Login now