##// END OF EJS Templates
dirstate: don't remove normallookup files from nonnormalset...
Mark Thomas -
r35023:1664dc7c default
parent child Browse files
Show More
@@ -1,1429 +1,1427
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import nullid
17 from .node import nullid
18 from . import (
18 from . import (
19 encoding,
19 encoding,
20 error,
20 error,
21 match as matchmod,
21 match as matchmod,
22 pathutil,
22 pathutil,
23 policy,
23 policy,
24 pycompat,
24 pycompat,
25 scmutil,
25 scmutil,
26 txnutil,
26 txnutil,
27 util,
27 util,
28 )
28 )
29
29
# C-accelerated parser module, resolved through Mercurial's module policy
parsers = policy.importmod(r'parsers')

# local aliases for frequently used caching helpers
propertycache = util.propertycache
filecache = scmutil.filecache
# size/mtime values are masked to 31 bits before being stored in dirstate
# entries (see normal()), keeping them within the on-disk field range
_rangemask = 0x7fffffff

# entry type for one dirstate record: (state, mode, size, mtime)
dirstatetuple = parsers.dirstatetuple
37
37
class repocache(filecache):
    """filecache for files in .hg/"""
    def join(self, obj, fname):
        # resolve fname through the dirstate's opener, i.e. relative to .hg/
        return obj._opener.join(fname)
42
42
class rootcache(filecache):
    """filecache for files in the repository root"""
    def join(self, obj, fname):
        # resolve fname relative to the working directory root
        return obj._join(fname)
47
47
48 def _getfsnow(vfs):
48 def _getfsnow(vfs):
49 '''Get "now" timestamp on filesystem'''
49 '''Get "now" timestamp on filesystem'''
50 tmpfd, tmpname = vfs.mkstemp()
50 tmpfd, tmpname = vfs.mkstemp()
51 try:
51 try:
52 return os.fstat(tmpfd).st_mtime
52 return os.fstat(tmpfd).st_mtime
53 finally:
53 finally:
54 os.close(tmpfd)
54 os.close(tmpfd)
55 vfs.unlink(tmpname)
55 vfs.unlink(tmpname)
56
56
57 class dirstate(object):
57 class dirstate(object):
58
58
    def __init__(self, opener, ui, root, validate, sparsematchfn):
        '''Create a new dirstate object.

        opener is an open()-like callable that can be used to open the
        dirstate file; root is the root of the directory tracked by
        the dirstate.
        '''
        self._opener = opener
        self._validate = validate
        self._root = root
        self._sparsematchfn = sparsematchfn
        # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
        # UNC path pointing to root share (issue4557)
        self._rootdir = pathutil.normasprefix(root)
        # True when in-memory state has diverged from what is on disk
        self._dirty = False
        self._lastnormaltime = 0
        self._ui = ui
        self._filecache = {}
        # count of currently-open parentchange contexts (see parentchange())
        self._parentwriters = 0
        self._filename = 'dirstate'
        self._pendingfilename = '%s.pending' % self._filename
        self._plchangecallbacks = {}
        # original parents, recorded lazily by setparents()
        self._origpl = None
        # files touched since the last write
        self._updatedfiles = set()
83
83
    @contextlib.contextmanager
    def parentchange(self):
        '''Context manager for handling dirstate parents.

        If an exception occurs in the scope of the context manager,
        the incoherent dirstate won't be written when wlock is
        released.
        '''
        self._parentwriters += 1
        yield
        # Typically we want the "undo" step of a context manager in a
        # finally block so it happens even when an exception
        # occurs. In this case, however, we only want to decrement
        # parentwriters if the code in the with statement exits
        # normally, so we don't have a try/finally here on purpose.
        self._parentwriters -= 1
100
100
    def beginparentchange(self):
        '''Marks the beginning of a set of changes that involve changing
        the dirstate parents. If there is an exception during this time,
        the dirstate will not be written when the wlock is released. This
        prevents writing an incoherent dirstate where the parent doesn't
        match the contents.
        '''
        # deprecated since 4.3: superseded by the parentchange() context
        # manager above
        self._ui.deprecwarn('beginparentchange is obsoleted by the '
                            'parentchange context manager.', '4.3')
        self._parentwriters += 1
111
111
    def endparentchange(self):
        '''Marks the end of a set of changes that involve changing the
        dirstate parents. Once all parent changes have been marked done,
        the wlock will be free to write the dirstate on release.
        '''
        # deprecated since 4.3: superseded by the parentchange() context
        # manager above
        self._ui.deprecwarn('endparentchange is obsoleted by the '
                            'parentchange context manager.', '4.3')
        # guard against unbalanced end calls
        if self._parentwriters > 0:
            self._parentwriters -= 1
121
121
122 def pendingparentchange(self):
122 def pendingparentchange(self):
123 '''Returns true if the dirstate is in the middle of a set of changes
123 '''Returns true if the dirstate is in the middle of a set of changes
124 that modify the dirstate parent.
124 that modify the dirstate parent.
125 '''
125 '''
126 return self._parentwriters > 0
126 return self._parentwriters > 0
127
127
    @propertycache
    def _map(self):
        '''Return the dirstate contents as a map from filename to
        (state, mode, size, time).'''
        # the assignment replaces this propertycache in the instance dict,
        # so the dirstatemap is only constructed once per dirstate
        self._map = dirstatemap(self._ui, self._opener, self._root)
        return self._map
134
134
    @property
    def _sparsematcher(self):
        """The matcher for the sparse checkout.

        The working directory may not include every file from a manifest. The
        matcher obtained by this property will match a path if it is to be
        included in the working directory.
        """
        # TODO there is potential to cache this property. For now, the matcher
        # is resolved on every access. (But the called function does use a
        # cache to keep the lookup fast.)
        return self._sparsematchfn()
147
147
148 @repocache('branch')
148 @repocache('branch')
149 def _branch(self):
149 def _branch(self):
150 try:
150 try:
151 return self._opener.read("branch").strip() or "default"
151 return self._opener.read("branch").strip() or "default"
152 except IOError as inst:
152 except IOError as inst:
153 if inst.errno != errno.ENOENT:
153 if inst.errno != errno.ENOENT:
154 raise
154 raise
155 return "default"
155 return "default"
156
156
    @property
    def _pl(self):
        # parents as stored in the dirstate map (not run through _validate)
        return self._map.parents()
160
160
    def dirs(self):
        # directories containing tracked files, as maintained by the map
        # (kept in sync by _addpath/_droppath)
        return self._map.dirs
163
163
    @rootcache('.hgignore')
    def _ignore(self):
        # matcher built from all configured ignore files; matches nothing
        # when no ignore files exist
        files = self._ignorefiles()
        if not files:
            return matchmod.never(self._root, '')

        pats = ['include:%s' % f for f in files]
        return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
172
172
    @propertycache
    def _slash(self):
        # True when paths should be shown with '/' although the native OS
        # separator differs (ui.slash option)
        return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'
176
176
    @propertycache
    def _checklink(self):
        # whether the filesystem under the repo root supports symlinks
        return util.checklink(self._root)
180
180
    @propertycache
    def _checkexec(self):
        # whether the filesystem under the repo root honors the exec bit
        return util.checkexec(self._root)
184
184
    @propertycache
    def _checkcase(self):
        # True on case-insensitive filesystems (probed via the .hg dir)
        return not util.fscasesensitive(self._join('.hg'))
188
188
    def _join(self, f):
        # Return the absolute path of tracked file f.
        # much faster than os.path.join()
        # it's safe because f is always a relative path
        return self._rootdir + f
193
193
    def flagfunc(self, buildfallback):
        # Return a function mapping a tracked path to its flags: 'l' for a
        # symlink, 'x' for executable, '' otherwise.  When the filesystem
        # cannot represent a flag, buildfallback() supplies the answer
        # (e.g. flags recorded elsewhere).
        if self._checklink and self._checkexec:
            # both flags are real on disk: answer from a single lstat
            def f(x):
                try:
                    st = os.lstat(self._join(x))
                    if util.statislink(st):
                        return 'l'
                    if util.statisexec(st):
                        return 'x'
                except OSError:
                    # missing/unreadable file simply has no flags
                    pass
                return ''
            return f

        fallback = buildfallback()
        if self._checklink:
            # symlinks are real, exec bit comes from the fallback
            def f(x):
                if os.path.islink(self._join(x)):
                    return 'l'
                if 'x' in fallback(x):
                    return 'x'
                return ''
            return f
        if self._checkexec:
            # exec bit is real, symlinks come from the fallback
            def f(x):
                if 'l' in fallback(x):
                    return 'l'
                if util.isexec(self._join(x)):
                    return 'x'
                return ''
            return f
        else:
            # neither flag is representable: fully delegate
            return fallback
227
227
    @propertycache
    def _cwd(self):
        # current working directory, with ui.forcecwd taking precedence
        # when set
        # internal config: ui.forcecwd
        forcecwd = self._ui.config('ui', 'forcecwd')
        if forcecwd:
            return forcecwd
        return pycompat.getcwd()
235
235
236 def getcwd(self):
236 def getcwd(self):
237 '''Return the path from which a canonical path is calculated.
237 '''Return the path from which a canonical path is calculated.
238
238
239 This path should be used to resolve file patterns or to convert
239 This path should be used to resolve file patterns or to convert
240 canonical paths back to file paths for display. It shouldn't be
240 canonical paths back to file paths for display. It shouldn't be
241 used to get real file paths. Use vfs functions instead.
241 used to get real file paths. Use vfs functions instead.
242 '''
242 '''
243 cwd = self._cwd
243 cwd = self._cwd
244 if cwd == self._root:
244 if cwd == self._root:
245 return ''
245 return ''
246 # self._root ends with a path separator if self._root is '/' or 'C:\'
246 # self._root ends with a path separator if self._root is '/' or 'C:\'
247 rootsep = self._root
247 rootsep = self._root
248 if not util.endswithsep(rootsep):
248 if not util.endswithsep(rootsep):
249 rootsep += pycompat.ossep
249 rootsep += pycompat.ossep
250 if cwd.startswith(rootsep):
250 if cwd.startswith(rootsep):
251 return cwd[len(rootsep):]
251 return cwd[len(rootsep):]
252 else:
252 else:
253 # we're outside the repo. return an absolute path.
253 # we're outside the repo. return an absolute path.
254 return cwd
254 return cwd
255
255
256 def pathto(self, f, cwd=None):
256 def pathto(self, f, cwd=None):
257 if cwd is None:
257 if cwd is None:
258 cwd = self.getcwd()
258 cwd = self.getcwd()
259 path = util.pathto(self._root, cwd, f)
259 path = util.pathto(self._root, cwd, f)
260 if self._slash:
260 if self._slash:
261 return util.pconvert(path)
261 return util.pconvert(path)
262 return path
262 return path
263
263
264 def __getitem__(self, key):
264 def __getitem__(self, key):
265 '''Return the current state of key (a filename) in the dirstate.
265 '''Return the current state of key (a filename) in the dirstate.
266
266
267 States are:
267 States are:
268 n normal
268 n normal
269 m needs merging
269 m needs merging
270 r marked for removal
270 r marked for removal
271 a marked for addition
271 a marked for addition
272 ? not tracked
272 ? not tracked
273 '''
273 '''
274 return self._map.get(key, ("?",))[0]
274 return self._map.get(key, ("?",))[0]
275
275
    def __contains__(self, key):
        # membership means "key is tracked in some state"
        return key in self._map
278
278
    def __iter__(self):
        # iterate filenames in sorted order for deterministic traversal
        return iter(sorted(self._map))
281
281
    def items(self):
        # yield (filename, dirstatetuple) pairs
        return self._map.iteritems()

    # py2-style alias for the same iterator
    iteritems = items
286
286
    def parents(self):
        # both parents, run through the validate callback
        return [self._validate(p) for p in self._pl]
289
289
    def p1(self):
        # first (validated) parent of the working directory
        return self._validate(self._pl[0])
292
292
    def p2(self):
        # second (validated) parent of the working directory
        return self._validate(self._pl[1])
295
295
    def branch(self):
        # branch name converted to the local encoding for display
        return encoding.tolocal(self._branch)
298
298
    def setparents(self, p1, p2=nullid):
        """Set dirstate parents to p1 and p2.

        When moving from two parents to one, 'm' merged entries a
        adjusted to normal and previous copy records discarded and
        returned by the call.

        See localrepo.setparents()
        """
        if self._parentwriters == 0:
            raise ValueError("cannot set dirstate parent without "
                "calling dirstate.beginparentchange")

        self._dirty = True
        oldp2 = self._pl[1]
        if self._origpl is None:
            # remember the pre-change parents so a later write can tell
            # whether they moved
            self._origpl = self._pl
        self._map.setparents(p1, p2)
        copies = {}
        if oldp2 != nullid and p2 == nullid:
            # leaving a two-parent state: every entry that may carry merge
            # markers is in nonnormalset or otherparentset
            candidatefiles = self._map.nonnormalset.union(
                self._map.otherparentset)
            for f in candidatefiles:
                s = self._map.get(f)
                if s is None:
                    continue

                # Discard 'm' markers when moving away from a merge state
                if s[0] == 'm':
                    source = self._map.copymap.get(f)
                    if source:
                        copies[f] = source
                    self.normallookup(f)
                # Also fix up otherparent markers
                elif s[0] == 'n' and s[2] == -2:
                    source = self._map.copymap.get(f)
                    if source:
                        copies[f] = source
                    self.add(f)
        return copies
339
339
    def setbranch(self, branch):
        # persist the branch name (converted from local encoding) to
        # .hg/branch, atomically
        self._branch = encoding.fromlocal(branch)
        f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
        try:
            f.write(self._branch + '\n')
            f.close()

            # make sure filecache has the correct stat info for _branch after
            # replacing the underlying file
            ce = self._filecache['_branch']
            if ce:
                ce.refresh()
        except: # re-raises
            # abandon the partially-written temp file before re-raising
            f.discard()
            raise
355
355
356 def invalidate(self):
356 def invalidate(self):
357 '''Causes the next access to reread the dirstate.
357 '''Causes the next access to reread the dirstate.
358
358
359 This is different from localrepo.invalidatedirstate() because it always
359 This is different from localrepo.invalidatedirstate() because it always
360 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
360 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
361 check whether the dirstate has changed before rereading it.'''
361 check whether the dirstate has changed before rereading it.'''
362
362
363 for a in ("_map", "_branch", "_ignore"):
363 for a in ("_map", "_branch", "_ignore"):
364 if a in self.__dict__:
364 if a in self.__dict__:
365 delattr(self, a)
365 delattr(self, a)
366 self._lastnormaltime = 0
366 self._lastnormaltime = 0
367 self._dirty = False
367 self._dirty = False
368 self._updatedfiles.clear()
368 self._updatedfiles.clear()
369 self._parentwriters = 0
369 self._parentwriters = 0
370 self._origpl = None
370 self._origpl = None
371
371
372 def copy(self, source, dest):
372 def copy(self, source, dest):
373 """Mark dest as a copy of source. Unmark dest if source is None."""
373 """Mark dest as a copy of source. Unmark dest if source is None."""
374 if source == dest:
374 if source == dest:
375 return
375 return
376 self._dirty = True
376 self._dirty = True
377 if source is not None:
377 if source is not None:
378 self._map.copymap[dest] = source
378 self._map.copymap[dest] = source
379 self._updatedfiles.add(source)
379 self._updatedfiles.add(source)
380 self._updatedfiles.add(dest)
380 self._updatedfiles.add(dest)
381 elif self._map.copymap.pop(dest, None):
381 elif self._map.copymap.pop(dest, None):
382 self._updatedfiles.add(dest)
382 self._updatedfiles.add(dest)
383
383
    def copied(self, file):
        # copy source recorded for *file*, or None when it is not a copy
        return self._map.copymap.get(file, None)
386
386
    def copies(self):
        # the full copy map: destination -> source
        return self._map.copymap
389
389
    def _droppath(self, f):
        # internal helper: f is about to stop being tracked; keep the
        # lazily-built dirs and filefoldmap caches consistent
        if self[f] not in "?r" and "dirs" in self._map.__dict__:
            self._map.dirs.delpath(f)

        if "filefoldmap" in self._map.__dict__:
            normed = util.normcase(f)
            if normed in self._map.filefoldmap:
                del self._map.filefoldmap[normed]

        self._updatedfiles.add(f)
400
400
    def _addpath(self, f, state, mode, size, mtime):
        # internal helper: record f with the given entry fields, keeping
        # the dirs/nonnormalset/otherparentset structures in sync
        oldstate = self[f]
        if state == 'a' or oldstate == 'r':
            # newly-added paths must be valid and must not collide with
            # an existing directory or shadow a tracked file
            scmutil.checkfilename(f)
            if f in self._map.dirs:
                raise error.Abort(_('directory %r already in dirstate') % f)
            # shadows
            for d in util.finddirs(f):
                if d in self._map.dirs:
                    break
                entry = self._map.get(d)
                if entry is not None and entry[0] != 'r':
                    raise error.Abort(
                        _('file %r in dirstate clashes with %r') % (d, f))
        if oldstate in "?r" and "dirs" in self._map.__dict__:
            # file becomes tracked: update the lazily-built dirs cache
            self._map.dirs.addpath(f)
        self._dirty = True
        self._updatedfiles.add(f)
        self._map[f] = dirstatetuple(state, mode, size, mtime)
        if state != 'n' or mtime == -1:
            # mtime of -1 means "needs lookup", so the entry cannot be
            # considered clean-normal
            self._map.nonnormalset.add(f)
        if size == -2:
            # size -2 marks entries coming from the other merge parent
            self._map.otherparentset.add(f)
424
424
425 def normal(self, f):
425 def normal(self, f):
426 '''Mark a file normal and clean.'''
426 '''Mark a file normal and clean.'''
427 s = os.lstat(self._join(f))
427 s = os.lstat(self._join(f))
428 mtime = s.st_mtime
428 mtime = s.st_mtime
429 self._addpath(f, 'n', s.st_mode,
429 self._addpath(f, 'n', s.st_mode,
430 s.st_size & _rangemask, mtime & _rangemask)
430 s.st_size & _rangemask, mtime & _rangemask)
431 self._map.copymap.pop(f, None)
431 self._map.copymap.pop(f, None)
432 if f in self._map.nonnormalset:
432 if f in self._map.nonnormalset:
433 self._map.nonnormalset.remove(f)
433 self._map.nonnormalset.remove(f)
434 if mtime > self._lastnormaltime:
434 if mtime > self._lastnormaltime:
435 # Remember the most recent modification timeslot for status(),
435 # Remember the most recent modification timeslot for status(),
436 # to make sure we won't miss future size-preserving file content
436 # to make sure we won't miss future size-preserving file content
437 # modifications that happen within the same timeslot.
437 # modifications that happen within the same timeslot.
438 self._lastnormaltime = mtime
438 self._lastnormaltime = mtime
439
439
    def normallookup(self, f):
        '''Mark a file normal, but possibly dirty.'''
        if self._pl[1] != nullid:
            # if there is a merge going on and the file was either
            # in state 'm' (-1) or coming from other parent (-2) before
            # being removed, restore that state.
            entry = self._map.get(f)
            if entry is not None:
                if entry[0] == 'r' and entry[2] in (-1, -2):
                    source = self._map.copymap.get(f)
                    if entry[2] == -1:
                        self.merge(f)
                    elif entry[2] == -2:
                        self.otherparent(f)
                    if source:
                        self.copy(source, f)
                    return
                if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
                    return
        self._addpath(f, 'n', 0, -1, -1)
        # NOTE: _addpath has just added f to nonnormalset (mtime is -1,
        # meaning "needs lookup"), and it must stay there -- do not remove
        # it here.
        self._map.copymap.pop(f, None)
    def otherparent(self, f):
        '''Mark as coming from the other parent, always dirty.'''
        if self._pl[1] == nullid:
            # only meaningful while a merge (two parents) is in progress
            raise error.Abort(_("setting %r to other parent "
                                "only allowed in merges") % f)
        if f in self and self[f] == 'n':
            # merge-like: file is already tracked as normal
            self._addpath(f, 'm', 0, -2, -1)
        else:
            # add-like: file newly appears from the other parent
            self._addpath(f, 'n', 0, -2, -1)
        self._map.copymap.pop(f, None)
476
474
    def add(self, f):
        '''Mark a file added.'''
        # state 'a', with placeholder size/mtime; drop any stale copy record
        self._addpath(f, 'a', 0, -1, -1)
        self._map.copymap.pop(f, None)
481
479
    def remove(self, f):
        '''Mark a file removed.'''
        self._dirty = True
        self._droppath(f)
        size = 0
        if self._pl[1] != nullid:
            entry = self._map.get(f)
            if entry is not None:
                # backup the previous state in the size field so a later
                # normallookup() can restore it
                if entry[0] == 'm': # merge
                    size = -1
                elif entry[0] == 'n' and entry[2] == -2: # other parent
                    size = -2
                    self._map.otherparentset.add(f)
        self._map[f] = dirstatetuple('r', 0, size, 0)
        self._map.nonnormalset.add(f)
        if size == 0:
            # plain removal: any pending copy record is now meaningless
            self._map.copymap.pop(f, None)
500
498
501 def merge(self, f):
499 def merge(self, f):
502 '''Mark a file merged.'''
500 '''Mark a file merged.'''
503 if self._pl[1] == nullid:
501 if self._pl[1] == nullid:
504 return self.normallookup(f)
502 return self.normallookup(f)
505 return self.otherparent(f)
503 return self.otherparent(f)
506
504
507 def drop(self, f):
505 def drop(self, f):
508 '''Drop a file from the dirstate'''
506 '''Drop a file from the dirstate'''
509 if f in self._map:
507 if f in self._map:
510 self._dirty = True
508 self._dirty = True
511 self._droppath(f)
509 self._droppath(f)
512 del self._map[f]
510 del self._map[f]
513 if f in self._map.nonnormalset:
511 if f in self._map.nonnormalset:
514 self._map.nonnormalset.remove(f)
512 self._map.nonnormalset.remove(f)
515 self._map.copymap.pop(f, None)
513 self._map.copymap.pop(f, None)
516
514
    def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
        # Resolve the on-disk case of *path* (whose normalized form is
        # *normed*) and cache the answer in *storemap*.
        if exists is None:
            exists = os.path.lexists(os.path.join(self._root, path))
        if not exists:
            # Maybe a path component exists
            if not ignoremissing and '/' in path:
                d, f = path.rsplit('/', 1)
                d = self._normalize(d, False, ignoremissing, None)
                folded = d + "/" + f
            else:
                # No path components, preserve original case
                folded = path
        else:
            # recursively normalize leading directory components
            # against dirstate
            if '/' in normed:
                d, f = normed.rsplit('/', 1)
                d = self._normalize(d, False, ignoremissing, True)
                r = self._root + "/" + d
                folded = d + "/" + util.fspath(f, r)
            else:
                folded = util.fspath(normed, self._root)
            # cache only results confirmed against the filesystem
            storemap[normed] = folded

        return folded
542
540
543 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
541 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
544 normed = util.normcase(path)
542 normed = util.normcase(path)
545 folded = self._map.filefoldmap.get(normed, None)
543 folded = self._map.filefoldmap.get(normed, None)
546 if folded is None:
544 if folded is None:
547 if isknown:
545 if isknown:
548 folded = path
546 folded = path
549 else:
547 else:
550 folded = self._discoverpath(path, normed, ignoremissing, exists,
548 folded = self._discoverpath(path, normed, ignoremissing, exists,
551 self._map.filefoldmap)
549 self._map.filefoldmap)
552 return folded
550 return folded
553
551
554 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
552 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
555 normed = util.normcase(path)
553 normed = util.normcase(path)
556 folded = self._map.filefoldmap.get(normed, None)
554 folded = self._map.filefoldmap.get(normed, None)
557 if folded is None:
555 if folded is None:
558 folded = self._map.dirfoldmap.get(normed, None)
556 folded = self._map.dirfoldmap.get(normed, None)
559 if folded is None:
557 if folded is None:
560 if isknown:
558 if isknown:
561 folded = path
559 folded = path
562 else:
560 else:
563 # store discovered result in dirfoldmap so that future
561 # store discovered result in dirfoldmap so that future
564 # normalizefile calls don't start matching directories
562 # normalizefile calls don't start matching directories
565 folded = self._discoverpath(path, normed, ignoremissing, exists,
563 folded = self._discoverpath(path, normed, ignoremissing, exists,
566 self._map.dirfoldmap)
564 self._map.dirfoldmap)
567 return folded
565 return folded
568
566
569 def normalize(self, path, isknown=False, ignoremissing=False):
567 def normalize(self, path, isknown=False, ignoremissing=False):
570 '''
568 '''
571 normalize the case of a pathname when on a casefolding filesystem
569 normalize the case of a pathname when on a casefolding filesystem
572
570
573 isknown specifies whether the filename came from walking the
571 isknown specifies whether the filename came from walking the
574 disk, to avoid extra filesystem access.
572 disk, to avoid extra filesystem access.
575
573
576 If ignoremissing is True, missing path are returned
574 If ignoremissing is True, missing path are returned
577 unchanged. Otherwise, we try harder to normalize possibly
575 unchanged. Otherwise, we try harder to normalize possibly
578 existing path components.
576 existing path components.
579
577
580 The normalized case is determined based on the following precedence:
578 The normalized case is determined based on the following precedence:
581
579
582 - version of name already stored in the dirstate
580 - version of name already stored in the dirstate
583 - version of name stored on disk
581 - version of name stored on disk
584 - version provided via command arguments
582 - version provided via command arguments
585 '''
583 '''
586
584
587 if self._checkcase:
585 if self._checkcase:
588 return self._normalize(path, isknown, ignoremissing)
586 return self._normalize(path, isknown, ignoremissing)
589 return path
587 return path
590
588
591 def clear(self):
589 def clear(self):
592 self._map.clear()
590 self._map.clear()
593 self._lastnormaltime = 0
591 self._lastnormaltime = 0
594 self._updatedfiles.clear()
592 self._updatedfiles.clear()
595 self._dirty = True
593 self._dirty = True
596
594
597 def rebuild(self, parent, allfiles, changedfiles=None):
595 def rebuild(self, parent, allfiles, changedfiles=None):
598 if changedfiles is None:
596 if changedfiles is None:
599 # Rebuild entire dirstate
597 # Rebuild entire dirstate
600 changedfiles = allfiles
598 changedfiles = allfiles
601 lastnormaltime = self._lastnormaltime
599 lastnormaltime = self._lastnormaltime
602 self.clear()
600 self.clear()
603 self._lastnormaltime = lastnormaltime
601 self._lastnormaltime = lastnormaltime
604
602
605 if self._origpl is None:
603 if self._origpl is None:
606 self._origpl = self._pl
604 self._origpl = self._pl
607 self._map.setparents(parent, nullid)
605 self._map.setparents(parent, nullid)
608 for f in changedfiles:
606 for f in changedfiles:
609 if f in allfiles:
607 if f in allfiles:
610 self.normallookup(f)
608 self.normallookup(f)
611 else:
609 else:
612 self.drop(f)
610 self.drop(f)
613
611
614 self._dirty = True
612 self._dirty = True
615
613
616 def identity(self):
614 def identity(self):
617 '''Return identity of dirstate itself to detect changing in storage
615 '''Return identity of dirstate itself to detect changing in storage
618
616
619 If identity of previous dirstate is equal to this, writing
617 If identity of previous dirstate is equal to this, writing
620 changes based on the former dirstate out can keep consistency.
618 changes based on the former dirstate out can keep consistency.
621 '''
619 '''
622 return self._map.identity
620 return self._map.identity
623
621
624 def write(self, tr):
622 def write(self, tr):
625 if not self._dirty:
623 if not self._dirty:
626 return
624 return
627
625
628 filename = self._filename
626 filename = self._filename
629 if tr:
627 if tr:
630 # 'dirstate.write()' is not only for writing in-memory
628 # 'dirstate.write()' is not only for writing in-memory
631 # changes out, but also for dropping ambiguous timestamp.
629 # changes out, but also for dropping ambiguous timestamp.
632 # delayed writing re-raise "ambiguous timestamp issue".
630 # delayed writing re-raise "ambiguous timestamp issue".
633 # See also the wiki page below for detail:
631 # See also the wiki page below for detail:
634 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
632 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
635
633
636 # emulate dropping timestamp in 'parsers.pack_dirstate'
634 # emulate dropping timestamp in 'parsers.pack_dirstate'
637 now = _getfsnow(self._opener)
635 now = _getfsnow(self._opener)
638 dmap = self._map
636 dmap = self._map
639 for f in self._updatedfiles:
637 for f in self._updatedfiles:
640 e = dmap.get(f)
638 e = dmap.get(f)
641 if e is not None and e[0] == 'n' and e[3] == now:
639 if e is not None and e[0] == 'n' and e[3] == now:
642 dmap[f] = dirstatetuple(e[0], e[1], e[2], -1)
640 dmap[f] = dirstatetuple(e[0], e[1], e[2], -1)
643 self._map.nonnormalset.add(f)
641 self._map.nonnormalset.add(f)
644
642
645 # emulate that all 'dirstate.normal' results are written out
643 # emulate that all 'dirstate.normal' results are written out
646 self._lastnormaltime = 0
644 self._lastnormaltime = 0
647 self._updatedfiles.clear()
645 self._updatedfiles.clear()
648
646
649 # delay writing in-memory changes out
647 # delay writing in-memory changes out
650 tr.addfilegenerator('dirstate', (self._filename,),
648 tr.addfilegenerator('dirstate', (self._filename,),
651 self._writedirstate, location='plain')
649 self._writedirstate, location='plain')
652 return
650 return
653
651
654 st = self._opener(filename, "w", atomictemp=True, checkambig=True)
652 st = self._opener(filename, "w", atomictemp=True, checkambig=True)
655 self._writedirstate(st)
653 self._writedirstate(st)
656
654
657 def addparentchangecallback(self, category, callback):
655 def addparentchangecallback(self, category, callback):
658 """add a callback to be called when the wd parents are changed
656 """add a callback to be called when the wd parents are changed
659
657
660 Callback will be called with the following arguments:
658 Callback will be called with the following arguments:
661 dirstate, (oldp1, oldp2), (newp1, newp2)
659 dirstate, (oldp1, oldp2), (newp1, newp2)
662
660
663 Category is a unique identifier to allow overwriting an old callback
661 Category is a unique identifier to allow overwriting an old callback
664 with a newer callback.
662 with a newer callback.
665 """
663 """
666 self._plchangecallbacks[category] = callback
664 self._plchangecallbacks[category] = callback
667
665
668 def _writedirstate(self, st):
666 def _writedirstate(self, st):
669 # notify callbacks about parents change
667 # notify callbacks about parents change
670 if self._origpl is not None and self._origpl != self._pl:
668 if self._origpl is not None and self._origpl != self._pl:
671 for c, callback in sorted(self._plchangecallbacks.iteritems()):
669 for c, callback in sorted(self._plchangecallbacks.iteritems()):
672 callback(self, self._origpl, self._pl)
670 callback(self, self._origpl, self._pl)
673 self._origpl = None
671 self._origpl = None
674 # use the modification time of the newly created temporary file as the
672 # use the modification time of the newly created temporary file as the
675 # filesystem's notion of 'now'
673 # filesystem's notion of 'now'
676 now = util.fstat(st).st_mtime & _rangemask
674 now = util.fstat(st).st_mtime & _rangemask
677
675
678 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
676 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
679 # timestamp of each entries in dirstate, because of 'now > mtime'
677 # timestamp of each entries in dirstate, because of 'now > mtime'
680 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite')
678 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite')
681 if delaywrite > 0:
679 if delaywrite > 0:
682 # do we have any files to delay for?
680 # do we have any files to delay for?
683 for f, e in self._map.iteritems():
681 for f, e in self._map.iteritems():
684 if e[0] == 'n' and e[3] == now:
682 if e[0] == 'n' and e[3] == now:
685 import time # to avoid useless import
683 import time # to avoid useless import
686 # rather than sleep n seconds, sleep until the next
684 # rather than sleep n seconds, sleep until the next
687 # multiple of n seconds
685 # multiple of n seconds
688 clock = time.time()
686 clock = time.time()
689 start = int(clock) - (int(clock) % delaywrite)
687 start = int(clock) - (int(clock) % delaywrite)
690 end = start + delaywrite
688 end = start + delaywrite
691 time.sleep(end - clock)
689 time.sleep(end - clock)
692 now = end # trust our estimate that the end is near now
690 now = end # trust our estimate that the end is near now
693 break
691 break
694
692
695 self._map.write(st, now)
693 self._map.write(st, now)
696 self._lastnormaltime = 0
694 self._lastnormaltime = 0
697 self._dirty = False
695 self._dirty = False
698
696
699 def _dirignore(self, f):
697 def _dirignore(self, f):
700 if f == '.':
698 if f == '.':
701 return False
699 return False
702 if self._ignore(f):
700 if self._ignore(f):
703 return True
701 return True
704 for p in util.finddirs(f):
702 for p in util.finddirs(f):
705 if self._ignore(p):
703 if self._ignore(p):
706 return True
704 return True
707 return False
705 return False
708
706
709 def _ignorefiles(self):
707 def _ignorefiles(self):
710 files = []
708 files = []
711 if os.path.exists(self._join('.hgignore')):
709 if os.path.exists(self._join('.hgignore')):
712 files.append(self._join('.hgignore'))
710 files.append(self._join('.hgignore'))
713 for name, path in self._ui.configitems("ui"):
711 for name, path in self._ui.configitems("ui"):
714 if name == 'ignore' or name.startswith('ignore.'):
712 if name == 'ignore' or name.startswith('ignore.'):
715 # we need to use os.path.join here rather than self._join
713 # we need to use os.path.join here rather than self._join
716 # because path is arbitrary and user-specified
714 # because path is arbitrary and user-specified
717 files.append(os.path.join(self._rootdir, util.expandpath(path)))
715 files.append(os.path.join(self._rootdir, util.expandpath(path)))
718 return files
716 return files
719
717
720 def _ignorefileandline(self, f):
718 def _ignorefileandline(self, f):
721 files = collections.deque(self._ignorefiles())
719 files = collections.deque(self._ignorefiles())
722 visited = set()
720 visited = set()
723 while files:
721 while files:
724 i = files.popleft()
722 i = files.popleft()
725 patterns = matchmod.readpatternfile(i, self._ui.warn,
723 patterns = matchmod.readpatternfile(i, self._ui.warn,
726 sourceinfo=True)
724 sourceinfo=True)
727 for pattern, lineno, line in patterns:
725 for pattern, lineno, line in patterns:
728 kind, p = matchmod._patsplit(pattern, 'glob')
726 kind, p = matchmod._patsplit(pattern, 'glob')
729 if kind == "subinclude":
727 if kind == "subinclude":
730 if p not in visited:
728 if p not in visited:
731 files.append(p)
729 files.append(p)
732 continue
730 continue
733 m = matchmod.match(self._root, '', [], [pattern],
731 m = matchmod.match(self._root, '', [], [pattern],
734 warn=self._ui.warn)
732 warn=self._ui.warn)
735 if m(f):
733 if m(f):
736 return (i, lineno, line)
734 return (i, lineno, line)
737 visited.add(i)
735 visited.add(i)
738 return (None, -1, "")
736 return (None, -1, "")
739
737
740 def _walkexplicit(self, match, subrepos):
738 def _walkexplicit(self, match, subrepos):
741 '''Get stat data about the files explicitly specified by match.
739 '''Get stat data about the files explicitly specified by match.
742
740
743 Return a triple (results, dirsfound, dirsnotfound).
741 Return a triple (results, dirsfound, dirsnotfound).
744 - results is a mapping from filename to stat result. It also contains
742 - results is a mapping from filename to stat result. It also contains
745 listings mapping subrepos and .hg to None.
743 listings mapping subrepos and .hg to None.
746 - dirsfound is a list of files found to be directories.
744 - dirsfound is a list of files found to be directories.
747 - dirsnotfound is a list of files that the dirstate thinks are
745 - dirsnotfound is a list of files that the dirstate thinks are
748 directories and that were not found.'''
746 directories and that were not found.'''
749
747
750 def badtype(mode):
748 def badtype(mode):
751 kind = _('unknown')
749 kind = _('unknown')
752 if stat.S_ISCHR(mode):
750 if stat.S_ISCHR(mode):
753 kind = _('character device')
751 kind = _('character device')
754 elif stat.S_ISBLK(mode):
752 elif stat.S_ISBLK(mode):
755 kind = _('block device')
753 kind = _('block device')
756 elif stat.S_ISFIFO(mode):
754 elif stat.S_ISFIFO(mode):
757 kind = _('fifo')
755 kind = _('fifo')
758 elif stat.S_ISSOCK(mode):
756 elif stat.S_ISSOCK(mode):
759 kind = _('socket')
757 kind = _('socket')
760 elif stat.S_ISDIR(mode):
758 elif stat.S_ISDIR(mode):
761 kind = _('directory')
759 kind = _('directory')
762 return _('unsupported file type (type is %s)') % kind
760 return _('unsupported file type (type is %s)') % kind
763
761
764 matchedir = match.explicitdir
762 matchedir = match.explicitdir
765 badfn = match.bad
763 badfn = match.bad
766 dmap = self._map
764 dmap = self._map
767 lstat = os.lstat
765 lstat = os.lstat
768 getkind = stat.S_IFMT
766 getkind = stat.S_IFMT
769 dirkind = stat.S_IFDIR
767 dirkind = stat.S_IFDIR
770 regkind = stat.S_IFREG
768 regkind = stat.S_IFREG
771 lnkkind = stat.S_IFLNK
769 lnkkind = stat.S_IFLNK
772 join = self._join
770 join = self._join
773 dirsfound = []
771 dirsfound = []
774 foundadd = dirsfound.append
772 foundadd = dirsfound.append
775 dirsnotfound = []
773 dirsnotfound = []
776 notfoundadd = dirsnotfound.append
774 notfoundadd = dirsnotfound.append
777
775
778 if not match.isexact() and self._checkcase:
776 if not match.isexact() and self._checkcase:
779 normalize = self._normalize
777 normalize = self._normalize
780 else:
778 else:
781 normalize = None
779 normalize = None
782
780
783 files = sorted(match.files())
781 files = sorted(match.files())
784 subrepos.sort()
782 subrepos.sort()
785 i, j = 0, 0
783 i, j = 0, 0
786 while i < len(files) and j < len(subrepos):
784 while i < len(files) and j < len(subrepos):
787 subpath = subrepos[j] + "/"
785 subpath = subrepos[j] + "/"
788 if files[i] < subpath:
786 if files[i] < subpath:
789 i += 1
787 i += 1
790 continue
788 continue
791 while i < len(files) and files[i].startswith(subpath):
789 while i < len(files) and files[i].startswith(subpath):
792 del files[i]
790 del files[i]
793 j += 1
791 j += 1
794
792
795 if not files or '.' in files:
793 if not files or '.' in files:
796 files = ['.']
794 files = ['.']
797 results = dict.fromkeys(subrepos)
795 results = dict.fromkeys(subrepos)
798 results['.hg'] = None
796 results['.hg'] = None
799
797
800 alldirs = None
798 alldirs = None
801 for ff in files:
799 for ff in files:
802 # constructing the foldmap is expensive, so don't do it for the
800 # constructing the foldmap is expensive, so don't do it for the
803 # common case where files is ['.']
801 # common case where files is ['.']
804 if normalize and ff != '.':
802 if normalize and ff != '.':
805 nf = normalize(ff, False, True)
803 nf = normalize(ff, False, True)
806 else:
804 else:
807 nf = ff
805 nf = ff
808 if nf in results:
806 if nf in results:
809 continue
807 continue
810
808
811 try:
809 try:
812 st = lstat(join(nf))
810 st = lstat(join(nf))
813 kind = getkind(st.st_mode)
811 kind = getkind(st.st_mode)
814 if kind == dirkind:
812 if kind == dirkind:
815 if nf in dmap:
813 if nf in dmap:
816 # file replaced by dir on disk but still in dirstate
814 # file replaced by dir on disk but still in dirstate
817 results[nf] = None
815 results[nf] = None
818 if matchedir:
816 if matchedir:
819 matchedir(nf)
817 matchedir(nf)
820 foundadd((nf, ff))
818 foundadd((nf, ff))
821 elif kind == regkind or kind == lnkkind:
819 elif kind == regkind or kind == lnkkind:
822 results[nf] = st
820 results[nf] = st
823 else:
821 else:
824 badfn(ff, badtype(kind))
822 badfn(ff, badtype(kind))
825 if nf in dmap:
823 if nf in dmap:
826 results[nf] = None
824 results[nf] = None
827 except OSError as inst: # nf not found on disk - it is dirstate only
825 except OSError as inst: # nf not found on disk - it is dirstate only
828 if nf in dmap: # does it exactly match a missing file?
826 if nf in dmap: # does it exactly match a missing file?
829 results[nf] = None
827 results[nf] = None
830 else: # does it match a missing directory?
828 else: # does it match a missing directory?
831 if alldirs is None:
829 if alldirs is None:
832 alldirs = util.dirs(dmap._map)
830 alldirs = util.dirs(dmap._map)
833 if nf in alldirs:
831 if nf in alldirs:
834 if matchedir:
832 if matchedir:
835 matchedir(nf)
833 matchedir(nf)
836 notfoundadd(nf)
834 notfoundadd(nf)
837 else:
835 else:
838 badfn(ff, encoding.strtolocal(inst.strerror))
836 badfn(ff, encoding.strtolocal(inst.strerror))
839
837
840 # Case insensitive filesystems cannot rely on lstat() failing to detect
838 # Case insensitive filesystems cannot rely on lstat() failing to detect
841 # a case-only rename. Prune the stat object for any file that does not
839 # a case-only rename. Prune the stat object for any file that does not
842 # match the case in the filesystem, if there are multiple files that
840 # match the case in the filesystem, if there are multiple files that
843 # normalize to the same path.
841 # normalize to the same path.
844 if match.isexact() and self._checkcase:
842 if match.isexact() and self._checkcase:
845 normed = {}
843 normed = {}
846
844
847 for f, st in results.iteritems():
845 for f, st in results.iteritems():
848 if st is None:
846 if st is None:
849 continue
847 continue
850
848
851 nc = util.normcase(f)
849 nc = util.normcase(f)
852 paths = normed.get(nc)
850 paths = normed.get(nc)
853
851
854 if paths is None:
852 if paths is None:
855 paths = set()
853 paths = set()
856 normed[nc] = paths
854 normed[nc] = paths
857
855
858 paths.add(f)
856 paths.add(f)
859
857
860 for norm, paths in normed.iteritems():
858 for norm, paths in normed.iteritems():
861 if len(paths) > 1:
859 if len(paths) > 1:
862 for path in paths:
860 for path in paths:
863 folded = self._discoverpath(path, norm, True, None,
861 folded = self._discoverpath(path, norm, True, None,
864 self._map.dirfoldmap)
862 self._map.dirfoldmap)
865 if path != folded:
863 if path != folded:
866 results[path] = None
864 results[path] = None
867
865
868 return results, dirsfound, dirsnotfound
866 return results, dirsfound, dirsnotfound
869
867
870 def walk(self, match, subrepos, unknown, ignored, full=True):
868 def walk(self, match, subrepos, unknown, ignored, full=True):
871 '''
869 '''
872 Walk recursively through the directory tree, finding all files
870 Walk recursively through the directory tree, finding all files
873 matched by match.
871 matched by match.
874
872
875 If full is False, maybe skip some known-clean files.
873 If full is False, maybe skip some known-clean files.
876
874
877 Return a dict mapping filename to stat-like object (either
875 Return a dict mapping filename to stat-like object (either
878 mercurial.osutil.stat instance or return value of os.stat()).
876 mercurial.osutil.stat instance or return value of os.stat()).
879
877
880 '''
878 '''
881 # full is a flag that extensions that hook into walk can use -- this
879 # full is a flag that extensions that hook into walk can use -- this
882 # implementation doesn't use it at all. This satisfies the contract
880 # implementation doesn't use it at all. This satisfies the contract
883 # because we only guarantee a "maybe".
881 # because we only guarantee a "maybe".
884
882
885 if ignored:
883 if ignored:
886 ignore = util.never
884 ignore = util.never
887 dirignore = util.never
885 dirignore = util.never
888 elif unknown:
886 elif unknown:
889 ignore = self._ignore
887 ignore = self._ignore
890 dirignore = self._dirignore
888 dirignore = self._dirignore
891 else:
889 else:
892 # if not unknown and not ignored, drop dir recursion and step 2
890 # if not unknown and not ignored, drop dir recursion and step 2
893 ignore = util.always
891 ignore = util.always
894 dirignore = util.always
892 dirignore = util.always
895
893
896 matchfn = match.matchfn
894 matchfn = match.matchfn
897 matchalways = match.always()
895 matchalways = match.always()
898 matchtdir = match.traversedir
896 matchtdir = match.traversedir
899 dmap = self._map
897 dmap = self._map
900 listdir = util.listdir
898 listdir = util.listdir
901 lstat = os.lstat
899 lstat = os.lstat
902 dirkind = stat.S_IFDIR
900 dirkind = stat.S_IFDIR
903 regkind = stat.S_IFREG
901 regkind = stat.S_IFREG
904 lnkkind = stat.S_IFLNK
902 lnkkind = stat.S_IFLNK
905 join = self._join
903 join = self._join
906
904
907 exact = skipstep3 = False
905 exact = skipstep3 = False
908 if match.isexact(): # match.exact
906 if match.isexact(): # match.exact
909 exact = True
907 exact = True
910 dirignore = util.always # skip step 2
908 dirignore = util.always # skip step 2
911 elif match.prefix(): # match.match, no patterns
909 elif match.prefix(): # match.match, no patterns
912 skipstep3 = True
910 skipstep3 = True
913
911
914 if not exact and self._checkcase:
912 if not exact and self._checkcase:
915 normalize = self._normalize
913 normalize = self._normalize
916 normalizefile = self._normalizefile
914 normalizefile = self._normalizefile
917 skipstep3 = False
915 skipstep3 = False
918 else:
916 else:
919 normalize = self._normalize
917 normalize = self._normalize
920 normalizefile = None
918 normalizefile = None
921
919
922 # step 1: find all explicit files
920 # step 1: find all explicit files
923 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
921 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
924
922
925 skipstep3 = skipstep3 and not (work or dirsnotfound)
923 skipstep3 = skipstep3 and not (work or dirsnotfound)
926 work = [d for d in work if not dirignore(d[0])]
924 work = [d for d in work if not dirignore(d[0])]
927
925
928 # step 2: visit subdirectories
926 # step 2: visit subdirectories
929 def traverse(work, alreadynormed):
927 def traverse(work, alreadynormed):
930 wadd = work.append
928 wadd = work.append
931 while work:
929 while work:
932 nd = work.pop()
930 nd = work.pop()
933 if not match.visitdir(nd):
931 if not match.visitdir(nd):
934 continue
932 continue
935 skip = None
933 skip = None
936 if nd == '.':
934 if nd == '.':
937 nd = ''
935 nd = ''
938 else:
936 else:
939 skip = '.hg'
937 skip = '.hg'
940 try:
938 try:
941 entries = listdir(join(nd), stat=True, skip=skip)
939 entries = listdir(join(nd), stat=True, skip=skip)
942 except OSError as inst:
940 except OSError as inst:
943 if inst.errno in (errno.EACCES, errno.ENOENT):
941 if inst.errno in (errno.EACCES, errno.ENOENT):
944 match.bad(self.pathto(nd),
942 match.bad(self.pathto(nd),
945 encoding.strtolocal(inst.strerror))
943 encoding.strtolocal(inst.strerror))
946 continue
944 continue
947 raise
945 raise
948 for f, kind, st in entries:
946 for f, kind, st in entries:
949 if normalizefile:
947 if normalizefile:
950 # even though f might be a directory, we're only
948 # even though f might be a directory, we're only
951 # interested in comparing it to files currently in the
949 # interested in comparing it to files currently in the
952 # dmap -- therefore normalizefile is enough
950 # dmap -- therefore normalizefile is enough
953 nf = normalizefile(nd and (nd + "/" + f) or f, True,
951 nf = normalizefile(nd and (nd + "/" + f) or f, True,
954 True)
952 True)
955 else:
953 else:
956 nf = nd and (nd + "/" + f) or f
954 nf = nd and (nd + "/" + f) or f
957 if nf not in results:
955 if nf not in results:
958 if kind == dirkind:
956 if kind == dirkind:
959 if not ignore(nf):
957 if not ignore(nf):
960 if matchtdir:
958 if matchtdir:
961 matchtdir(nf)
959 matchtdir(nf)
962 wadd(nf)
960 wadd(nf)
963 if nf in dmap and (matchalways or matchfn(nf)):
961 if nf in dmap and (matchalways or matchfn(nf)):
964 results[nf] = None
962 results[nf] = None
965 elif kind == regkind or kind == lnkkind:
963 elif kind == regkind or kind == lnkkind:
966 if nf in dmap:
964 if nf in dmap:
967 if matchalways or matchfn(nf):
965 if matchalways or matchfn(nf):
968 results[nf] = st
966 results[nf] = st
969 elif ((matchalways or matchfn(nf))
967 elif ((matchalways or matchfn(nf))
970 and not ignore(nf)):
968 and not ignore(nf)):
971 # unknown file -- normalize if necessary
969 # unknown file -- normalize if necessary
972 if not alreadynormed:
970 if not alreadynormed:
973 nf = normalize(nf, False, True)
971 nf = normalize(nf, False, True)
974 results[nf] = st
972 results[nf] = st
975 elif nf in dmap and (matchalways or matchfn(nf)):
973 elif nf in dmap and (matchalways or matchfn(nf)):
976 results[nf] = None
974 results[nf] = None
977
975
978 for nd, d in work:
976 for nd, d in work:
979 # alreadynormed means that processwork doesn't have to do any
977 # alreadynormed means that processwork doesn't have to do any
980 # expensive directory normalization
978 # expensive directory normalization
981 alreadynormed = not normalize or nd == d
979 alreadynormed = not normalize or nd == d
982 traverse([d], alreadynormed)
980 traverse([d], alreadynormed)
983
981
984 for s in subrepos:
982 for s in subrepos:
985 del results[s]
983 del results[s]
986 del results['.hg']
984 del results['.hg']
987
985
988 # step 3: visit remaining files from dmap
986 # step 3: visit remaining files from dmap
989 if not skipstep3 and not exact:
987 if not skipstep3 and not exact:
990 # If a dmap file is not in results yet, it was either
988 # If a dmap file is not in results yet, it was either
991 # a) not matching matchfn b) ignored, c) missing, or d) under a
989 # a) not matching matchfn b) ignored, c) missing, or d) under a
992 # symlink directory.
990 # symlink directory.
993 if not results and matchalways:
991 if not results and matchalways:
994 visit = [f for f in dmap]
992 visit = [f for f in dmap]
995 else:
993 else:
996 visit = [f for f in dmap if f not in results and matchfn(f)]
994 visit = [f for f in dmap if f not in results and matchfn(f)]
997 visit.sort()
995 visit.sort()
998
996
999 if unknown:
997 if unknown:
1000 # unknown == True means we walked all dirs under the roots
998 # unknown == True means we walked all dirs under the roots
1001 # that wasn't ignored, and everything that matched was stat'ed
999 # that wasn't ignored, and everything that matched was stat'ed
1002 # and is already in results.
1000 # and is already in results.
1003 # The rest must thus be ignored or under a symlink.
1001 # The rest must thus be ignored or under a symlink.
1004 audit_path = pathutil.pathauditor(self._root, cached=True)
1002 audit_path = pathutil.pathauditor(self._root, cached=True)
1005
1003
1006 for nf in iter(visit):
1004 for nf in iter(visit):
1007 # If a stat for the same file was already added with a
1005 # If a stat for the same file was already added with a
1008 # different case, don't add one for this, since that would
1006 # different case, don't add one for this, since that would
1009 # make it appear as if the file exists under both names
1007 # make it appear as if the file exists under both names
1010 # on disk.
1008 # on disk.
1011 if (normalizefile and
1009 if (normalizefile and
1012 normalizefile(nf, True, True) in results):
1010 normalizefile(nf, True, True) in results):
1013 results[nf] = None
1011 results[nf] = None
1014 # Report ignored items in the dmap as long as they are not
1012 # Report ignored items in the dmap as long as they are not
1015 # under a symlink directory.
1013 # under a symlink directory.
1016 elif audit_path.check(nf):
1014 elif audit_path.check(nf):
1017 try:
1015 try:
1018 results[nf] = lstat(join(nf))
1016 results[nf] = lstat(join(nf))
1019 # file was just ignored, no links, and exists
1017 # file was just ignored, no links, and exists
1020 except OSError:
1018 except OSError:
1021 # file doesn't exist
1019 # file doesn't exist
1022 results[nf] = None
1020 results[nf] = None
1023 else:
1021 else:
1024 # It's either missing or under a symlink directory
1022 # It's either missing or under a symlink directory
1025 # which we in this case report as missing
1023 # which we in this case report as missing
1026 results[nf] = None
1024 results[nf] = None
1027 else:
1025 else:
1028 # We may not have walked the full directory tree above,
1026 # We may not have walked the full directory tree above,
1029 # so stat and check everything we missed.
1027 # so stat and check everything we missed.
1030 iv = iter(visit)
1028 iv = iter(visit)
1031 for st in util.statfiles([join(i) for i in visit]):
1029 for st in util.statfiles([join(i) for i in visit]):
1032 results[next(iv)] = st
1030 results[next(iv)] = st
1033 return results
1031 return results
1034
1032
1035 def status(self, match, subrepos, ignored, clean, unknown):
1033 def status(self, match, subrepos, ignored, clean, unknown):
1036 '''Determine the status of the working copy relative to the
1034 '''Determine the status of the working copy relative to the
1037 dirstate and return a pair of (unsure, status), where status is of type
1035 dirstate and return a pair of (unsure, status), where status is of type
1038 scmutil.status and:
1036 scmutil.status and:
1039
1037
1040 unsure:
1038 unsure:
1041 files that might have been modified since the dirstate was
1039 files that might have been modified since the dirstate was
1042 written, but need to be read to be sure (size is the same
1040 written, but need to be read to be sure (size is the same
1043 but mtime differs)
1041 but mtime differs)
1044 status.modified:
1042 status.modified:
1045 files that have definitely been modified since the dirstate
1043 files that have definitely been modified since the dirstate
1046 was written (different size or mode)
1044 was written (different size or mode)
1047 status.clean:
1045 status.clean:
1048 files that have definitely not been modified since the
1046 files that have definitely not been modified since the
1049 dirstate was written
1047 dirstate was written
1050 '''
1048 '''
1051 listignored, listclean, listunknown = ignored, clean, unknown
1049 listignored, listclean, listunknown = ignored, clean, unknown
1052 lookup, modified, added, unknown, ignored = [], [], [], [], []
1050 lookup, modified, added, unknown, ignored = [], [], [], [], []
1053 removed, deleted, clean = [], [], []
1051 removed, deleted, clean = [], [], []
1054
1052
1055 dmap = self._map
1053 dmap = self._map
1056 dmap.preload()
1054 dmap.preload()
1057 dcontains = dmap.__contains__
1055 dcontains = dmap.__contains__
1058 dget = dmap.__getitem__
1056 dget = dmap.__getitem__
1059 ladd = lookup.append # aka "unsure"
1057 ladd = lookup.append # aka "unsure"
1060 madd = modified.append
1058 madd = modified.append
1061 aadd = added.append
1059 aadd = added.append
1062 uadd = unknown.append
1060 uadd = unknown.append
1063 iadd = ignored.append
1061 iadd = ignored.append
1064 radd = removed.append
1062 radd = removed.append
1065 dadd = deleted.append
1063 dadd = deleted.append
1066 cadd = clean.append
1064 cadd = clean.append
1067 mexact = match.exact
1065 mexact = match.exact
1068 dirignore = self._dirignore
1066 dirignore = self._dirignore
1069 checkexec = self._checkexec
1067 checkexec = self._checkexec
1070 copymap = self._map.copymap
1068 copymap = self._map.copymap
1071 lastnormaltime = self._lastnormaltime
1069 lastnormaltime = self._lastnormaltime
1072
1070
1073 # We need to do full walks when either
1071 # We need to do full walks when either
1074 # - we're listing all clean files, or
1072 # - we're listing all clean files, or
1075 # - match.traversedir does something, because match.traversedir should
1073 # - match.traversedir does something, because match.traversedir should
1076 # be called for every dir in the working dir
1074 # be called for every dir in the working dir
1077 full = listclean or match.traversedir is not None
1075 full = listclean or match.traversedir is not None
1078 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1076 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1079 full=full).iteritems():
1077 full=full).iteritems():
1080 if not dcontains(fn):
1078 if not dcontains(fn):
1081 if (listignored or mexact(fn)) and dirignore(fn):
1079 if (listignored or mexact(fn)) and dirignore(fn):
1082 if listignored:
1080 if listignored:
1083 iadd(fn)
1081 iadd(fn)
1084 else:
1082 else:
1085 uadd(fn)
1083 uadd(fn)
1086 continue
1084 continue
1087
1085
1088 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1086 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1089 # written like that for performance reasons. dmap[fn] is not a
1087 # written like that for performance reasons. dmap[fn] is not a
1090 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1088 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1091 # opcode has fast paths when the value to be unpacked is a tuple or
1089 # opcode has fast paths when the value to be unpacked is a tuple or
1092 # a list, but falls back to creating a full-fledged iterator in
1090 # a list, but falls back to creating a full-fledged iterator in
1093 # general. That is much slower than simply accessing and storing the
1091 # general. That is much slower than simply accessing and storing the
1094 # tuple members one by one.
1092 # tuple members one by one.
1095 t = dget(fn)
1093 t = dget(fn)
1096 state = t[0]
1094 state = t[0]
1097 mode = t[1]
1095 mode = t[1]
1098 size = t[2]
1096 size = t[2]
1099 time = t[3]
1097 time = t[3]
1100
1098
1101 if not st and state in "nma":
1099 if not st and state in "nma":
1102 dadd(fn)
1100 dadd(fn)
1103 elif state == 'n':
1101 elif state == 'n':
1104 if (size >= 0 and
1102 if (size >= 0 and
1105 ((size != st.st_size and size != st.st_size & _rangemask)
1103 ((size != st.st_size and size != st.st_size & _rangemask)
1106 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1104 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1107 or size == -2 # other parent
1105 or size == -2 # other parent
1108 or fn in copymap):
1106 or fn in copymap):
1109 madd(fn)
1107 madd(fn)
1110 elif time != st.st_mtime and time != st.st_mtime & _rangemask:
1108 elif time != st.st_mtime and time != st.st_mtime & _rangemask:
1111 ladd(fn)
1109 ladd(fn)
1112 elif st.st_mtime == lastnormaltime:
1110 elif st.st_mtime == lastnormaltime:
1113 # fn may have just been marked as normal and it may have
1111 # fn may have just been marked as normal and it may have
1114 # changed in the same second without changing its size.
1112 # changed in the same second without changing its size.
1115 # This can happen if we quickly do multiple commits.
1113 # This can happen if we quickly do multiple commits.
1116 # Force lookup, so we don't miss such a racy file change.
1114 # Force lookup, so we don't miss such a racy file change.
1117 ladd(fn)
1115 ladd(fn)
1118 elif listclean:
1116 elif listclean:
1119 cadd(fn)
1117 cadd(fn)
1120 elif state == 'm':
1118 elif state == 'm':
1121 madd(fn)
1119 madd(fn)
1122 elif state == 'a':
1120 elif state == 'a':
1123 aadd(fn)
1121 aadd(fn)
1124 elif state == 'r':
1122 elif state == 'r':
1125 radd(fn)
1123 radd(fn)
1126
1124
1127 return (lookup, scmutil.status(modified, added, removed, deleted,
1125 return (lookup, scmutil.status(modified, added, removed, deleted,
1128 unknown, ignored, clean))
1126 unknown, ignored, clean))
1129
1127
1130 def matches(self, match):
1128 def matches(self, match):
1131 '''
1129 '''
1132 return files in the dirstate (in whatever state) filtered by match
1130 return files in the dirstate (in whatever state) filtered by match
1133 '''
1131 '''
1134 dmap = self._map
1132 dmap = self._map
1135 if match.always():
1133 if match.always():
1136 return dmap.keys()
1134 return dmap.keys()
1137 files = match.files()
1135 files = match.files()
1138 if match.isexact():
1136 if match.isexact():
1139 # fast path -- filter the other way around, since typically files is
1137 # fast path -- filter the other way around, since typically files is
1140 # much smaller than dmap
1138 # much smaller than dmap
1141 return [f for f in files if f in dmap]
1139 return [f for f in files if f in dmap]
1142 if match.prefix() and all(fn in dmap for fn in files):
1140 if match.prefix() and all(fn in dmap for fn in files):
1143 # fast path -- all the values are known to be files, so just return
1141 # fast path -- all the values are known to be files, so just return
1144 # that
1142 # that
1145 return list(files)
1143 return list(files)
1146 return [f for f in dmap if match(f)]
1144 return [f for f in dmap if match(f)]
1147
1145
1148 def _actualfilename(self, tr):
1146 def _actualfilename(self, tr):
1149 if tr:
1147 if tr:
1150 return self._pendingfilename
1148 return self._pendingfilename
1151 else:
1149 else:
1152 return self._filename
1150 return self._filename
1153
1151
1154 def savebackup(self, tr, backupname):
1152 def savebackup(self, tr, backupname):
1155 '''Save current dirstate into backup file'''
1153 '''Save current dirstate into backup file'''
1156 filename = self._actualfilename(tr)
1154 filename = self._actualfilename(tr)
1157 assert backupname != filename
1155 assert backupname != filename
1158
1156
1159 # use '_writedirstate' instead of 'write' to write changes certainly,
1157 # use '_writedirstate' instead of 'write' to write changes certainly,
1160 # because the latter omits writing out if transaction is running.
1158 # because the latter omits writing out if transaction is running.
1161 # output file will be used to create backup of dirstate at this point.
1159 # output file will be used to create backup of dirstate at this point.
1162 if self._dirty or not self._opener.exists(filename):
1160 if self._dirty or not self._opener.exists(filename):
1163 self._writedirstate(self._opener(filename, "w", atomictemp=True,
1161 self._writedirstate(self._opener(filename, "w", atomictemp=True,
1164 checkambig=True))
1162 checkambig=True))
1165
1163
1166 if tr:
1164 if tr:
1167 # ensure that subsequent tr.writepending returns True for
1165 # ensure that subsequent tr.writepending returns True for
1168 # changes written out above, even if dirstate is never
1166 # changes written out above, even if dirstate is never
1169 # changed after this
1167 # changed after this
1170 tr.addfilegenerator('dirstate', (self._filename,),
1168 tr.addfilegenerator('dirstate', (self._filename,),
1171 self._writedirstate, location='plain')
1169 self._writedirstate, location='plain')
1172
1170
1173 # ensure that pending file written above is unlinked at
1171 # ensure that pending file written above is unlinked at
1174 # failure, even if tr.writepending isn't invoked until the
1172 # failure, even if tr.writepending isn't invoked until the
1175 # end of this transaction
1173 # end of this transaction
1176 tr.registertmp(filename, location='plain')
1174 tr.registertmp(filename, location='plain')
1177
1175
1178 self._opener.tryunlink(backupname)
1176 self._opener.tryunlink(backupname)
1179 # hardlink backup is okay because _writedirstate is always called
1177 # hardlink backup is okay because _writedirstate is always called
1180 # with an "atomictemp=True" file.
1178 # with an "atomictemp=True" file.
1181 util.copyfile(self._opener.join(filename),
1179 util.copyfile(self._opener.join(filename),
1182 self._opener.join(backupname), hardlink=True)
1180 self._opener.join(backupname), hardlink=True)
1183
1181
1184 def restorebackup(self, tr, backupname):
1182 def restorebackup(self, tr, backupname):
1185 '''Restore dirstate by backup file'''
1183 '''Restore dirstate by backup file'''
1186 # this "invalidate()" prevents "wlock.release()" from writing
1184 # this "invalidate()" prevents "wlock.release()" from writing
1187 # changes of dirstate out after restoring from backup file
1185 # changes of dirstate out after restoring from backup file
1188 self.invalidate()
1186 self.invalidate()
1189 filename = self._actualfilename(tr)
1187 filename = self._actualfilename(tr)
1190 o = self._opener
1188 o = self._opener
1191 if util.samefile(o.join(backupname), o.join(filename)):
1189 if util.samefile(o.join(backupname), o.join(filename)):
1192 o.unlink(backupname)
1190 o.unlink(backupname)
1193 else:
1191 else:
1194 o.rename(backupname, filename, checkambig=True)
1192 o.rename(backupname, filename, checkambig=True)
1195
1193
1196 def clearbackup(self, tr, backupname):
1194 def clearbackup(self, tr, backupname):
1197 '''Clear backup file'''
1195 '''Clear backup file'''
1198 self._opener.unlink(backupname)
1196 self._opener.unlink(backupname)
1199
1197
1200 class dirstatemap(object):
1198 class dirstatemap(object):
1201 def __init__(self, ui, opener, root):
1199 def __init__(self, ui, opener, root):
1202 self._ui = ui
1200 self._ui = ui
1203 self._opener = opener
1201 self._opener = opener
1204 self._root = root
1202 self._root = root
1205 self._filename = 'dirstate'
1203 self._filename = 'dirstate'
1206
1204
1207 self._parents = None
1205 self._parents = None
1208 self._dirtyparents = False
1206 self._dirtyparents = False
1209
1207
1210 # for consistent view between _pl() and _read() invocations
1208 # for consistent view between _pl() and _read() invocations
1211 self._pendingmode = None
1209 self._pendingmode = None
1212
1210
1213 @propertycache
1211 @propertycache
1214 def _map(self):
1212 def _map(self):
1215 self._map = {}
1213 self._map = {}
1216 self.read()
1214 self.read()
1217 return self._map
1215 return self._map
1218
1216
1219 @propertycache
1217 @propertycache
1220 def copymap(self):
1218 def copymap(self):
1221 self.copymap = {}
1219 self.copymap = {}
1222 self._map
1220 self._map
1223 return self.copymap
1221 return self.copymap
1224
1222
1225 def clear(self):
1223 def clear(self):
1226 self._map.clear()
1224 self._map.clear()
1227 self.copymap.clear()
1225 self.copymap.clear()
1228 self.setparents(nullid, nullid)
1226 self.setparents(nullid, nullid)
1229 util.clearcachedproperty(self, "dirs")
1227 util.clearcachedproperty(self, "dirs")
1230 util.clearcachedproperty(self, "filefoldmap")
1228 util.clearcachedproperty(self, "filefoldmap")
1231 util.clearcachedproperty(self, "dirfoldmap")
1229 util.clearcachedproperty(self, "dirfoldmap")
1232 util.clearcachedproperty(self, "nonnormalset")
1230 util.clearcachedproperty(self, "nonnormalset")
1233 util.clearcachedproperty(self, "otherparentset")
1231 util.clearcachedproperty(self, "otherparentset")
1234
1232
1235 def iteritems(self):
1233 def iteritems(self):
1236 return self._map.iteritems()
1234 return self._map.iteritems()
1237
1235
1238 def __len__(self):
1236 def __len__(self):
1239 return len(self._map)
1237 return len(self._map)
1240
1238
1241 def __iter__(self):
1239 def __iter__(self):
1242 return iter(self._map)
1240 return iter(self._map)
1243
1241
1244 def get(self, key, default=None):
1242 def get(self, key, default=None):
1245 return self._map.get(key, default)
1243 return self._map.get(key, default)
1246
1244
1247 def __contains__(self, key):
1245 def __contains__(self, key):
1248 return key in self._map
1246 return key in self._map
1249
1247
1250 def __setitem__(self, key, value):
1248 def __setitem__(self, key, value):
1251 self._map[key] = value
1249 self._map[key] = value
1252
1250
1253 def __getitem__(self, key):
1251 def __getitem__(self, key):
1254 return self._map[key]
1252 return self._map[key]
1255
1253
1256 def __delitem__(self, key):
1254 def __delitem__(self, key):
1257 del self._map[key]
1255 del self._map[key]
1258
1256
1259 def keys(self):
1257 def keys(self):
1260 return self._map.keys()
1258 return self._map.keys()
1261
1259
1262 def preload(self):
1260 def preload(self):
1263 """Loads the underlying data, if it's not already loaded"""
1261 """Loads the underlying data, if it's not already loaded"""
1264 self._map
1262 self._map
1265
1263
1266 def nonnormalentries(self):
1264 def nonnormalentries(self):
1267 '''Compute the nonnormal dirstate entries from the dmap'''
1265 '''Compute the nonnormal dirstate entries from the dmap'''
1268 try:
1266 try:
1269 return parsers.nonnormalotherparententries(self._map)
1267 return parsers.nonnormalotherparententries(self._map)
1270 except AttributeError:
1268 except AttributeError:
1271 nonnorm = set()
1269 nonnorm = set()
1272 otherparent = set()
1270 otherparent = set()
1273 for fname, e in self._map.iteritems():
1271 for fname, e in self._map.iteritems():
1274 if e[0] != 'n' or e[3] == -1:
1272 if e[0] != 'n' or e[3] == -1:
1275 nonnorm.add(fname)
1273 nonnorm.add(fname)
1276 if e[0] == 'n' and e[2] == -2:
1274 if e[0] == 'n' and e[2] == -2:
1277 otherparent.add(fname)
1275 otherparent.add(fname)
1278 return nonnorm, otherparent
1276 return nonnorm, otherparent
1279
1277
1280 @propertycache
1278 @propertycache
1281 def filefoldmap(self):
1279 def filefoldmap(self):
1282 """Returns a dictionary mapping normalized case paths to their
1280 """Returns a dictionary mapping normalized case paths to their
1283 non-normalized versions.
1281 non-normalized versions.
1284 """
1282 """
1285 try:
1283 try:
1286 makefilefoldmap = parsers.make_file_foldmap
1284 makefilefoldmap = parsers.make_file_foldmap
1287 except AttributeError:
1285 except AttributeError:
1288 pass
1286 pass
1289 else:
1287 else:
1290 return makefilefoldmap(self._map, util.normcasespec,
1288 return makefilefoldmap(self._map, util.normcasespec,
1291 util.normcasefallback)
1289 util.normcasefallback)
1292
1290
1293 f = {}
1291 f = {}
1294 normcase = util.normcase
1292 normcase = util.normcase
1295 for name, s in self._map.iteritems():
1293 for name, s in self._map.iteritems():
1296 if s[0] != 'r':
1294 if s[0] != 'r':
1297 f[normcase(name)] = name
1295 f[normcase(name)] = name
1298 f['.'] = '.' # prevents useless util.fspath() invocation
1296 f['.'] = '.' # prevents useless util.fspath() invocation
1299 return f
1297 return f
1300
1298
1301 @propertycache
1299 @propertycache
1302 def dirs(self):
1300 def dirs(self):
1303 """Returns a set-like object containing all the directories in the
1301 """Returns a set-like object containing all the directories in the
1304 current dirstate.
1302 current dirstate.
1305 """
1303 """
1306 return util.dirs(self._map, 'r')
1304 return util.dirs(self._map, 'r')
1307
1305
1308 def _opendirstatefile(self):
1306 def _opendirstatefile(self):
1309 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1307 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1310 if self._pendingmode is not None and self._pendingmode != mode:
1308 if self._pendingmode is not None and self._pendingmode != mode:
1311 fp.close()
1309 fp.close()
1312 raise error.Abort(_('working directory state may be '
1310 raise error.Abort(_('working directory state may be '
1313 'changed parallelly'))
1311 'changed parallelly'))
1314 self._pendingmode = mode
1312 self._pendingmode = mode
1315 return fp
1313 return fp
1316
1314
1317 def parents(self):
1315 def parents(self):
1318 if not self._parents:
1316 if not self._parents:
1319 try:
1317 try:
1320 fp = self._opendirstatefile()
1318 fp = self._opendirstatefile()
1321 st = fp.read(40)
1319 st = fp.read(40)
1322 fp.close()
1320 fp.close()
1323 except IOError as err:
1321 except IOError as err:
1324 if err.errno != errno.ENOENT:
1322 if err.errno != errno.ENOENT:
1325 raise
1323 raise
1326 # File doesn't exist, so the current state is empty
1324 # File doesn't exist, so the current state is empty
1327 st = ''
1325 st = ''
1328
1326
1329 l = len(st)
1327 l = len(st)
1330 if l == 40:
1328 if l == 40:
1331 self._parents = st[:20], st[20:40]
1329 self._parents = st[:20], st[20:40]
1332 elif l == 0:
1330 elif l == 0:
1333 self._parents = [nullid, nullid]
1331 self._parents = [nullid, nullid]
1334 else:
1332 else:
1335 raise error.Abort(_('working directory state appears '
1333 raise error.Abort(_('working directory state appears '
1336 'damaged!'))
1334 'damaged!'))
1337
1335
1338 return self._parents
1336 return self._parents
1339
1337
1340 def setparents(self, p1, p2):
1338 def setparents(self, p1, p2):
1341 self._parents = (p1, p2)
1339 self._parents = (p1, p2)
1342 self._dirtyparents = True
1340 self._dirtyparents = True
1343
1341
1344 def read(self):
1342 def read(self):
1345 # ignore HG_PENDING because identity is used only for writing
1343 # ignore HG_PENDING because identity is used only for writing
1346 self.identity = util.filestat.frompath(
1344 self.identity = util.filestat.frompath(
1347 self._opener.join(self._filename))
1345 self._opener.join(self._filename))
1348
1346
1349 try:
1347 try:
1350 fp = self._opendirstatefile()
1348 fp = self._opendirstatefile()
1351 try:
1349 try:
1352 st = fp.read()
1350 st = fp.read()
1353 finally:
1351 finally:
1354 fp.close()
1352 fp.close()
1355 except IOError as err:
1353 except IOError as err:
1356 if err.errno != errno.ENOENT:
1354 if err.errno != errno.ENOENT:
1357 raise
1355 raise
1358 return
1356 return
1359 if not st:
1357 if not st:
1360 return
1358 return
1361
1359
1362 if util.safehasattr(parsers, 'dict_new_presized'):
1360 if util.safehasattr(parsers, 'dict_new_presized'):
1363 # Make an estimate of the number of files in the dirstate based on
1361 # Make an estimate of the number of files in the dirstate based on
1364 # its size. From a linear regression on a set of real-world repos,
1362 # its size. From a linear regression on a set of real-world repos,
1365 # all over 10,000 files, the size of a dirstate entry is 85
1363 # all over 10,000 files, the size of a dirstate entry is 85
1366 # bytes. The cost of resizing is significantly higher than the cost
1364 # bytes. The cost of resizing is significantly higher than the cost
1367 # of filling in a larger presized dict, so subtract 20% from the
1365 # of filling in a larger presized dict, so subtract 20% from the
1368 # size.
1366 # size.
1369 #
1367 #
1370 # This heuristic is imperfect in many ways, so in a future dirstate
1368 # This heuristic is imperfect in many ways, so in a future dirstate
1371 # format update it makes sense to just record the number of entries
1369 # format update it makes sense to just record the number of entries
1372 # on write.
1370 # on write.
1373 self._map = parsers.dict_new_presized(len(st) / 71)
1371 self._map = parsers.dict_new_presized(len(st) / 71)
1374
1372
1375 # Python's garbage collector triggers a GC each time a certain number
1373 # Python's garbage collector triggers a GC each time a certain number
1376 # of container objects (the number being defined by
1374 # of container objects (the number being defined by
1377 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1375 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1378 # for each file in the dirstate. The C version then immediately marks
1376 # for each file in the dirstate. The C version then immediately marks
1379 # them as not to be tracked by the collector. However, this has no
1377 # them as not to be tracked by the collector. However, this has no
1380 # effect on when GCs are triggered, only on what objects the GC looks
1378 # effect on when GCs are triggered, only on what objects the GC looks
1381 # into. This means that O(number of files) GCs are unavoidable.
1379 # into. This means that O(number of files) GCs are unavoidable.
1382 # Depending on when in the process's lifetime the dirstate is parsed,
1380 # Depending on when in the process's lifetime the dirstate is parsed,
1383 # this can get very expensive. As a workaround, disable GC while
1381 # this can get very expensive. As a workaround, disable GC while
1384 # parsing the dirstate.
1382 # parsing the dirstate.
1385 #
1383 #
1386 # (we cannot decorate the function directly since it is in a C module)
1384 # (we cannot decorate the function directly since it is in a C module)
1387 parse_dirstate = util.nogc(parsers.parse_dirstate)
1385 parse_dirstate = util.nogc(parsers.parse_dirstate)
1388 p = parse_dirstate(self._map, self.copymap, st)
1386 p = parse_dirstate(self._map, self.copymap, st)
1389 if not self._dirtyparents:
1387 if not self._dirtyparents:
1390 self.setparents(*p)
1388 self.setparents(*p)
1391
1389
1392 # Avoid excess attribute lookups by fast pathing certain checks
1390 # Avoid excess attribute lookups by fast pathing certain checks
1393 self.__contains__ = self._map.__contains__
1391 self.__contains__ = self._map.__contains__
1394 self.__getitem__ = self._map.__getitem__
1392 self.__getitem__ = self._map.__getitem__
1395 self.__setitem__ = self._map.__setitem__
1393 self.__setitem__ = self._map.__setitem__
1396 self.__delitem__ = self._map.__delitem__
1394 self.__delitem__ = self._map.__delitem__
1397 self.get = self._map.get
1395 self.get = self._map.get
1398
1396
1399 def write(self, st, now):
1397 def write(self, st, now):
1400 st.write(parsers.pack_dirstate(self._map, self.copymap,
1398 st.write(parsers.pack_dirstate(self._map, self.copymap,
1401 self.parents(), now))
1399 self.parents(), now))
1402 st.close()
1400 st.close()
1403 self._dirtyparents = False
1401 self._dirtyparents = False
1404 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1402 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1405
1403
1406 @propertycache
1404 @propertycache
1407 def nonnormalset(self):
1405 def nonnormalset(self):
1408 nonnorm, otherparents = self.nonnormalentries()
1406 nonnorm, otherparents = self.nonnormalentries()
1409 self.otherparentset = otherparents
1407 self.otherparentset = otherparents
1410 return nonnorm
1408 return nonnorm
1411
1409
1412 @propertycache
1410 @propertycache
1413 def otherparentset(self):
1411 def otherparentset(self):
1414 nonnorm, otherparents = self.nonnormalentries()
1412 nonnorm, otherparents = self.nonnormalentries()
1415 self.nonnormalset = nonnorm
1413 self.nonnormalset = nonnorm
1416 return otherparents
1414 return otherparents
1417
1415
1418 @propertycache
1416 @propertycache
1419 def identity(self):
1417 def identity(self):
1420 self._map
1418 self._map
1421 return self.identity
1419 return self.identity
1422
1420
1423 @propertycache
1421 @propertycache
1424 def dirfoldmap(self):
1422 def dirfoldmap(self):
1425 f = {}
1423 f = {}
1426 normcase = util.normcase
1424 normcase = util.normcase
1427 for name in self.dirs:
1425 for name in self.dirs:
1428 f[normcase(name)] = name
1426 f[normcase(name)] = name
1429 return f
1427 return f
General Comments 0
You need to be logged in to leave comments. Login now