dirstate: move dropping of folded filenames into the dirstate map...
Mark Thomas
r35081:e6c64744 default
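The only change in the hunk shown below is the removal of the folded-filename (filefoldmap) invalidation from dirstate._droppath(); per the commit message, that responsibility moves into the dirstate map itself. The dirstatemap side of the change is not part of the lines shown here, so the following is only a rough sketch, written under the assumption that dirstatemap.dropfile() now performs the invalidation; it is illustrative, not the committed code.

    # Hypothetical sketch (not part of this hunk): dirstatemap.dropfile()
    # takes over invalidating the cached folded-filename entry, so
    # dirstate._droppath() no longer reaches into the map's internals.
    def dropfile(self, f, oldstate):
        """Remove f from the dirstate map; return True if it was tracked."""
        existed = self._map.pop(f, None) is not None
        if existed:
            if oldstate != 'r' and "dirs" in self.__dict__:
                # keep the directory reference counts in sync
                self.dirs.delpath(f)
            if "filefoldmap" in self.__dict__:
                # same case-normalization the removed _droppath code used
                normed = util.normcase(f)
                self.filefoldmap.pop(normed, None)
        self.nonnormalset.discard(f)
        return existed

With the invalidation handled inside the map, _droppath() shrinks to just recording the updated file, which is exactly what the hunk below shows.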
@@ -1,1483 +1,1481 @@
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import nullid
17 from .node import nullid
18 from . import (
18 from . import (
19 encoding,
19 encoding,
20 error,
20 error,
21 match as matchmod,
21 match as matchmod,
22 pathutil,
22 pathutil,
23 policy,
23 policy,
24 pycompat,
24 pycompat,
25 scmutil,
25 scmutil,
26 txnutil,
26 txnutil,
27 util,
27 util,
28 )
28 )
29
29
30 parsers = policy.importmod(r'parsers')
30 parsers = policy.importmod(r'parsers')
31
31
32 propertycache = util.propertycache
32 propertycache = util.propertycache
33 filecache = scmutil.filecache
33 filecache = scmutil.filecache
34 _rangemask = 0x7fffffff
34 _rangemask = 0x7fffffff
35
35
36 dirstatetuple = parsers.dirstatetuple
36 dirstatetuple = parsers.dirstatetuple
37
37
38 class repocache(filecache):
38 class repocache(filecache):
39 """filecache for files in .hg/"""
39 """filecache for files in .hg/"""
40 def join(self, obj, fname):
40 def join(self, obj, fname):
41 return obj._opener.join(fname)
41 return obj._opener.join(fname)
42
42
43 class rootcache(filecache):
43 class rootcache(filecache):
44 """filecache for files in the repository root"""
44 """filecache for files in the repository root"""
45 def join(self, obj, fname):
45 def join(self, obj, fname):
46 return obj._join(fname)
46 return obj._join(fname)
47
47
48 def _getfsnow(vfs):
48 def _getfsnow(vfs):
49 '''Get "now" timestamp on filesystem'''
49 '''Get "now" timestamp on filesystem'''
50 tmpfd, tmpname = vfs.mkstemp()
50 tmpfd, tmpname = vfs.mkstemp()
51 try:
51 try:
52 return os.fstat(tmpfd).st_mtime
52 return os.fstat(tmpfd).st_mtime
53 finally:
53 finally:
54 os.close(tmpfd)
54 os.close(tmpfd)
55 vfs.unlink(tmpname)
55 vfs.unlink(tmpname)
56
56
57 class dirstate(object):
57 class dirstate(object):
58
58
59 def __init__(self, opener, ui, root, validate, sparsematchfn):
59 def __init__(self, opener, ui, root, validate, sparsematchfn):
60 '''Create a new dirstate object.
60 '''Create a new dirstate object.
61
61
62 opener is an open()-like callable that can be used to open the
62 opener is an open()-like callable that can be used to open the
63 dirstate file; root is the root of the directory tracked by
63 dirstate file; root is the root of the directory tracked by
64 the dirstate.
64 the dirstate.
65 '''
65 '''
66 self._opener = opener
66 self._opener = opener
67 self._validate = validate
67 self._validate = validate
68 self._root = root
68 self._root = root
69 self._sparsematchfn = sparsematchfn
69 self._sparsematchfn = sparsematchfn
70 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
70 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
71 # UNC path pointing to root share (issue4557)
71 # UNC path pointing to root share (issue4557)
72 self._rootdir = pathutil.normasprefix(root)
72 self._rootdir = pathutil.normasprefix(root)
73 self._dirty = False
73 self._dirty = False
74 self._lastnormaltime = 0
74 self._lastnormaltime = 0
75 self._ui = ui
75 self._ui = ui
76 self._filecache = {}
76 self._filecache = {}
77 self._parentwriters = 0
77 self._parentwriters = 0
78 self._filename = 'dirstate'
78 self._filename = 'dirstate'
79 self._pendingfilename = '%s.pending' % self._filename
79 self._pendingfilename = '%s.pending' % self._filename
80 self._plchangecallbacks = {}
80 self._plchangecallbacks = {}
81 self._origpl = None
81 self._origpl = None
82 self._updatedfiles = set()
82 self._updatedfiles = set()
83
83
84 @contextlib.contextmanager
84 @contextlib.contextmanager
85 def parentchange(self):
85 def parentchange(self):
86 '''Context manager for handling dirstate parents.
86 '''Context manager for handling dirstate parents.
87
87
88 If an exception occurs in the scope of the context manager,
88 If an exception occurs in the scope of the context manager,
89 the incoherent dirstate won't be written when wlock is
89 the incoherent dirstate won't be written when wlock is
90 released.
90 released.
91 '''
91 '''
92 self._parentwriters += 1
92 self._parentwriters += 1
93 yield
93 yield
94 # Typically we want the "undo" step of a context manager in a
94 # Typically we want the "undo" step of a context manager in a
95 # finally block so it happens even when an exception
95 # finally block so it happens even when an exception
96 # occurs. In this case, however, we only want to decrement
96 # occurs. In this case, however, we only want to decrement
97 # parentwriters if the code in the with statement exits
97 # parentwriters if the code in the with statement exits
98 # normally, so we don't have a try/finally here on purpose.
98 # normally, so we don't have a try/finally here on purpose.
99 self._parentwriters -= 1
99 self._parentwriters -= 1
100
100
101 def beginparentchange(self):
101 def beginparentchange(self):
102 '''Marks the beginning of a set of changes that involve changing
102 '''Marks the beginning of a set of changes that involve changing
103 the dirstate parents. If there is an exception during this time,
103 the dirstate parents. If there is an exception during this time,
104 the dirstate will not be written when the wlock is released. This
104 the dirstate will not be written when the wlock is released. This
105 prevents writing an incoherent dirstate where the parent doesn't
105 prevents writing an incoherent dirstate where the parent doesn't
106 match the contents.
106 match the contents.
107 '''
107 '''
108 self._ui.deprecwarn('beginparentchange is obsoleted by the '
108 self._ui.deprecwarn('beginparentchange is obsoleted by the '
109 'parentchange context manager.', '4.3')
109 'parentchange context manager.', '4.3')
110 self._parentwriters += 1
110 self._parentwriters += 1
111
111
112 def endparentchange(self):
112 def endparentchange(self):
113 '''Marks the end of a set of changes that involve changing the
113 '''Marks the end of a set of changes that involve changing the
114 dirstate parents. Once all parent changes have been marked done,
114 dirstate parents. Once all parent changes have been marked done,
115 the wlock will be free to write the dirstate on release.
115 the wlock will be free to write the dirstate on release.
116 '''
116 '''
117 self._ui.deprecwarn('endparentchange is obsoleted by the '
117 self._ui.deprecwarn('endparentchange is obsoleted by the '
118 'parentchange context manager.', '4.3')
118 'parentchange context manager.', '4.3')
119 if self._parentwriters > 0:
119 if self._parentwriters > 0:
120 self._parentwriters -= 1
120 self._parentwriters -= 1
121
121
122 def pendingparentchange(self):
122 def pendingparentchange(self):
123 '''Returns true if the dirstate is in the middle of a set of changes
123 '''Returns true if the dirstate is in the middle of a set of changes
124 that modify the dirstate parent.
124 that modify the dirstate parent.
125 '''
125 '''
126 return self._parentwriters > 0
126 return self._parentwriters > 0
127
127
128 @propertycache
128 @propertycache
129 def _map(self):
129 def _map(self):
130 """Return the dirstate contents (see documentation for dirstatemap)."""
130 """Return the dirstate contents (see documentation for dirstatemap)."""
131 self._map = dirstatemap(self._ui, self._opener, self._root)
131 self._map = dirstatemap(self._ui, self._opener, self._root)
132 return self._map
132 return self._map
133
133
134 @property
134 @property
135 def _sparsematcher(self):
135 def _sparsematcher(self):
136 """The matcher for the sparse checkout.
136 """The matcher for the sparse checkout.
137
137
138 The working directory may not include every file from a manifest. The
138 The working directory may not include every file from a manifest. The
139 matcher obtained by this property will match a path if it is to be
139 matcher obtained by this property will match a path if it is to be
140 included in the working directory.
140 included in the working directory.
141 """
141 """
142 # TODO there is potential to cache this property. For now, the matcher
142 # TODO there is potential to cache this property. For now, the matcher
143 # is resolved on every access. (But the called function does use a
143 # is resolved on every access. (But the called function does use a
144 # cache to keep the lookup fast.)
144 # cache to keep the lookup fast.)
145 return self._sparsematchfn()
145 return self._sparsematchfn()
146
146
147 @repocache('branch')
147 @repocache('branch')
148 def _branch(self):
148 def _branch(self):
149 try:
149 try:
150 return self._opener.read("branch").strip() or "default"
150 return self._opener.read("branch").strip() or "default"
151 except IOError as inst:
151 except IOError as inst:
152 if inst.errno != errno.ENOENT:
152 if inst.errno != errno.ENOENT:
153 raise
153 raise
154 return "default"
154 return "default"
155
155
156 @property
156 @property
157 def _pl(self):
157 def _pl(self):
158 return self._map.parents()
158 return self._map.parents()
159
159
160 def dirs(self):
160 def dirs(self):
161 return self._map.dirs
161 return self._map.dirs
162
162
163 @rootcache('.hgignore')
163 @rootcache('.hgignore')
164 def _ignore(self):
164 def _ignore(self):
165 files = self._ignorefiles()
165 files = self._ignorefiles()
166 if not files:
166 if not files:
167 return matchmod.never(self._root, '')
167 return matchmod.never(self._root, '')
168
168
169 pats = ['include:%s' % f for f in files]
169 pats = ['include:%s' % f for f in files]
170 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
170 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
171
171
172 @propertycache
172 @propertycache
173 def _slash(self):
173 def _slash(self):
174 return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'
174 return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'
175
175
176 @propertycache
176 @propertycache
177 def _checklink(self):
177 def _checklink(self):
178 return util.checklink(self._root)
178 return util.checklink(self._root)
179
179
180 @propertycache
180 @propertycache
181 def _checkexec(self):
181 def _checkexec(self):
182 return util.checkexec(self._root)
182 return util.checkexec(self._root)
183
183
184 @propertycache
184 @propertycache
185 def _checkcase(self):
185 def _checkcase(self):
186 return not util.fscasesensitive(self._join('.hg'))
186 return not util.fscasesensitive(self._join('.hg'))
187
187
188 def _join(self, f):
188 def _join(self, f):
189 # much faster than os.path.join()
189 # much faster than os.path.join()
190 # it's safe because f is always a relative path
190 # it's safe because f is always a relative path
191 return self._rootdir + f
191 return self._rootdir + f
192
192
193 def flagfunc(self, buildfallback):
193 def flagfunc(self, buildfallback):
194 if self._checklink and self._checkexec:
194 if self._checklink and self._checkexec:
195 def f(x):
195 def f(x):
196 try:
196 try:
197 st = os.lstat(self._join(x))
197 st = os.lstat(self._join(x))
198 if util.statislink(st):
198 if util.statislink(st):
199 return 'l'
199 return 'l'
200 if util.statisexec(st):
200 if util.statisexec(st):
201 return 'x'
201 return 'x'
202 except OSError:
202 except OSError:
203 pass
203 pass
204 return ''
204 return ''
205 return f
205 return f
206
206
207 fallback = buildfallback()
207 fallback = buildfallback()
208 if self._checklink:
208 if self._checklink:
209 def f(x):
209 def f(x):
210 if os.path.islink(self._join(x)):
210 if os.path.islink(self._join(x)):
211 return 'l'
211 return 'l'
212 if 'x' in fallback(x):
212 if 'x' in fallback(x):
213 return 'x'
213 return 'x'
214 return ''
214 return ''
215 return f
215 return f
216 if self._checkexec:
216 if self._checkexec:
217 def f(x):
217 def f(x):
218 if 'l' in fallback(x):
218 if 'l' in fallback(x):
219 return 'l'
219 return 'l'
220 if util.isexec(self._join(x)):
220 if util.isexec(self._join(x)):
221 return 'x'
221 return 'x'
222 return ''
222 return ''
223 return f
223 return f
224 else:
224 else:
225 return fallback
225 return fallback
226
226
227 @propertycache
227 @propertycache
228 def _cwd(self):
228 def _cwd(self):
229 # internal config: ui.forcecwd
229 # internal config: ui.forcecwd
230 forcecwd = self._ui.config('ui', 'forcecwd')
230 forcecwd = self._ui.config('ui', 'forcecwd')
231 if forcecwd:
231 if forcecwd:
232 return forcecwd
232 return forcecwd
233 return pycompat.getcwd()
233 return pycompat.getcwd()
234
234
235 def getcwd(self):
235 def getcwd(self):
236 '''Return the path from which a canonical path is calculated.
236 '''Return the path from which a canonical path is calculated.
237
237
238 This path should be used to resolve file patterns or to convert
238 This path should be used to resolve file patterns or to convert
239 canonical paths back to file paths for display. It shouldn't be
239 canonical paths back to file paths for display. It shouldn't be
240 used to get real file paths. Use vfs functions instead.
240 used to get real file paths. Use vfs functions instead.
241 '''
241 '''
242 cwd = self._cwd
242 cwd = self._cwd
243 if cwd == self._root:
243 if cwd == self._root:
244 return ''
244 return ''
245 # self._root ends with a path separator if self._root is '/' or 'C:\'
245 # self._root ends with a path separator if self._root is '/' or 'C:\'
246 rootsep = self._root
246 rootsep = self._root
247 if not util.endswithsep(rootsep):
247 if not util.endswithsep(rootsep):
248 rootsep += pycompat.ossep
248 rootsep += pycompat.ossep
249 if cwd.startswith(rootsep):
249 if cwd.startswith(rootsep):
250 return cwd[len(rootsep):]
250 return cwd[len(rootsep):]
251 else:
251 else:
252 # we're outside the repo. return an absolute path.
252 # we're outside the repo. return an absolute path.
253 return cwd
253 return cwd
254
254
255 def pathto(self, f, cwd=None):
255 def pathto(self, f, cwd=None):
256 if cwd is None:
256 if cwd is None:
257 cwd = self.getcwd()
257 cwd = self.getcwd()
258 path = util.pathto(self._root, cwd, f)
258 path = util.pathto(self._root, cwd, f)
259 if self._slash:
259 if self._slash:
260 return util.pconvert(path)
260 return util.pconvert(path)
261 return path
261 return path
262
262
263 def __getitem__(self, key):
263 def __getitem__(self, key):
264 '''Return the current state of key (a filename) in the dirstate.
264 '''Return the current state of key (a filename) in the dirstate.
265
265
266 States are:
266 States are:
267 n normal
267 n normal
268 m needs merging
268 m needs merging
269 r marked for removal
269 r marked for removal
270 a marked for addition
270 a marked for addition
271 ? not tracked
271 ? not tracked
272 '''
272 '''
273 return self._map.get(key, ("?",))[0]
273 return self._map.get(key, ("?",))[0]
274
274
275 def __contains__(self, key):
275 def __contains__(self, key):
276 return key in self._map
276 return key in self._map
277
277
278 def __iter__(self):
278 def __iter__(self):
279 return iter(sorted(self._map))
279 return iter(sorted(self._map))
280
280
281 def items(self):
281 def items(self):
282 return self._map.iteritems()
282 return self._map.iteritems()
283
283
284 iteritems = items
284 iteritems = items
285
285
286 def parents(self):
286 def parents(self):
287 return [self._validate(p) for p in self._pl]
287 return [self._validate(p) for p in self._pl]
288
288
289 def p1(self):
289 def p1(self):
290 return self._validate(self._pl[0])
290 return self._validate(self._pl[0])
291
291
292 def p2(self):
292 def p2(self):
293 return self._validate(self._pl[1])
293 return self._validate(self._pl[1])
294
294
295 def branch(self):
295 def branch(self):
296 return encoding.tolocal(self._branch)
296 return encoding.tolocal(self._branch)
297
297
298 def setparents(self, p1, p2=nullid):
298 def setparents(self, p1, p2=nullid):
299 """Set dirstate parents to p1 and p2.
299 """Set dirstate parents to p1 and p2.
300
300
301 When moving from two parents to one, 'm' merged entries a
301 When moving from two parents to one, 'm' merged entries a
302 adjusted to normal and previous copy records discarded and
302 adjusted to normal and previous copy records discarded and
303 returned by the call.
303 returned by the call.
304
304
305 See localrepo.setparents()
305 See localrepo.setparents()
306 """
306 """
307 if self._parentwriters == 0:
307 if self._parentwriters == 0:
308 raise ValueError("cannot set dirstate parent without "
308 raise ValueError("cannot set dirstate parent without "
309 "calling dirstate.beginparentchange")
309 "calling dirstate.beginparentchange")
310
310
311 self._dirty = True
311 self._dirty = True
312 oldp2 = self._pl[1]
312 oldp2 = self._pl[1]
313 if self._origpl is None:
313 if self._origpl is None:
314 self._origpl = self._pl
314 self._origpl = self._pl
315 self._map.setparents(p1, p2)
315 self._map.setparents(p1, p2)
316 copies = {}
316 copies = {}
317 if oldp2 != nullid and p2 == nullid:
317 if oldp2 != nullid and p2 == nullid:
318 candidatefiles = self._map.nonnormalset.union(
318 candidatefiles = self._map.nonnormalset.union(
319 self._map.otherparentset)
319 self._map.otherparentset)
320 for f in candidatefiles:
320 for f in candidatefiles:
321 s = self._map.get(f)
321 s = self._map.get(f)
322 if s is None:
322 if s is None:
323 continue
323 continue
324
324
325 # Discard 'm' markers when moving away from a merge state
325 # Discard 'm' markers when moving away from a merge state
326 if s[0] == 'm':
326 if s[0] == 'm':
327 source = self._map.copymap.get(f)
327 source = self._map.copymap.get(f)
328 if source:
328 if source:
329 copies[f] = source
329 copies[f] = source
330 self.normallookup(f)
330 self.normallookup(f)
331 # Also fix up otherparent markers
331 # Also fix up otherparent markers
332 elif s[0] == 'n' and s[2] == -2:
332 elif s[0] == 'n' and s[2] == -2:
333 source = self._map.copymap.get(f)
333 source = self._map.copymap.get(f)
334 if source:
334 if source:
335 copies[f] = source
335 copies[f] = source
336 self.add(f)
336 self.add(f)
337 return copies
337 return copies
338
338
339 def setbranch(self, branch):
339 def setbranch(self, branch):
340 self._branch = encoding.fromlocal(branch)
340 self._branch = encoding.fromlocal(branch)
341 f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
341 f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
342 try:
342 try:
343 f.write(self._branch + '\n')
343 f.write(self._branch + '\n')
344 f.close()
344 f.close()
345
345
346 # make sure filecache has the correct stat info for _branch after
346 # make sure filecache has the correct stat info for _branch after
347 # replacing the underlying file
347 # replacing the underlying file
348 ce = self._filecache['_branch']
348 ce = self._filecache['_branch']
349 if ce:
349 if ce:
350 ce.refresh()
350 ce.refresh()
351 except: # re-raises
351 except: # re-raises
352 f.discard()
352 f.discard()
353 raise
353 raise
354
354
355 def invalidate(self):
355 def invalidate(self):
356 '''Causes the next access to reread the dirstate.
356 '''Causes the next access to reread the dirstate.
357
357
358 This is different from localrepo.invalidatedirstate() because it always
358 This is different from localrepo.invalidatedirstate() because it always
359 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
359 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
360 check whether the dirstate has changed before rereading it.'''
360 check whether the dirstate has changed before rereading it.'''
361
361
362 for a in ("_map", "_branch", "_ignore"):
362 for a in ("_map", "_branch", "_ignore"):
363 if a in self.__dict__:
363 if a in self.__dict__:
364 delattr(self, a)
364 delattr(self, a)
365 self._lastnormaltime = 0
365 self._lastnormaltime = 0
366 self._dirty = False
366 self._dirty = False
367 self._updatedfiles.clear()
367 self._updatedfiles.clear()
368 self._parentwriters = 0
368 self._parentwriters = 0
369 self._origpl = None
369 self._origpl = None
370
370
371 def copy(self, source, dest):
371 def copy(self, source, dest):
372 """Mark dest as a copy of source. Unmark dest if source is None."""
372 """Mark dest as a copy of source. Unmark dest if source is None."""
373 if source == dest:
373 if source == dest:
374 return
374 return
375 self._dirty = True
375 self._dirty = True
376 if source is not None:
376 if source is not None:
377 self._map.copymap[dest] = source
377 self._map.copymap[dest] = source
378 self._updatedfiles.add(source)
378 self._updatedfiles.add(source)
379 self._updatedfiles.add(dest)
379 self._updatedfiles.add(dest)
380 elif self._map.copymap.pop(dest, None):
380 elif self._map.copymap.pop(dest, None):
381 self._updatedfiles.add(dest)
381 self._updatedfiles.add(dest)
382
382
383 def copied(self, file):
383 def copied(self, file):
384 return self._map.copymap.get(file, None)
384 return self._map.copymap.get(file, None)
385
385
386 def copies(self):
386 def copies(self):
387 return self._map.copymap
387 return self._map.copymap
388
388
389 def _droppath(self, f):
389 def _droppath(self, f):
390 if "filefoldmap" in self._map.__dict__:
391 normed = util.normcase(f)
392 if normed in self._map.filefoldmap:
393 del self._map.filefoldmap[normed]
394
395 self._updatedfiles.add(f)
390 self._updatedfiles.add(f)
396
391
397 def _addpath(self, f, state, mode, size, mtime):
392 def _addpath(self, f, state, mode, size, mtime):
398 oldstate = self[f]
393 oldstate = self[f]
399 if state == 'a' or oldstate == 'r':
394 if state == 'a' or oldstate == 'r':
400 scmutil.checkfilename(f)
395 scmutil.checkfilename(f)
401 if f in self._map.dirs:
396 if f in self._map.dirs:
402 raise error.Abort(_('directory %r already in dirstate') % f)
397 raise error.Abort(_('directory %r already in dirstate') % f)
403 # shadows
398 # shadows
404 for d in util.finddirs(f):
399 for d in util.finddirs(f):
405 if d in self._map.dirs:
400 if d in self._map.dirs:
406 break
401 break
407 entry = self._map.get(d)
402 entry = self._map.get(d)
408 if entry is not None and entry[0] != 'r':
403 if entry is not None and entry[0] != 'r':
409 raise error.Abort(
404 raise error.Abort(
410 _('file %r in dirstate clashes with %r') % (d, f))
405 _('file %r in dirstate clashes with %r') % (d, f))
411 self._dirty = True
406 self._dirty = True
412 self._updatedfiles.add(f)
407 self._updatedfiles.add(f)
413 self._map.addfile(f, oldstate, state, mode, size, mtime)
408 self._map.addfile(f, oldstate, state, mode, size, mtime)
414
409
415 def normal(self, f):
410 def normal(self, f):
416 '''Mark a file normal and clean.'''
411 '''Mark a file normal and clean.'''
417 s = os.lstat(self._join(f))
412 s = os.lstat(self._join(f))
418 mtime = s.st_mtime
413 mtime = s.st_mtime
419 self._addpath(f, 'n', s.st_mode,
414 self._addpath(f, 'n', s.st_mode,
420 s.st_size & _rangemask, mtime & _rangemask)
415 s.st_size & _rangemask, mtime & _rangemask)
421 self._map.copymap.pop(f, None)
416 self._map.copymap.pop(f, None)
422 if f in self._map.nonnormalset:
417 if f in self._map.nonnormalset:
423 self._map.nonnormalset.remove(f)
418 self._map.nonnormalset.remove(f)
424 if mtime > self._lastnormaltime:
419 if mtime > self._lastnormaltime:
425 # Remember the most recent modification timeslot for status(),
420 # Remember the most recent modification timeslot for status(),
426 # to make sure we won't miss future size-preserving file content
421 # to make sure we won't miss future size-preserving file content
427 # modifications that happen within the same timeslot.
422 # modifications that happen within the same timeslot.
428 self._lastnormaltime = mtime
423 self._lastnormaltime = mtime
429
424
430 def normallookup(self, f):
425 def normallookup(self, f):
431 '''Mark a file normal, but possibly dirty.'''
426 '''Mark a file normal, but possibly dirty.'''
432 if self._pl[1] != nullid:
427 if self._pl[1] != nullid:
433 # if there is a merge going on and the file was either
428 # if there is a merge going on and the file was either
434 # in state 'm' (-1) or coming from other parent (-2) before
429 # in state 'm' (-1) or coming from other parent (-2) before
435 # being removed, restore that state.
430 # being removed, restore that state.
436 entry = self._map.get(f)
431 entry = self._map.get(f)
437 if entry is not None:
432 if entry is not None:
438 if entry[0] == 'r' and entry[2] in (-1, -2):
433 if entry[0] == 'r' and entry[2] in (-1, -2):
439 source = self._map.copymap.get(f)
434 source = self._map.copymap.get(f)
440 if entry[2] == -1:
435 if entry[2] == -1:
441 self.merge(f)
436 self.merge(f)
442 elif entry[2] == -2:
437 elif entry[2] == -2:
443 self.otherparent(f)
438 self.otherparent(f)
444 if source:
439 if source:
445 self.copy(source, f)
440 self.copy(source, f)
446 return
441 return
447 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
442 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
448 return
443 return
449 self._addpath(f, 'n', 0, -1, -1)
444 self._addpath(f, 'n', 0, -1, -1)
450 self._map.copymap.pop(f, None)
445 self._map.copymap.pop(f, None)
451
446
452 def otherparent(self, f):
447 def otherparent(self, f):
453 '''Mark as coming from the other parent, always dirty.'''
448 '''Mark as coming from the other parent, always dirty.'''
454 if self._pl[1] == nullid:
449 if self._pl[1] == nullid:
455 raise error.Abort(_("setting %r to other parent "
450 raise error.Abort(_("setting %r to other parent "
456 "only allowed in merges") % f)
451 "only allowed in merges") % f)
457 if f in self and self[f] == 'n':
452 if f in self and self[f] == 'n':
458 # merge-like
453 # merge-like
459 self._addpath(f, 'm', 0, -2, -1)
454 self._addpath(f, 'm', 0, -2, -1)
460 else:
455 else:
461 # add-like
456 # add-like
462 self._addpath(f, 'n', 0, -2, -1)
457 self._addpath(f, 'n', 0, -2, -1)
463 self._map.copymap.pop(f, None)
458 self._map.copymap.pop(f, None)
464
459
465 def add(self, f):
460 def add(self, f):
466 '''Mark a file added.'''
461 '''Mark a file added.'''
467 self._addpath(f, 'a', 0, -1, -1)
462 self._addpath(f, 'a', 0, -1, -1)
468 self._map.copymap.pop(f, None)
463 self._map.copymap.pop(f, None)
469
464
470 def remove(self, f):
465 def remove(self, f):
471 '''Mark a file removed.'''
466 '''Mark a file removed.'''
472 self._dirty = True
467 self._dirty = True
473 self._droppath(f)
468 self._droppath(f)
474 oldstate = self[f]
469 oldstate = self[f]
475 size = 0
470 size = 0
476 if self._pl[1] != nullid:
471 if self._pl[1] != nullid:
477 entry = self._map.get(f)
472 entry = self._map.get(f)
478 if entry is not None:
473 if entry is not None:
479 # backup the previous state
474 # backup the previous state
480 if entry[0] == 'm': # merge
475 if entry[0] == 'm': # merge
481 size = -1
476 size = -1
482 elif entry[0] == 'n' and entry[2] == -2: # other parent
477 elif entry[0] == 'n' and entry[2] == -2: # other parent
483 size = -2
478 size = -2
484 self._map.otherparentset.add(f)
479 self._map.otherparentset.add(f)
485 self._map.removefile(f, oldstate, size)
480 self._map.removefile(f, oldstate, size)
486 if size == 0:
481 if size == 0:
487 self._map.copymap.pop(f, None)
482 self._map.copymap.pop(f, None)
488
483
489 def merge(self, f):
484 def merge(self, f):
490 '''Mark a file merged.'''
485 '''Mark a file merged.'''
491 if self._pl[1] == nullid:
486 if self._pl[1] == nullid:
492 return self.normallookup(f)
487 return self.normallookup(f)
493 return self.otherparent(f)
488 return self.otherparent(f)
494
489
495 def drop(self, f):
490 def drop(self, f):
496 '''Drop a file from the dirstate'''
491 '''Drop a file from the dirstate'''
497 oldstate = self[f]
492 oldstate = self[f]
498 if self._map.dropfile(f, oldstate):
493 if self._map.dropfile(f, oldstate):
499 self._dirty = True
494 self._dirty = True
500 self._droppath(f)
495 self._droppath(f)
501 self._map.copymap.pop(f, None)
496 self._map.copymap.pop(f, None)
502
497
503 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
498 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
504 if exists is None:
499 if exists is None:
505 exists = os.path.lexists(os.path.join(self._root, path))
500 exists = os.path.lexists(os.path.join(self._root, path))
506 if not exists:
501 if not exists:
507 # Maybe a path component exists
502 # Maybe a path component exists
508 if not ignoremissing and '/' in path:
503 if not ignoremissing and '/' in path:
509 d, f = path.rsplit('/', 1)
504 d, f = path.rsplit('/', 1)
510 d = self._normalize(d, False, ignoremissing, None)
505 d = self._normalize(d, False, ignoremissing, None)
511 folded = d + "/" + f
506 folded = d + "/" + f
512 else:
507 else:
513 # No path components, preserve original case
508 # No path components, preserve original case
514 folded = path
509 folded = path
515 else:
510 else:
516 # recursively normalize leading directory components
511 # recursively normalize leading directory components
517 # against dirstate
512 # against dirstate
518 if '/' in normed:
513 if '/' in normed:
519 d, f = normed.rsplit('/', 1)
514 d, f = normed.rsplit('/', 1)
520 d = self._normalize(d, False, ignoremissing, True)
515 d = self._normalize(d, False, ignoremissing, True)
521 r = self._root + "/" + d
516 r = self._root + "/" + d
522 folded = d + "/" + util.fspath(f, r)
517 folded = d + "/" + util.fspath(f, r)
523 else:
518 else:
524 folded = util.fspath(normed, self._root)
519 folded = util.fspath(normed, self._root)
525 storemap[normed] = folded
520 storemap[normed] = folded
526
521
527 return folded
522 return folded
528
523
529 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
524 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
530 normed = util.normcase(path)
525 normed = util.normcase(path)
531 folded = self._map.filefoldmap.get(normed, None)
526 folded = self._map.filefoldmap.get(normed, None)
532 if folded is None:
527 if folded is None:
533 if isknown:
528 if isknown:
534 folded = path
529 folded = path
535 else:
530 else:
536 folded = self._discoverpath(path, normed, ignoremissing, exists,
531 folded = self._discoverpath(path, normed, ignoremissing, exists,
537 self._map.filefoldmap)
532 self._map.filefoldmap)
538 return folded
533 return folded
539
534
540 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
535 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
541 normed = util.normcase(path)
536 normed = util.normcase(path)
542 folded = self._map.filefoldmap.get(normed, None)
537 folded = self._map.filefoldmap.get(normed, None)
543 if folded is None:
538 if folded is None:
544 folded = self._map.dirfoldmap.get(normed, None)
539 folded = self._map.dirfoldmap.get(normed, None)
545 if folded is None:
540 if folded is None:
546 if isknown:
541 if isknown:
547 folded = path
542 folded = path
548 else:
543 else:
549 # store discovered result in dirfoldmap so that future
544 # store discovered result in dirfoldmap so that future
550 # normalizefile calls don't start matching directories
545 # normalizefile calls don't start matching directories
551 folded = self._discoverpath(path, normed, ignoremissing, exists,
546 folded = self._discoverpath(path, normed, ignoremissing, exists,
552 self._map.dirfoldmap)
547 self._map.dirfoldmap)
553 return folded
548 return folded
554
549
555 def normalize(self, path, isknown=False, ignoremissing=False):
550 def normalize(self, path, isknown=False, ignoremissing=False):
556 '''
551 '''
557 normalize the case of a pathname when on a casefolding filesystem
552 normalize the case of a pathname when on a casefolding filesystem
558
553
559 isknown specifies whether the filename came from walking the
554 isknown specifies whether the filename came from walking the
560 disk, to avoid extra filesystem access.
555 disk, to avoid extra filesystem access.
561
556
562 If ignoremissing is True, missing path are returned
557 If ignoremissing is True, missing path are returned
563 unchanged. Otherwise, we try harder to normalize possibly
558 unchanged. Otherwise, we try harder to normalize possibly
564 existing path components.
559 existing path components.
565
560
566 The normalized case is determined based on the following precedence:
561 The normalized case is determined based on the following precedence:
567
562
568 - version of name already stored in the dirstate
563 - version of name already stored in the dirstate
569 - version of name stored on disk
564 - version of name stored on disk
570 - version provided via command arguments
565 - version provided via command arguments
571 '''
566 '''
572
567
573 if self._checkcase:
568 if self._checkcase:
574 return self._normalize(path, isknown, ignoremissing)
569 return self._normalize(path, isknown, ignoremissing)
575 return path
570 return path
576
571
577 def clear(self):
572 def clear(self):
578 self._map.clear()
573 self._map.clear()
579 self._lastnormaltime = 0
574 self._lastnormaltime = 0
580 self._updatedfiles.clear()
575 self._updatedfiles.clear()
581 self._dirty = True
576 self._dirty = True
582
577
583 def rebuild(self, parent, allfiles, changedfiles=None):
578 def rebuild(self, parent, allfiles, changedfiles=None):
584 if changedfiles is None:
579 if changedfiles is None:
585 # Rebuild entire dirstate
580 # Rebuild entire dirstate
586 changedfiles = allfiles
581 changedfiles = allfiles
587 lastnormaltime = self._lastnormaltime
582 lastnormaltime = self._lastnormaltime
588 self.clear()
583 self.clear()
589 self._lastnormaltime = lastnormaltime
584 self._lastnormaltime = lastnormaltime
590
585
591 if self._origpl is None:
586 if self._origpl is None:
592 self._origpl = self._pl
587 self._origpl = self._pl
593 self._map.setparents(parent, nullid)
588 self._map.setparents(parent, nullid)
594 for f in changedfiles:
589 for f in changedfiles:
595 if f in allfiles:
590 if f in allfiles:
596 self.normallookup(f)
591 self.normallookup(f)
597 else:
592 else:
598 self.drop(f)
593 self.drop(f)
599
594
600 self._dirty = True
595 self._dirty = True
601
596
602 def identity(self):
597 def identity(self):
603 '''Return identity of dirstate itself to detect changing in storage
598 '''Return identity of dirstate itself to detect changing in storage
604
599
605 If identity of previous dirstate is equal to this, writing
600 If identity of previous dirstate is equal to this, writing
606 changes based on the former dirstate out can keep consistency.
601 changes based on the former dirstate out can keep consistency.
607 '''
602 '''
608 return self._map.identity
603 return self._map.identity
609
604
610 def write(self, tr):
605 def write(self, tr):
611 if not self._dirty:
606 if not self._dirty:
612 return
607 return
613
608
614 filename = self._filename
609 filename = self._filename
615 if tr:
610 if tr:
616 # 'dirstate.write()' is not only for writing in-memory
611 # 'dirstate.write()' is not only for writing in-memory
617 # changes out, but also for dropping ambiguous timestamp.
612 # changes out, but also for dropping ambiguous timestamp.
618 # delayed writing re-raise "ambiguous timestamp issue".
613 # delayed writing re-raise "ambiguous timestamp issue".
619 # See also the wiki page below for detail:
614 # See also the wiki page below for detail:
620 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
615 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
621
616
622 # emulate dropping timestamp in 'parsers.pack_dirstate'
617 # emulate dropping timestamp in 'parsers.pack_dirstate'
623 now = _getfsnow(self._opener)
618 now = _getfsnow(self._opener)
624 self._map.clearambiguoustimes(self._updatedfiles, now)
619 self._map.clearambiguoustimes(self._updatedfiles, now)
625
620
626 # emulate that all 'dirstate.normal' results are written out
621 # emulate that all 'dirstate.normal' results are written out
627 self._lastnormaltime = 0
622 self._lastnormaltime = 0
628 self._updatedfiles.clear()
623 self._updatedfiles.clear()
629
624
630 # delay writing in-memory changes out
625 # delay writing in-memory changes out
631 tr.addfilegenerator('dirstate', (self._filename,),
626 tr.addfilegenerator('dirstate', (self._filename,),
632 self._writedirstate, location='plain')
627 self._writedirstate, location='plain')
633 return
628 return
634
629
635 st = self._opener(filename, "w", atomictemp=True, checkambig=True)
630 st = self._opener(filename, "w", atomictemp=True, checkambig=True)
636 self._writedirstate(st)
631 self._writedirstate(st)
637
632
638 def addparentchangecallback(self, category, callback):
633 def addparentchangecallback(self, category, callback):
639 """add a callback to be called when the wd parents are changed
634 """add a callback to be called when the wd parents are changed
640
635
641 Callback will be called with the following arguments:
636 Callback will be called with the following arguments:
642 dirstate, (oldp1, oldp2), (newp1, newp2)
637 dirstate, (oldp1, oldp2), (newp1, newp2)
643
638
644 Category is a unique identifier to allow overwriting an old callback
639 Category is a unique identifier to allow overwriting an old callback
645 with a newer callback.
640 with a newer callback.
646 """
641 """
647 self._plchangecallbacks[category] = callback
642 self._plchangecallbacks[category] = callback
648
643
649 def _writedirstate(self, st):
644 def _writedirstate(self, st):
650 # notify callbacks about parents change
645 # notify callbacks about parents change
651 if self._origpl is not None and self._origpl != self._pl:
646 if self._origpl is not None and self._origpl != self._pl:
652 for c, callback in sorted(self._plchangecallbacks.iteritems()):
647 for c, callback in sorted(self._plchangecallbacks.iteritems()):
653 callback(self, self._origpl, self._pl)
648 callback(self, self._origpl, self._pl)
654 self._origpl = None
649 self._origpl = None
655 # use the modification time of the newly created temporary file as the
650 # use the modification time of the newly created temporary file as the
656 # filesystem's notion of 'now'
651 # filesystem's notion of 'now'
657 now = util.fstat(st).st_mtime & _rangemask
652 now = util.fstat(st).st_mtime & _rangemask
658
653
659 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
654 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
660 # timestamp of each entries in dirstate, because of 'now > mtime'
655 # timestamp of each entries in dirstate, because of 'now > mtime'
661 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite')
656 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite')
662 if delaywrite > 0:
657 if delaywrite > 0:
663 # do we have any files to delay for?
658 # do we have any files to delay for?
664 for f, e in self._map.iteritems():
659 for f, e in self._map.iteritems():
665 if e[0] == 'n' and e[3] == now:
660 if e[0] == 'n' and e[3] == now:
666 import time # to avoid useless import
661 import time # to avoid useless import
667 # rather than sleep n seconds, sleep until the next
662 # rather than sleep n seconds, sleep until the next
668 # multiple of n seconds
663 # multiple of n seconds
669 clock = time.time()
664 clock = time.time()
670 start = int(clock) - (int(clock) % delaywrite)
665 start = int(clock) - (int(clock) % delaywrite)
671 end = start + delaywrite
666 end = start + delaywrite
672 time.sleep(end - clock)
667 time.sleep(end - clock)
673 now = end # trust our estimate that the end is near now
668 now = end # trust our estimate that the end is near now
674 break
669 break
675
670
676 self._map.write(st, now)
671 self._map.write(st, now)
677 self._lastnormaltime = 0
672 self._lastnormaltime = 0
678 self._dirty = False
673 self._dirty = False
679
674
680 def _dirignore(self, f):
675 def _dirignore(self, f):
681 if f == '.':
676 if f == '.':
682 return False
677 return False
683 if self._ignore(f):
678 if self._ignore(f):
684 return True
679 return True
685 for p in util.finddirs(f):
680 for p in util.finddirs(f):
686 if self._ignore(p):
681 if self._ignore(p):
687 return True
682 return True
688 return False
683 return False
689
684
690 def _ignorefiles(self):
685 def _ignorefiles(self):
691 files = []
686 files = []
692 if os.path.exists(self._join('.hgignore')):
687 if os.path.exists(self._join('.hgignore')):
693 files.append(self._join('.hgignore'))
688 files.append(self._join('.hgignore'))
694 for name, path in self._ui.configitems("ui"):
689 for name, path in self._ui.configitems("ui"):
695 if name == 'ignore' or name.startswith('ignore.'):
690 if name == 'ignore' or name.startswith('ignore.'):
696 # we need to use os.path.join here rather than self._join
691 # we need to use os.path.join here rather than self._join
697 # because path is arbitrary and user-specified
692 # because path is arbitrary and user-specified
698 files.append(os.path.join(self._rootdir, util.expandpath(path)))
693 files.append(os.path.join(self._rootdir, util.expandpath(path)))
699 return files
694 return files
700
695
701 def _ignorefileandline(self, f):
696 def _ignorefileandline(self, f):
702 files = collections.deque(self._ignorefiles())
697 files = collections.deque(self._ignorefiles())
703 visited = set()
698 visited = set()
704 while files:
699 while files:
705 i = files.popleft()
700 i = files.popleft()
706 patterns = matchmod.readpatternfile(i, self._ui.warn,
701 patterns = matchmod.readpatternfile(i, self._ui.warn,
707 sourceinfo=True)
702 sourceinfo=True)
708 for pattern, lineno, line in patterns:
703 for pattern, lineno, line in patterns:
709 kind, p = matchmod._patsplit(pattern, 'glob')
704 kind, p = matchmod._patsplit(pattern, 'glob')
710 if kind == "subinclude":
705 if kind == "subinclude":
711 if p not in visited:
706 if p not in visited:
712 files.append(p)
707 files.append(p)
713 continue
708 continue
714 m = matchmod.match(self._root, '', [], [pattern],
709 m = matchmod.match(self._root, '', [], [pattern],
715 warn=self._ui.warn)
710 warn=self._ui.warn)
716 if m(f):
711 if m(f):
717 return (i, lineno, line)
712 return (i, lineno, line)
718 visited.add(i)
713 visited.add(i)
719 return (None, -1, "")
714 return (None, -1, "")
720
715
721 def _walkexplicit(self, match, subrepos):
716 def _walkexplicit(self, match, subrepos):
722 '''Get stat data about the files explicitly specified by match.
717 '''Get stat data about the files explicitly specified by match.
723
718
724 Return a triple (results, dirsfound, dirsnotfound).
719 Return a triple (results, dirsfound, dirsnotfound).
725 - results is a mapping from filename to stat result. It also contains
720 - results is a mapping from filename to stat result. It also contains
726 listings mapping subrepos and .hg to None.
721 listings mapping subrepos and .hg to None.
727 - dirsfound is a list of files found to be directories.
722 - dirsfound is a list of files found to be directories.
728 - dirsnotfound is a list of files that the dirstate thinks are
723 - dirsnotfound is a list of files that the dirstate thinks are
729 directories and that were not found.'''
724 directories and that were not found.'''
730
725
731 def badtype(mode):
726 def badtype(mode):
732 kind = _('unknown')
727 kind = _('unknown')
733 if stat.S_ISCHR(mode):
728 if stat.S_ISCHR(mode):
734 kind = _('character device')
729 kind = _('character device')
735 elif stat.S_ISBLK(mode):
730 elif stat.S_ISBLK(mode):
736 kind = _('block device')
731 kind = _('block device')
737 elif stat.S_ISFIFO(mode):
732 elif stat.S_ISFIFO(mode):
738 kind = _('fifo')
733 kind = _('fifo')
739 elif stat.S_ISSOCK(mode):
734 elif stat.S_ISSOCK(mode):
740 kind = _('socket')
735 kind = _('socket')
741 elif stat.S_ISDIR(mode):
736 elif stat.S_ISDIR(mode):
742 kind = _('directory')
737 kind = _('directory')
743 return _('unsupported file type (type is %s)') % kind
738 return _('unsupported file type (type is %s)') % kind
744
739
745 matchedir = match.explicitdir
740 matchedir = match.explicitdir
746 badfn = match.bad
741 badfn = match.bad
747 dmap = self._map
742 dmap = self._map
748 lstat = os.lstat
743 lstat = os.lstat
749 getkind = stat.S_IFMT
744 getkind = stat.S_IFMT
750 dirkind = stat.S_IFDIR
745 dirkind = stat.S_IFDIR
751 regkind = stat.S_IFREG
746 regkind = stat.S_IFREG
752 lnkkind = stat.S_IFLNK
747 lnkkind = stat.S_IFLNK
753 join = self._join
748 join = self._join
754 dirsfound = []
749 dirsfound = []
755 foundadd = dirsfound.append
750 foundadd = dirsfound.append
756 dirsnotfound = []
751 dirsnotfound = []
757 notfoundadd = dirsnotfound.append
752 notfoundadd = dirsnotfound.append
758
753
759 if not match.isexact() and self._checkcase:
754 if not match.isexact() and self._checkcase:
760 normalize = self._normalize
755 normalize = self._normalize
761 else:
756 else:
762 normalize = None
757 normalize = None
763
758
764 files = sorted(match.files())
759 files = sorted(match.files())
765 subrepos.sort()
760 subrepos.sort()
766 i, j = 0, 0
761 i, j = 0, 0
767 while i < len(files) and j < len(subrepos):
762 while i < len(files) and j < len(subrepos):
768 subpath = subrepos[j] + "/"
763 subpath = subrepos[j] + "/"
769 if files[i] < subpath:
764 if files[i] < subpath:
770 i += 1
765 i += 1
771 continue
766 continue
772 while i < len(files) and files[i].startswith(subpath):
767 while i < len(files) and files[i].startswith(subpath):
773 del files[i]
768 del files[i]
774 j += 1
769 j += 1
775
770
776 if not files or '.' in files:
771 if not files or '.' in files:
777 files = ['.']
772 files = ['.']
778 results = dict.fromkeys(subrepos)
773 results = dict.fromkeys(subrepos)
779 results['.hg'] = None
774 results['.hg'] = None
780
775
781 alldirs = None
776 alldirs = None
782 for ff in files:
777 for ff in files:
783 # constructing the foldmap is expensive, so don't do it for the
778 # constructing the foldmap is expensive, so don't do it for the
784 # common case where files is ['.']
779 # common case where files is ['.']
785 if normalize and ff != '.':
780 if normalize and ff != '.':
786 nf = normalize(ff, False, True)
781 nf = normalize(ff, False, True)
787 else:
782 else:
788 nf = ff
783 nf = ff
789 if nf in results:
784 if nf in results:
790 continue
785 continue
791
786
792 try:
787 try:
793 st = lstat(join(nf))
788 st = lstat(join(nf))
794 kind = getkind(st.st_mode)
789 kind = getkind(st.st_mode)
795 if kind == dirkind:
790 if kind == dirkind:
796 if nf in dmap:
791 if nf in dmap:
797 # file replaced by dir on disk but still in dirstate
792 # file replaced by dir on disk but still in dirstate
798 results[nf] = None
793 results[nf] = None
799 if matchedir:
794 if matchedir:
800 matchedir(nf)
795 matchedir(nf)
801 foundadd((nf, ff))
796 foundadd((nf, ff))
802 elif kind == regkind or kind == lnkkind:
797 elif kind == regkind or kind == lnkkind:
803 results[nf] = st
798 results[nf] = st
804 else:
799 else:
805 badfn(ff, badtype(kind))
800 badfn(ff, badtype(kind))
806 if nf in dmap:
801 if nf in dmap:
807 results[nf] = None
802 results[nf] = None
808 except OSError as inst: # nf not found on disk - it is dirstate only
803 except OSError as inst: # nf not found on disk - it is dirstate only
809 if nf in dmap: # does it exactly match a missing file?
804 if nf in dmap: # does it exactly match a missing file?
810 results[nf] = None
805 results[nf] = None
811 else: # does it match a missing directory?
806 else: # does it match a missing directory?
812 if alldirs is None:
807 if alldirs is None:
813 alldirs = util.dirs(dmap._map)
808 alldirs = util.dirs(dmap._map)
814 if nf in alldirs:
809 if nf in alldirs:
815 if matchedir:
810 if matchedir:
816 matchedir(nf)
811 matchedir(nf)
817 notfoundadd(nf)
812 notfoundadd(nf)
818 else:
813 else:
819 badfn(ff, encoding.strtolocal(inst.strerror))
814 badfn(ff, encoding.strtolocal(inst.strerror))
820
815
821 # Case insensitive filesystems cannot rely on lstat() failing to detect
816 # Case insensitive filesystems cannot rely on lstat() failing to detect
822 # a case-only rename. Prune the stat object for any file that does not
817 # a case-only rename. Prune the stat object for any file that does not
823 # match the case in the filesystem, if there are multiple files that
818 # match the case in the filesystem, if there are multiple files that
824 # normalize to the same path.
819 # normalize to the same path.
825 if match.isexact() and self._checkcase:
820 if match.isexact() and self._checkcase:
826 normed = {}
821 normed = {}
827
822
828 for f, st in results.iteritems():
823 for f, st in results.iteritems():
829 if st is None:
824 if st is None:
830 continue
825 continue
831
826
832 nc = util.normcase(f)
827 nc = util.normcase(f)
833 paths = normed.get(nc)
828 paths = normed.get(nc)
834
829
835 if paths is None:
830 if paths is None:
836 paths = set()
831 paths = set()
837 normed[nc] = paths
832 normed[nc] = paths
838
833
839 paths.add(f)
834 paths.add(f)
840
835
841 for norm, paths in normed.iteritems():
836 for norm, paths in normed.iteritems():
842 if len(paths) > 1:
837 if len(paths) > 1:
843 for path in paths:
838 for path in paths:
844 folded = self._discoverpath(path, norm, True, None,
839 folded = self._discoverpath(path, norm, True, None,
845 self._map.dirfoldmap)
840 self._map.dirfoldmap)
846 if path != folded:
841 if path != folded:
847 results[path] = None
842 results[path] = None
848
843
849 return results, dirsfound, dirsnotfound
844 return results, dirsfound, dirsnotfound
850
845
851 def walk(self, match, subrepos, unknown, ignored, full=True):
846 def walk(self, match, subrepos, unknown, ignored, full=True):
852 '''
847 '''
853 Walk recursively through the directory tree, finding all files
848 Walk recursively through the directory tree, finding all files
854 matched by match.
849 matched by match.
855
850
856 If full is False, maybe skip some known-clean files.
851 If full is False, maybe skip some known-clean files.
857
852
858 Return a dict mapping filename to stat-like object (either
853 Return a dict mapping filename to stat-like object (either
859 mercurial.osutil.stat instance or return value of os.stat()).
854 mercurial.osutil.stat instance or return value of os.stat()).
860
855
861 '''
856 '''
862 # full is a flag that extensions that hook into walk can use -- this
857 # full is a flag that extensions that hook into walk can use -- this
863 # implementation doesn't use it at all. This satisfies the contract
858 # implementation doesn't use it at all. This satisfies the contract
864 # because we only guarantee a "maybe".
859 # because we only guarantee a "maybe".
865
860
866 if ignored:
861 if ignored:
867 ignore = util.never
862 ignore = util.never
868 dirignore = util.never
863 dirignore = util.never
869 elif unknown:
864 elif unknown:
870 ignore = self._ignore
865 ignore = self._ignore
871 dirignore = self._dirignore
866 dirignore = self._dirignore
872 else:
867 else:
873 # if not unknown and not ignored, drop dir recursion and step 2
868 # if not unknown and not ignored, drop dir recursion and step 2
874 ignore = util.always
869 ignore = util.always
875 dirignore = util.always
870 dirignore = util.always
876
871
877 matchfn = match.matchfn
872 matchfn = match.matchfn
878 matchalways = match.always()
873 matchalways = match.always()
879 matchtdir = match.traversedir
874 matchtdir = match.traversedir
880 dmap = self._map
875 dmap = self._map
881 listdir = util.listdir
876 listdir = util.listdir
882 lstat = os.lstat
877 lstat = os.lstat
883 dirkind = stat.S_IFDIR
878 dirkind = stat.S_IFDIR
884 regkind = stat.S_IFREG
879 regkind = stat.S_IFREG
885 lnkkind = stat.S_IFLNK
880 lnkkind = stat.S_IFLNK
886 join = self._join
881 join = self._join
887
882
888 exact = skipstep3 = False
883 exact = skipstep3 = False
889 if match.isexact(): # match.exact
884 if match.isexact(): # match.exact
890 exact = True
885 exact = True
891 dirignore = util.always # skip step 2
886 dirignore = util.always # skip step 2
892 elif match.prefix(): # match.match, no patterns
887 elif match.prefix(): # match.match, no patterns
893 skipstep3 = True
888 skipstep3 = True
894
889
895 if not exact and self._checkcase:
890 if not exact and self._checkcase:
896 normalize = self._normalize
891 normalize = self._normalize
897 normalizefile = self._normalizefile
892 normalizefile = self._normalizefile
898 skipstep3 = False
893 skipstep3 = False
899 else:
894 else:
900 normalize = self._normalize
895 normalize = self._normalize
901 normalizefile = None
896 normalizefile = None
902
897
903 # step 1: find all explicit files
898 # step 1: find all explicit files
904 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
899 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
905
900
906 skipstep3 = skipstep3 and not (work or dirsnotfound)
901 skipstep3 = skipstep3 and not (work or dirsnotfound)
907 work = [d for d in work if not dirignore(d[0])]
902 work = [d for d in work if not dirignore(d[0])]
908
903
909 # step 2: visit subdirectories
904 # step 2: visit subdirectories
910 def traverse(work, alreadynormed):
905 def traverse(work, alreadynormed):
911 wadd = work.append
906 wadd = work.append
912 while work:
907 while work:
913 nd = work.pop()
908 nd = work.pop()
914 if not match.visitdir(nd):
909 if not match.visitdir(nd):
915 continue
910 continue
916 skip = None
911 skip = None
917 if nd == '.':
912 if nd == '.':
918 nd = ''
913 nd = ''
919 else:
914 else:
920 skip = '.hg'
915 skip = '.hg'
921 try:
916 try:
922 entries = listdir(join(nd), stat=True, skip=skip)
917 entries = listdir(join(nd), stat=True, skip=skip)
923 except OSError as inst:
918 except OSError as inst:
924 if inst.errno in (errno.EACCES, errno.ENOENT):
919 if inst.errno in (errno.EACCES, errno.ENOENT):
925 match.bad(self.pathto(nd),
920 match.bad(self.pathto(nd),
926 encoding.strtolocal(inst.strerror))
921 encoding.strtolocal(inst.strerror))
927 continue
922 continue
928 raise
923 raise
929 for f, kind, st in entries:
924 for f, kind, st in entries:
930 if normalizefile:
925 if normalizefile:
931 # even though f might be a directory, we're only
926 # even though f might be a directory, we're only
932 # interested in comparing it to files currently in the
927 # interested in comparing it to files currently in the
933 # dmap -- therefore normalizefile is enough
928 # dmap -- therefore normalizefile is enough
934 nf = normalizefile(nd and (nd + "/" + f) or f, True,
929 nf = normalizefile(nd and (nd + "/" + f) or f, True,
935 True)
930 True)
936 else:
931 else:
937 nf = nd and (nd + "/" + f) or f
932 nf = nd and (nd + "/" + f) or f
938 if nf not in results:
933 if nf not in results:
939 if kind == dirkind:
934 if kind == dirkind:
940 if not ignore(nf):
935 if not ignore(nf):
941 if matchtdir:
936 if matchtdir:
942 matchtdir(nf)
937 matchtdir(nf)
943 wadd(nf)
938 wadd(nf)
944 if nf in dmap and (matchalways or matchfn(nf)):
939 if nf in dmap and (matchalways or matchfn(nf)):
945 results[nf] = None
940 results[nf] = None
946 elif kind == regkind or kind == lnkkind:
941 elif kind == regkind or kind == lnkkind:
947 if nf in dmap:
942 if nf in dmap:
948 if matchalways or matchfn(nf):
943 if matchalways or matchfn(nf):
949 results[nf] = st
944 results[nf] = st
950 elif ((matchalways or matchfn(nf))
945 elif ((matchalways or matchfn(nf))
951 and not ignore(nf)):
946 and not ignore(nf)):
952 # unknown file -- normalize if necessary
947 # unknown file -- normalize if necessary
953 if not alreadynormed:
948 if not alreadynormed:
954 nf = normalize(nf, False, True)
949 nf = normalize(nf, False, True)
955 results[nf] = st
950 results[nf] = st
956 elif nf in dmap and (matchalways or matchfn(nf)):
951 elif nf in dmap and (matchalways or matchfn(nf)):
957 results[nf] = None
952 results[nf] = None
958
953
959 for nd, d in work:
954 for nd, d in work:
960 # alreadynormed means that processwork doesn't have to do any
955 # alreadynormed means that processwork doesn't have to do any
961 # expensive directory normalization
956 # expensive directory normalization
962 alreadynormed = not normalize or nd == d
957 alreadynormed = not normalize or nd == d
963 traverse([d], alreadynormed)
958 traverse([d], alreadynormed)
964
959
965 for s in subrepos:
960 for s in subrepos:
966 del results[s]
961 del results[s]
967 del results['.hg']
962 del results['.hg']
968
963
969 # step 3: visit remaining files from dmap
964 # step 3: visit remaining files from dmap
970 if not skipstep3 and not exact:
965 if not skipstep3 and not exact:
971 # If a dmap file is not in results yet, it was either
966 # If a dmap file is not in results yet, it was either
972 # a) not matching matchfn, b) ignored, c) missing, or d) under a
967 # a) not matching matchfn, b) ignored, c) missing, or d) under a
973 # symlink directory.
968 # symlink directory.
974 if not results and matchalways:
969 if not results and matchalways:
975 visit = [f for f in dmap]
970 visit = [f for f in dmap]
976 else:
971 else:
977 visit = [f for f in dmap if f not in results and matchfn(f)]
972 visit = [f for f in dmap if f not in results and matchfn(f)]
978 visit.sort()
973 visit.sort()
979
974
980 if unknown:
975 if unknown:
981 # unknown == True means we walked all dirs under the roots
976 # unknown == True means we walked all dirs under the roots
982 # that weren't ignored, and everything that matched was stat'ed
977 # that weren't ignored, and everything that matched was stat'ed
983 # and is already in results.
978 # and is already in results.
984 # The rest must thus be ignored or under a symlink.
979 # The rest must thus be ignored or under a symlink.
985 audit_path = pathutil.pathauditor(self._root, cached=True)
980 audit_path = pathutil.pathauditor(self._root, cached=True)
986
981
987 for nf in iter(visit):
982 for nf in iter(visit):
988 # If a stat for the same file was already added with a
983 # If a stat for the same file was already added with a
989 # different case, don't add one for this, since that would
984 # different case, don't add one for this, since that would
990 # make it appear as if the file exists under both names
985 # make it appear as if the file exists under both names
991 # on disk.
986 # on disk.
992 if (normalizefile and
987 if (normalizefile and
993 normalizefile(nf, True, True) in results):
988 normalizefile(nf, True, True) in results):
994 results[nf] = None
989 results[nf] = None
995 # Report ignored items in the dmap as long as they are not
990 # Report ignored items in the dmap as long as they are not
996 # under a symlink directory.
991 # under a symlink directory.
997 elif audit_path.check(nf):
992 elif audit_path.check(nf):
998 try:
993 try:
999 results[nf] = lstat(join(nf))
994 results[nf] = lstat(join(nf))
1000 # file was just ignored, no links, and exists
995 # file was just ignored, no links, and exists
1001 except OSError:
996 except OSError:
1002 # file doesn't exist
997 # file doesn't exist
1003 results[nf] = None
998 results[nf] = None
1004 else:
999 else:
1005 # It's either missing or under a symlink directory
1000 # It's either missing or under a symlink directory
1006 # which we in this case report as missing
1001 # which we in this case report as missing
1007 results[nf] = None
1002 results[nf] = None
1008 else:
1003 else:
1009 # We may not have walked the full directory tree above,
1004 # We may not have walked the full directory tree above,
1010 # so stat and check everything we missed.
1005 # so stat and check everything we missed.
1011 iv = iter(visit)
1006 iv = iter(visit)
1012 for st in util.statfiles([join(i) for i in visit]):
1007 for st in util.statfiles([join(i) for i in visit]):
1013 results[next(iv)] = st
1008 results[next(iv)] = st
1014 return results
1009 return results
1015
1010
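Review note: traverse() above is an explicit-stack directory walk that prunes whole subtrees up front via match.visitdir and avoids descending into .hg. The standalone sketch below restates that shape with only the standard library; walk_pruned, its root argument and the visitdir predicate are illustrative stand-ins, not Mercurial APIs.

import os

def walk_pruned(root, visitdir):
    # Explicit work stack, as in traverse() above: pop a directory, ask
    # the pruning predicate whether it is worth visiting at all, then
    # push child directories and collect plain files.
    found = []
    work = ['']
    while work:
        nd = work.pop()
        if not visitdir(nd or '.'):
            continue
        for entry in os.scandir(os.path.join(root, nd)):
            rel = nd + '/' + entry.name if nd else entry.name
            if entry.is_dir(follow_symlinks=False):
                if entry.name != '.hg':   # never descend into .hg
                    work.append(rel)
            else:
                found.append(rel)
    return found

# e.g. walk_pruned('.', lambda d: not d.startswith('build'))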
1016 def status(self, match, subrepos, ignored, clean, unknown):
1011 def status(self, match, subrepos, ignored, clean, unknown):
1017 '''Determine the status of the working copy relative to the
1012 '''Determine the status of the working copy relative to the
1018 dirstate and return a pair of (unsure, status), where status is of type
1013 dirstate and return a pair of (unsure, status), where status is of type
1019 scmutil.status and:
1014 scmutil.status and:
1020
1015
1021 unsure:
1016 unsure:
1022 files that might have been modified since the dirstate was
1017 files that might have been modified since the dirstate was
1023 written, but need to be read to be sure (size is the same
1018 written, but need to be read to be sure (size is the same
1024 but mtime differs)
1019 but mtime differs)
1025 status.modified:
1020 status.modified:
1026 files that have definitely been modified since the dirstate
1021 files that have definitely been modified since the dirstate
1027 was written (different size or mode)
1022 was written (different size or mode)
1028 status.clean:
1023 status.clean:
1029 files that have definitely not been modified since the
1024 files that have definitely not been modified since the
1030 dirstate was written
1025 dirstate was written
1031 '''
1026 '''
1032 listignored, listclean, listunknown = ignored, clean, unknown
1027 listignored, listclean, listunknown = ignored, clean, unknown
1033 lookup, modified, added, unknown, ignored = [], [], [], [], []
1028 lookup, modified, added, unknown, ignored = [], [], [], [], []
1034 removed, deleted, clean = [], [], []
1029 removed, deleted, clean = [], [], []
1035
1030
1036 dmap = self._map
1031 dmap = self._map
1037 dmap.preload()
1032 dmap.preload()
1038 dcontains = dmap.__contains__
1033 dcontains = dmap.__contains__
1039 dget = dmap.__getitem__
1034 dget = dmap.__getitem__
1040 ladd = lookup.append # aka "unsure"
1035 ladd = lookup.append # aka "unsure"
1041 madd = modified.append
1036 madd = modified.append
1042 aadd = added.append
1037 aadd = added.append
1043 uadd = unknown.append
1038 uadd = unknown.append
1044 iadd = ignored.append
1039 iadd = ignored.append
1045 radd = removed.append
1040 radd = removed.append
1046 dadd = deleted.append
1041 dadd = deleted.append
1047 cadd = clean.append
1042 cadd = clean.append
1048 mexact = match.exact
1043 mexact = match.exact
1049 dirignore = self._dirignore
1044 dirignore = self._dirignore
1050 checkexec = self._checkexec
1045 checkexec = self._checkexec
1051 copymap = self._map.copymap
1046 copymap = self._map.copymap
1052 lastnormaltime = self._lastnormaltime
1047 lastnormaltime = self._lastnormaltime
1053
1048
1054 # We need to do full walks when either
1049 # We need to do full walks when either
1055 # - we're listing all clean files, or
1050 # - we're listing all clean files, or
1056 # - match.traversedir does something, because match.traversedir should
1051 # - match.traversedir does something, because match.traversedir should
1057 # be called for every dir in the working dir
1052 # be called for every dir in the working dir
1058 full = listclean or match.traversedir is not None
1053 full = listclean or match.traversedir is not None
1059 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1054 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1060 full=full).iteritems():
1055 full=full).iteritems():
1061 if not dcontains(fn):
1056 if not dcontains(fn):
1062 if (listignored or mexact(fn)) and dirignore(fn):
1057 if (listignored or mexact(fn)) and dirignore(fn):
1063 if listignored:
1058 if listignored:
1064 iadd(fn)
1059 iadd(fn)
1065 else:
1060 else:
1066 uadd(fn)
1061 uadd(fn)
1067 continue
1062 continue
1068
1063
1069 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1064 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1070 # written like that for performance reasons. dmap[fn] is not a
1065 # written like that for performance reasons. dmap[fn] is not a
1071 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1066 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1072 # opcode has fast paths when the value to be unpacked is a tuple or
1067 # opcode has fast paths when the value to be unpacked is a tuple or
1073 # a list, but falls back to creating a full-fledged iterator in
1068 # a list, but falls back to creating a full-fledged iterator in
1074 # general. That is much slower than simply accessing and storing the
1069 # general. That is much slower than simply accessing and storing the
1075 # tuple members one by one.
1070 # tuple members one by one.
1076 t = dget(fn)
1071 t = dget(fn)
1077 state = t[0]
1072 state = t[0]
1078 mode = t[1]
1073 mode = t[1]
1079 size = t[2]
1074 size = t[2]
1080 time = t[3]
1075 time = t[3]
1081
1076
1082 if not st and state in "nma":
1077 if not st and state in "nma":
1083 dadd(fn)
1078 dadd(fn)
1084 elif state == 'n':
1079 elif state == 'n':
1085 if (size >= 0 and
1080 if (size >= 0 and
1086 ((size != st.st_size and size != st.st_size & _rangemask)
1081 ((size != st.st_size and size != st.st_size & _rangemask)
1087 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1082 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1088 or size == -2 # other parent
1083 or size == -2 # other parent
1089 or fn in copymap):
1084 or fn in copymap):
1090 madd(fn)
1085 madd(fn)
1091 elif time != st.st_mtime and time != st.st_mtime & _rangemask:
1086 elif time != st.st_mtime and time != st.st_mtime & _rangemask:
1092 ladd(fn)
1087 ladd(fn)
1093 elif st.st_mtime == lastnormaltime:
1088 elif st.st_mtime == lastnormaltime:
1094 # fn may have just been marked as normal and it may have
1089 # fn may have just been marked as normal and it may have
1095 # changed in the same second without changing its size.
1090 # changed in the same second without changing its size.
1096 # This can happen if we quickly do multiple commits.
1091 # This can happen if we quickly do multiple commits.
1097 # Force lookup, so we don't miss such a racy file change.
1092 # Force lookup, so we don't miss such a racy file change.
1098 ladd(fn)
1093 ladd(fn)
1099 elif listclean:
1094 elif listclean:
1100 cadd(fn)
1095 cadd(fn)
1101 elif state == 'm':
1096 elif state == 'm':
1102 madd(fn)
1097 madd(fn)
1103 elif state == 'a':
1098 elif state == 'a':
1104 aadd(fn)
1099 aadd(fn)
1105 elif state == 'r':
1100 elif state == 'r':
1106 radd(fn)
1101 radd(fn)
1107
1102
1108 return (lookup, scmutil.status(modified, added, removed, deleted,
1103 return (lookup, scmutil.status(modified, added, removed, deleted,
1109 unknown, ignored, clean))
1104 unknown, ignored, clean))
1110
1105
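Review note: for a single 'n' entry the branch above reduces to a size/mode comparison (with values masked to 31 bits, since the dirstate stores them as signed 32-bit fields) followed by an mtime comparison. A standalone restatement of that decision; classify_normal, the plain-tuple entry and the string results are illustrative, not Mercurial APIs, and the racy lastnormaltime check is omitted for brevity.

_RANGEMASK = 0x7fffffff

def classify_normal(entry, st, checkexec=True, copied=False):
    # entry is (state, mode, size, mtime) for a file recorded as 'n';
    # st is an os.stat_result, or None if the file is gone.
    _state, mode, size, mtime = entry
    if st is None:
        return 'deleted'
    st_mtime = int(st.st_mtime)   # the dirstate stores whole seconds
    if (size >= 0 and
        ((size != st.st_size and size != st.st_size & _RANGEMASK)
         or (checkexec and (mode ^ st.st_mode) & 0o100))
        or size == -2             # recorded as coming from the other parent
        or copied):               # has an entry in copymap
        return 'modified'
    if mtime != st_mtime and mtime != st_mtime & _RANGEMASK:
        return 'lookup'           # "unsure": same size, but mtime changed
    return 'clean'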
1111 def matches(self, match):
1106 def matches(self, match):
1112 '''
1107 '''
1113 return files in the dirstate (in whatever state) filtered by match
1108 return files in the dirstate (in whatever state) filtered by match
1114 '''
1109 '''
1115 dmap = self._map
1110 dmap = self._map
1116 if match.always():
1111 if match.always():
1117 return dmap.keys()
1112 return dmap.keys()
1118 files = match.files()
1113 files = match.files()
1119 if match.isexact():
1114 if match.isexact():
1120 # fast path -- filter the other way around, since typically files is
1115 # fast path -- filter the other way around, since typically files is
1121 # much smaller than dmap
1116 # much smaller than dmap
1122 return [f for f in files if f in dmap]
1117 return [f for f in files if f in dmap]
1123 if match.prefix() and all(fn in dmap for fn in files):
1118 if match.prefix() and all(fn in dmap for fn in files):
1124 # fast path -- all the values are known to be files, so just return
1119 # fast path -- all the values are known to be files, so just return
1125 # that
1120 # that
1126 return list(files)
1121 return list(files)
1127 return [f for f in dmap if match(f)]
1122 return [f for f in dmap if match(f)]
1128
1123
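Review note: the fast paths above exploit a size asymmetry: for an exact matcher it is cheaper to probe the (small) file list against the (large) dirstate dict than to scan the whole dict. A toy illustration with made-up paths:

dmap = {'a/x': 1, 'a/y': 1, 'b/z': 1}   # stands in for the dirstate map
files = ['a/x', 'c/w']                  # an exact matcher's file list

# exact matcher: probe the small list against the big dict
exact_hits = [f for f in files if f in dmap]           # -> ['a/x']

# general matcher: no choice but to scan the whole dirstate
prefix_hits = [f for f in dmap if f.startswith('a/')]  # -> ['a/x', 'a/y']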
1129 def _actualfilename(self, tr):
1124 def _actualfilename(self, tr):
1130 if tr:
1125 if tr:
1131 return self._pendingfilename
1126 return self._pendingfilename
1132 else:
1127 else:
1133 return self._filename
1128 return self._filename
1134
1129
1135 def savebackup(self, tr, backupname):
1130 def savebackup(self, tr, backupname):
1136 '''Save current dirstate into backup file'''
1131 '''Save current dirstate into backup file'''
1137 filename = self._actualfilename(tr)
1132 filename = self._actualfilename(tr)
1138 assert backupname != filename
1133 assert backupname != filename
1139
1134
1140 # use '_writedirstate' instead of 'write' to make sure changes are written
1135 # use '_writedirstate' instead of 'write' to make sure changes are written
1141 # out, because the latter skips writing while a transaction is running.
1136 # out, because the latter skips writing while a transaction is running.
1142 # the output file will be used to create a backup of the dirstate at this point.
1137 # the output file will be used to create a backup of the dirstate at this point.
1143 if self._dirty or not self._opener.exists(filename):
1138 if self._dirty or not self._opener.exists(filename):
1144 self._writedirstate(self._opener(filename, "w", atomictemp=True,
1139 self._writedirstate(self._opener(filename, "w", atomictemp=True,
1145 checkambig=True))
1140 checkambig=True))
1146
1141
1147 if tr:
1142 if tr:
1148 # ensure that subsequent tr.writepending returns True for
1143 # ensure that subsequent tr.writepending returns True for
1149 # changes written out above, even if dirstate is never
1144 # changes written out above, even if dirstate is never
1150 # changed after this
1145 # changed after this
1151 tr.addfilegenerator('dirstate', (self._filename,),
1146 tr.addfilegenerator('dirstate', (self._filename,),
1152 self._writedirstate, location='plain')
1147 self._writedirstate, location='plain')
1153
1148
1154 # ensure that pending file written above is unlinked at
1149 # ensure that pending file written above is unlinked at
1155 # failure, even if tr.writepending isn't invoked until the
1150 # failure, even if tr.writepending isn't invoked until the
1156 # end of this transaction
1151 # end of this transaction
1157 tr.registertmp(filename, location='plain')
1152 tr.registertmp(filename, location='plain')
1158
1153
1159 self._opener.tryunlink(backupname)
1154 self._opener.tryunlink(backupname)
1160 # hardlink backup is okay because _writedirstate is always called
1155 # hardlink backup is okay because _writedirstate is always called
1161 # with an "atomictemp=True" file.
1156 # with an "atomictemp=True" file.
1162 util.copyfile(self._opener.join(filename),
1157 util.copyfile(self._opener.join(filename),
1163 self._opener.join(backupname), hardlink=True)
1158 self._opener.join(backupname), hardlink=True)
1164
1159
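Review note: the hardlink backup is safe because an atomictemp write never rewrites the existing dirstate file in place; it writes a temporary file and renames it over the old name, so a hardlink taken beforehand keeps pointing at the old, unchanged content. A small standard-library demonstration of that property; atomic_replace and the demo filenames are invented for illustration and are not Mercurial's vfs/atomictemp code. Run it in a scratch directory.

import os, tempfile

def atomic_replace(path, data):
    # Write to a temp file in the same directory, then rename it over
    # the target -- the original inode is never modified in place.
    d = os.path.dirname(os.path.abspath(path))
    fd, tmp = tempfile.mkstemp(dir=d)
    with os.fdopen(fd, 'wb') as fp:
        fp.write(data)
    os.replace(tmp, path)   # atomic rename over the old file

with open('dirstate.demo', 'wb') as fp:
    fp.write(b'old')
os.link('dirstate.demo', 'dirstate.backup')   # hardlink "backup"
atomic_replace('dirstate.demo', b'new')
# the hardlink still holds the pre-replace contents
assert open('dirstate.backup', 'rb').read() == b'old'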
1165 def restorebackup(self, tr, backupname):
1160 def restorebackup(self, tr, backupname):
1166 '''Restore dirstate by backup file'''
1161 '''Restore dirstate by backup file'''
1167 # this "invalidate()" prevents "wlock.release()" from writing
1162 # this "invalidate()" prevents "wlock.release()" from writing
1168 # changes of dirstate out after restoring from backup file
1163 # changes of dirstate out after restoring from backup file
1169 self.invalidate()
1164 self.invalidate()
1170 filename = self._actualfilename(tr)
1165 filename = self._actualfilename(tr)
1171 o = self._opener
1166 o = self._opener
1172 if util.samefile(o.join(backupname), o.join(filename)):
1167 if util.samefile(o.join(backupname), o.join(filename)):
1173 o.unlink(backupname)
1168 o.unlink(backupname)
1174 else:
1169 else:
1175 o.rename(backupname, filename, checkambig=True)
1170 o.rename(backupname, filename, checkambig=True)
1176
1171
1177 def clearbackup(self, tr, backupname):
1172 def clearbackup(self, tr, backupname):
1178 '''Clear backup file'''
1173 '''Clear backup file'''
1179 self._opener.unlink(backupname)
1174 self._opener.unlink(backupname)
1180
1175
1181 class dirstatemap(object):
1176 class dirstatemap(object):
1182 """Map encapsulating the dirstate's contents.
1177 """Map encapsulating the dirstate's contents.
1183
1178
1184 The dirstate contains the following state:
1179 The dirstate contains the following state:
1185
1180
1186 - `identity` is the identity of the dirstate file, which can be used to
1181 - `identity` is the identity of the dirstate file, which can be used to
1187 detect when changes have occurred to the dirstate file.
1182 detect when changes have occurred to the dirstate file.
1188
1183
1189 - `parents` is a pair containing the parents of the working copy. The
1184 - `parents` is a pair containing the parents of the working copy. The
1190 parents are updated by calling `setparents`.
1185 parents are updated by calling `setparents`.
1191
1186
1192 - the state map maps filenames to tuples of (state, mode, size, mtime),
1187 - the state map maps filenames to tuples of (state, mode, size, mtime),
1193 where state is a single character representing 'normal', 'added',
1188 where state is a single character representing 'normal', 'added',
1194 'removed', or 'merged'. It is read by treating the dirstate as a
1189 'removed', or 'merged'. It is read by treating the dirstate as a
1195 dict. File state is updated by calling the `addfile`, `removefile` and
1190 dict. File state is updated by calling the `addfile`, `removefile` and
1196 `dropfile` methods.
1191 `dropfile` methods.
1197
1192
1198 - `copymap` maps destination filenames to their source filename.
1193 - `copymap` maps destination filenames to their source filename.
1199
1194
1200 The dirstate also provides the following views onto the state:
1195 The dirstate also provides the following views onto the state:
1201
1196
1202 - `nonnormalset` is a set of the filenames that have state other
1197 - `nonnormalset` is a set of the filenames that have state other
1203 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1198 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1204
1199
1205 - `otherparentset` is a set of the filenames that are marked as coming
1200 - `otherparentset` is a set of the filenames that are marked as coming
1206 from the second parent when the dirstate is currently being merged.
1201 from the second parent when the dirstate is currently being merged.
1207
1202
1208 - `dirs` is a set-like object containing all the directories that contain
1203 - `dirs` is a set-like object containing all the directories that contain
1209 files in the dirstate, excluding any files that are marked as removed.
1204 files in the dirstate, excluding any files that are marked as removed.
1210
1205
1211 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1206 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1212 form that they appear as in the dirstate.
1207 form that they appear as in the dirstate.
1213
1208
1214 - `dirfoldmap` is a dict mapping normalized directory names to the
1209 - `dirfoldmap` is a dict mapping normalized directory names to the
1215 denormalized form that they appear as in the dirstate.
1210 denormalized form that they appear as in the dirstate.
1216
1217 Once instantiated, the filefoldmap and dirfoldmap views must be maintained
1218 by the caller.
1219 """
1211 """
1220
1212
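Review note: a plain-dict mock can make the state described in this docstring concrete. Everything below is illustrative data, not the real class; str.lower stands in for util.normcase.

# state map: filename -> (state, mode, size, mtime)
statemap = {
    'README':      ('n', 0o644, 120, 1510000000),  # normal and clean
    'setup.py':    ('n', 0o644,  -1,         -1),  # normal, needs lookup
    'newfile.txt': ('a',     0,  -1,         -1),  # added
    'oldfile.txt': ('r',     0,   0,          0),  # removed
}
copymap = {'newfile.txt': 'oldfile.txt'}           # destination -> source

# the derived views described above
nonnormalset = {f for f, e in statemap.items() if e[0] != 'n' or e[3] == -1}
filefoldmap = {f.lower(): f for f, e in statemap.items() if e[0] != 'r'}

assert nonnormalset == {'setup.py', 'newfile.txt', 'oldfile.txt'}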
1221 def __init__(self, ui, opener, root):
1213 def __init__(self, ui, opener, root):
1222 self._ui = ui
1214 self._ui = ui
1223 self._opener = opener
1215 self._opener = opener
1224 self._root = root
1216 self._root = root
1225 self._filename = 'dirstate'
1217 self._filename = 'dirstate'
1226
1218
1227 self._parents = None
1219 self._parents = None
1228 self._dirtyparents = False
1220 self._dirtyparents = False
1229
1221
1230 # for consistent view between _pl() and _read() invocations
1222 # for consistent view between _pl() and _read() invocations
1231 self._pendingmode = None
1223 self._pendingmode = None
1232
1224
1233 @propertycache
1225 @propertycache
1234 def _map(self):
1226 def _map(self):
1235 self._map = {}
1227 self._map = {}
1236 self.read()
1228 self.read()
1237 return self._map
1229 return self._map
1238
1230
1239 @propertycache
1231 @propertycache
1240 def copymap(self):
1232 def copymap(self):
1241 self.copymap = {}
1233 self.copymap = {}
1242 self._map
1234 self._map
1243 return self.copymap
1235 return self.copymap
1244
1236
1245 def clear(self):
1237 def clear(self):
1246 self._map.clear()
1238 self._map.clear()
1247 self.copymap.clear()
1239 self.copymap.clear()
1248 self.setparents(nullid, nullid)
1240 self.setparents(nullid, nullid)
1249 util.clearcachedproperty(self, "dirs")
1241 util.clearcachedproperty(self, "dirs")
1250 util.clearcachedproperty(self, "filefoldmap")
1242 util.clearcachedproperty(self, "filefoldmap")
1251 util.clearcachedproperty(self, "dirfoldmap")
1243 util.clearcachedproperty(self, "dirfoldmap")
1252 util.clearcachedproperty(self, "nonnormalset")
1244 util.clearcachedproperty(self, "nonnormalset")
1253 util.clearcachedproperty(self, "otherparentset")
1245 util.clearcachedproperty(self, "otherparentset")
1254
1246
1255 def iteritems(self):
1247 def iteritems(self):
1256 return self._map.iteritems()
1248 return self._map.iteritems()
1257
1249
1258 def __len__(self):
1250 def __len__(self):
1259 return len(self._map)
1251 return len(self._map)
1260
1252
1261 def __iter__(self):
1253 def __iter__(self):
1262 return iter(self._map)
1254 return iter(self._map)
1263
1255
1264 def get(self, key, default=None):
1256 def get(self, key, default=None):
1265 return self._map.get(key, default)
1257 return self._map.get(key, default)
1266
1258
1267 def __contains__(self, key):
1259 def __contains__(self, key):
1268 return key in self._map
1260 return key in self._map
1269
1261
1270 def __getitem__(self, key):
1262 def __getitem__(self, key):
1271 return self._map[key]
1263 return self._map[key]
1272
1264
1273 def keys(self):
1265 def keys(self):
1274 return self._map.keys()
1266 return self._map.keys()
1275
1267
1276 def preload(self):
1268 def preload(self):
1277 """Loads the underlying data, if it's not already loaded"""
1269 """Loads the underlying data, if it's not already loaded"""
1278 self._map
1270 self._map
1279
1271
1280 def addfile(self, f, oldstate, state, mode, size, mtime):
1272 def addfile(self, f, oldstate, state, mode, size, mtime):
1281 """Add a tracked file to the dirstate."""
1273 """Add a tracked file to the dirstate."""
1282 if oldstate in "?r" and "dirs" in self.__dict__:
1274 if oldstate in "?r" and "dirs" in self.__dict__:
1283 self.dirs.addpath(f)
1275 self.dirs.addpath(f)
1284 self._map[f] = dirstatetuple(state, mode, size, mtime)
1276 self._map[f] = dirstatetuple(state, mode, size, mtime)
1285 if state != 'n' or mtime == -1:
1277 if state != 'n' or mtime == -1:
1286 self.nonnormalset.add(f)
1278 self.nonnormalset.add(f)
1287 if size == -2:
1279 if size == -2:
1288 self.otherparentset.add(f)
1280 self.otherparentset.add(f)
1289
1281
1290 def removefile(self, f, oldstate, size):
1282 def removefile(self, f, oldstate, size):
1291 """
1283 """
1292 Mark a file as removed in the dirstate.
1284 Mark a file as removed in the dirstate.
1293
1285
1294 The `size` parameter is used to store sentinel values that indicate
1286 The `size` parameter is used to store sentinel values that indicate
1295 the file's previous state. In the future, we should refactor this
1287 the file's previous state. In the future, we should refactor this
1296 to be more explicit about what that state is.
1288 to be more explicit about what that state is.
1297 """
1289 """
1298 if oldstate not in "?r" and "dirs" in self.__dict__:
1290 if oldstate not in "?r" and "dirs" in self.__dict__:
1299 self.dirs.delpath(f)
1291 self.dirs.delpath(f)
1292 if "filefoldmap" in self.__dict__:
1293 normed = util.normcase(f)
1294 self.filefoldmap.pop(normed, None)
1300 self._map[f] = dirstatetuple('r', 0, size, 0)
1295 self._map[f] = dirstatetuple('r', 0, size, 0)
1301 self.nonnormalset.add(f)
1296 self.nonnormalset.add(f)
1302
1297
1303 def dropfile(self, f, oldstate):
1298 def dropfile(self, f, oldstate):
1304 """
1299 """
1305 Remove a file from the dirstate. Returns True if the file was
1300 Remove a file from the dirstate. Returns True if the file was
1306 previously recorded.
1301 previously recorded.
1307 """
1302 """
1308 exists = self._map.pop(f, None) is not None
1303 exists = self._map.pop(f, None) is not None
1309 if exists:
1304 if exists:
1310 if oldstate != "r" and "dirs" in self.__dict__:
1305 if oldstate != "r" and "dirs" in self.__dict__:
1311 self.dirs.delpath(f)
1306 self.dirs.delpath(f)
1307 if "filefoldmap" in self.__dict__:
1308 normed = util.normcase(f)
1309 self.filefoldmap.pop(normed, None)
1312 self.nonnormalset.discard(f)
1310 self.nonnormalset.discard(f)
1313 return exists
1311 return exists
1314
1312
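Review note: the lines this change adds to removefile and dropfile keep filefoldmap consistent. Once a file stops being tracked (or is marked removed), a case-folded lookup must no longer resolve to it, otherwise a later add of the same name under a different case would hit a stale entry. A toy dict-based illustration of the invariant; str.lower stands in for util.normcase.

statemap = {'Readme.TXT': ('n', 0o644, 10, 0)}
filefoldmap = {'readme.txt': 'Readme.TXT'}

def dropfile(f):
    existed = statemap.pop(f, None) is not None
    if existed:
        # mirror of the lines added above: evict the folded name as
        # well, so a case-insensitive lookup cannot find a dropped file
        filefoldmap.pop(f.lower(), None)
    return existed

dropfile('Readme.TXT')
assert 'readme.txt' not in filefoldmap   # no stale folded entry left behind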
1315 def clearambiguoustimes(self, files, now):
1313 def clearambiguoustimes(self, files, now):
1316 for f in files:
1314 for f in files:
1317 e = self.get(f)
1315 e = self.get(f)
1318 if e is not None and e[0] == 'n' and e[3] == now:
1316 if e is not None and e[0] == 'n' and e[3] == now:
1319 self._map[f] = dirstatetuple(e[0], e[1], e[2], -1)
1317 self._map[f] = dirstatetuple(e[0], e[1], e[2], -1)
1320 self.nonnormalset.add(f)
1318 self.nonnormalset.add(f)
1321
1319
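Review note: clearambiguoustimes guards against the same race that status() handles with lastnormaltime: if an entry's recorded mtime equals the timestamp of the dirstate write itself, a later change within that same second would slip past the size+mtime check, so the stored mtime is knocked down to -1 to force a content lookup next time. A standalone restatement with plain tuples standing in for dirstatetuple:

def clear_ambiguous(statemap, files, now):
    # Any 'n' entry whose recorded mtime equals `now` (the write time)
    # could still change undetected in the same second -- invalidate it.
    nonnormal = set()
    for f in files:
        e = statemap.get(f)
        if e is not None and e[0] == 'n' and e[3] == now:
            statemap[f] = (e[0], e[1], e[2], -1)
            nonnormal.add(f)
    return nonnormal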
1322 def nonnormalentries(self):
1320 def nonnormalentries(self):
1323 '''Compute the nonnormal dirstate entries from the dmap'''
1321 '''Compute the nonnormal dirstate entries from the dmap'''
1324 try:
1322 try:
1325 return parsers.nonnormalotherparententries(self._map)
1323 return parsers.nonnormalotherparententries(self._map)
1326 except AttributeError:
1324 except AttributeError:
1327 nonnorm = set()
1325 nonnorm = set()
1328 otherparent = set()
1326 otherparent = set()
1329 for fname, e in self._map.iteritems():
1327 for fname, e in self._map.iteritems():
1330 if e[0] != 'n' or e[3] == -1:
1328 if e[0] != 'n' or e[3] == -1:
1331 nonnorm.add(fname)
1329 nonnorm.add(fname)
1332 if e[0] == 'n' and e[2] == -2:
1330 if e[0] == 'n' and e[2] == -2:
1333 otherparent.add(fname)
1331 otherparent.add(fname)
1334 return nonnorm, otherparent
1332 return nonnorm, otherparent
1335
1333
1336 @propertycache
1334 @propertycache
1337 def filefoldmap(self):
1335 def filefoldmap(self):
1338 """Returns a dictionary mapping normalized case paths to their
1336 """Returns a dictionary mapping normalized case paths to their
1339 non-normalized versions.
1337 non-normalized versions.
1340 """
1338 """
1341 try:
1339 try:
1342 makefilefoldmap = parsers.make_file_foldmap
1340 makefilefoldmap = parsers.make_file_foldmap
1343 except AttributeError:
1341 except AttributeError:
1344 pass
1342 pass
1345 else:
1343 else:
1346 return makefilefoldmap(self._map, util.normcasespec,
1344 return makefilefoldmap(self._map, util.normcasespec,
1347 util.normcasefallback)
1345 util.normcasefallback)
1348
1346
1349 f = {}
1347 f = {}
1350 normcase = util.normcase
1348 normcase = util.normcase
1351 for name, s in self._map.iteritems():
1349 for name, s in self._map.iteritems():
1352 if s[0] != 'r':
1350 if s[0] != 'r':
1353 f[normcase(name)] = name
1351 f[normcase(name)] = name
1354 f['.'] = '.' # prevents useless util.fspath() invocation
1352 f['.'] = '.' # prevents useless util.fspath() invocation
1355 return f
1353 return f
1356
1354
1357 @propertycache
1355 @propertycache
1358 def dirs(self):
1356 def dirs(self):
1359 """Returns a set-like object containing all the directories in the
1357 """Returns a set-like object containing all the directories in the
1360 current dirstate.
1358 current dirstate.
1361 """
1359 """
1362 return util.dirs(self._map, 'r')
1360 return util.dirs(self._map, 'r')
1363
1361
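Review note: a simplified sketch of the directory view built here. tracked_dirs is an illustration, not util.dirs; the real helper also keeps reference counts so addpath()/delpath() can update it in place.

import posixpath

def tracked_dirs(statemap):
    # Directories containing at least one non-removed file, in the
    # spirit of util.dirs(self._map, 'r'); '' stands for the repo root.
    dirs = set()
    for f, e in statemap.items():
        if e[0] == 'r':
            continue
        d = posixpath.dirname(f)
        while d:
            dirs.add(d)
            d = posixpath.dirname(d)
        dirs.add('')
    return dirs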
1364 def _opendirstatefile(self):
1362 def _opendirstatefile(self):
1365 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1363 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1366 if self._pendingmode is not None and self._pendingmode != mode:
1364 if self._pendingmode is not None and self._pendingmode != mode:
1367 fp.close()
1365 fp.close()
1368 raise error.Abort(_('working directory state may be '
1366 raise error.Abort(_('working directory state may be '
1369 'changed parallelly'))
1367 'changed parallelly'))
1370 self._pendingmode = mode
1368 self._pendingmode = mode
1371 return fp
1369 return fp
1372
1370
1373 def parents(self):
1371 def parents(self):
1374 if not self._parents:
1372 if not self._parents:
1375 try:
1373 try:
1376 fp = self._opendirstatefile()
1374 fp = self._opendirstatefile()
1377 st = fp.read(40)
1375 st = fp.read(40)
1378 fp.close()
1376 fp.close()
1379 except IOError as err:
1377 except IOError as err:
1380 if err.errno != errno.ENOENT:
1378 if err.errno != errno.ENOENT:
1381 raise
1379 raise
1382 # File doesn't exist, so the current state is empty
1380 # File doesn't exist, so the current state is empty
1383 st = ''
1381 st = ''
1384
1382
1385 l = len(st)
1383 l = len(st)
1386 if l == 40:
1384 if l == 40:
1387 self._parents = st[:20], st[20:40]
1385 self._parents = st[:20], st[20:40]
1388 elif l == 0:
1386 elif l == 0:
1389 self._parents = [nullid, nullid]
1387 self._parents = [nullid, nullid]
1390 else:
1388 else:
1391 raise error.Abort(_('working directory state appears '
1389 raise error.Abort(_('working directory state appears '
1392 'damaged!'))
1390 'damaged!'))
1393
1391
1394 return self._parents
1392 return self._parents
1395
1393
1396 def setparents(self, p1, p2):
1394 def setparents(self, p1, p2):
1397 self._parents = (p1, p2)
1395 self._parents = (p1, p2)
1398 self._dirtyparents = True
1396 self._dirtyparents = True
1399
1397
1400 def read(self):
1398 def read(self):
1401 # ignore HG_PENDING because identity is used only for writing
1399 # ignore HG_PENDING because identity is used only for writing
1402 self.identity = util.filestat.frompath(
1400 self.identity = util.filestat.frompath(
1403 self._opener.join(self._filename))
1401 self._opener.join(self._filename))
1404
1402
1405 try:
1403 try:
1406 fp = self._opendirstatefile()
1404 fp = self._opendirstatefile()
1407 try:
1405 try:
1408 st = fp.read()
1406 st = fp.read()
1409 finally:
1407 finally:
1410 fp.close()
1408 fp.close()
1411 except IOError as err:
1409 except IOError as err:
1412 if err.errno != errno.ENOENT:
1410 if err.errno != errno.ENOENT:
1413 raise
1411 raise
1414 return
1412 return
1415 if not st:
1413 if not st:
1416 return
1414 return
1417
1415
1418 if util.safehasattr(parsers, 'dict_new_presized'):
1416 if util.safehasattr(parsers, 'dict_new_presized'):
1419 # Make an estimate of the number of files in the dirstate based on
1417 # Make an estimate of the number of files in the dirstate based on
1420 # its size. From a linear regression on a set of real-world repos,
1418 # its size. From a linear regression on a set of real-world repos,
1421 # all over 10,000 files, the size of a dirstate entry is 85
1419 # all over 10,000 files, the size of a dirstate entry is 85
1422 # bytes. The cost of resizing is significantly higher than the cost
1420 # bytes. The cost of resizing is significantly higher than the cost
1423 # of filling in a larger presized dict, so subtract 20% from the
1421 # of filling in a larger presized dict, so subtract 20% from the
1424 # size.
1422 # size.
1425 #
1423 #
1426 # This heuristic is imperfect in many ways, so in a future dirstate
1424 # This heuristic is imperfect in many ways, so in a future dirstate
1427 # format update it makes sense to just record the number of entries
1425 # format update it makes sense to just record the number of entries
1428 # on write.
1426 # on write.
1429 self._map = parsers.dict_new_presized(len(st) / 71)
1427 self._map = parsers.dict_new_presized(len(st) / 71)
1430
1428
1431 # Python's garbage collector triggers a GC each time a certain number
1429 # Python's garbage collector triggers a GC each time a certain number
1432 # of container objects (the number being defined by
1430 # of container objects (the number being defined by
1433 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1431 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1434 # for each file in the dirstate. The C version then immediately marks
1432 # for each file in the dirstate. The C version then immediately marks
1435 # them as not to be tracked by the collector. However, this has no
1433 # them as not to be tracked by the collector. However, this has no
1436 # effect on when GCs are triggered, only on what objects the GC looks
1434 # effect on when GCs are triggered, only on what objects the GC looks
1437 # into. This means that O(number of files) GCs are unavoidable.
1435 # into. This means that O(number of files) GCs are unavoidable.
1438 # Depending on when in the process's lifetime the dirstate is parsed,
1436 # Depending on when in the process's lifetime the dirstate is parsed,
1439 # this can get very expensive. As a workaround, disable GC while
1437 # this can get very expensive. As a workaround, disable GC while
1440 # parsing the dirstate.
1438 # parsing the dirstate.
1441 #
1439 #
1442 # (we cannot decorate the function directly since it is in a C module)
1440 # (we cannot decorate the function directly since it is in a C module)
1443 parse_dirstate = util.nogc(parsers.parse_dirstate)
1441 parse_dirstate = util.nogc(parsers.parse_dirstate)
1444 p = parse_dirstate(self._map, self.copymap, st)
1442 p = parse_dirstate(self._map, self.copymap, st)
1445 if not self._dirtyparents:
1443 if not self._dirtyparents:
1446 self.setparents(*p)
1444 self.setparents(*p)
1447
1445
1448 # Avoid excess attribute lookups by fast pathing certain checks
1446 # Avoid excess attribute lookups by fast pathing certain checks
1449 self.__contains__ = self._map.__contains__
1447 self.__contains__ = self._map.__contains__
1450 self.__getitem__ = self._map.__getitem__
1448 self.__getitem__ = self._map.__getitem__
1451 self.get = self._map.get
1449 self.get = self._map.get
1452
1450
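Review note: a standalone sketch of what the parse step does, with the garbage collector disabled for the duration in the spirit of util.nogc above. The ">cllll" record header (state, mode, size, mtime, name length) and the "name\0copysource" convention follow the dirstate v1 format as the pure-Python parser reads it; treat the details here as a sketch rather than a spec.

import gc, struct

def parse_dirstate_v1(data):
    statemap, copymap = {}, {}
    parents = data[:20], data[20:40]   # two raw 20-byte parent nodes
    gcenabled = gc.isenabled()
    gc.disable()                       # avoid O(number of files) GC passes
    try:
        pos = 40
        while pos < len(data):
            state, mode, size, mtime, flen = struct.unpack(
                '>cllll', data[pos:pos + 17])
            pos += 17
            name = data[pos:pos + flen]
            pos += flen
            if b'\0' in name:
                name, copysource = name.split(b'\0', 1)
                copymap[name] = copysource
            statemap[name] = (state, mode, size, mtime)
    finally:
        if gcenabled:
            gc.enable()
    return parents, statemap, copymap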
1453 def write(self, st, now):
1451 def write(self, st, now):
1454 st.write(parsers.pack_dirstate(self._map, self.copymap,
1452 st.write(parsers.pack_dirstate(self._map, self.copymap,
1455 self.parents(), now))
1453 self.parents(), now))
1456 st.close()
1454 st.close()
1457 self._dirtyparents = False
1455 self._dirtyparents = False
1458 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1456 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1459
1457
1460 @propertycache
1458 @propertycache
1461 def nonnormalset(self):
1459 def nonnormalset(self):
1462 nonnorm, otherparents = self.nonnormalentries()
1460 nonnorm, otherparents = self.nonnormalentries()
1463 self.otherparentset = otherparents
1461 self.otherparentset = otherparents
1464 return nonnorm
1462 return nonnorm
1465
1463
1466 @propertycache
1464 @propertycache
1467 def otherparentset(self):
1465 def otherparentset(self):
1468 nonnorm, otherparents = self.nonnormalentries()
1466 nonnorm, otherparents = self.nonnormalentries()
1469 self.nonnormalset = nonnorm
1467 self.nonnormalset = nonnorm
1470 return otherparents
1468 return otherparents
1471
1469
1472 @propertycache
1470 @propertycache
1473 def identity(self):
1471 def identity(self):
1474 self._map
1472 self._map
1475 return self.identity
1473 return self.identity
1476
1474
1477 @propertycache
1475 @propertycache
1478 def dirfoldmap(self):
1476 def dirfoldmap(self):
1479 f = {}
1477 f = {}
1480 normcase = util.normcase
1478 normcase = util.normcase
1481 for name in self.dirs:
1479 for name in self.dirs:
1482 f[normcase(name)] = name
1480 f[normcase(name)] = name
1483 return f
1481 return f