##// END OF EJS Templates
dirstate: move special handling of files==['.'] together...
Martin von Zweigbergk -
r42527:7ada5989 default
parent child Browse files
Show More
@@ -1,1524 +1,1525 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import nullid
17 from .node import nullid
18 from . import (
18 from . import (
19 encoding,
19 encoding,
20 error,
20 error,
21 match as matchmod,
21 match as matchmod,
22 pathutil,
22 pathutil,
23 policy,
23 policy,
24 pycompat,
24 pycompat,
25 scmutil,
25 scmutil,
26 txnutil,
26 txnutil,
27 util,
27 util,
28 )
28 )
29
29
# Optional Rust extensions: fall back to the C/pure-Python implementations
# when they are not built.
try:
    from . import rustext
    rustext.__name__ # force actual import (see hgdemandimport)
except ImportError:
    rustext = None

# C or pure-Python parser backend, selected by the policy module.
parsers = policy.importmod(r'parsers')

propertycache = util.propertycache
filecache = scmutil.filecache
# sizes and mtimes are truncated to 31 bits when stored in the dirstate
_rangemask = 0x7fffffff

dirstatetuple = parsers.dirstatetuple
43
43
class repocache(filecache):
    """filecache for files in .hg/"""
    def join(self, obj, fname):
        # resolve the cached file's path against the .hg/ opener
        return obj._opener.join(fname)
48
48
class rootcache(filecache):
    """filecache for files in the repository root"""
    def join(self, obj, fname):
        # resolve the cached file's path against the working-directory root
        return obj._join(fname)
53
53
def _getfsnow(vfs):
    '''Get "now" timestamp on filesystem'''
    # create a throwaway file and read its mtime back, so the value reflects
    # the filesystem's own clock/granularity rather than the system clock
    tmpfd, tmpname = vfs.mkstemp()
    try:
        return os.fstat(tmpfd)[stat.ST_MTIME]
    finally:
        os.close(tmpfd)
        vfs.unlink(tmpname)
62
62
63 class dirstate(object):
63 class dirstate(object):
64
64
65 def __init__(self, opener, ui, root, validate, sparsematchfn):
65 def __init__(self, opener, ui, root, validate, sparsematchfn):
66 '''Create a new dirstate object.
66 '''Create a new dirstate object.
67
67
68 opener is an open()-like callable that can be used to open the
68 opener is an open()-like callable that can be used to open the
69 dirstate file; root is the root of the directory tracked by
69 dirstate file; root is the root of the directory tracked by
70 the dirstate.
70 the dirstate.
71 '''
71 '''
72 self._opener = opener
72 self._opener = opener
73 self._validate = validate
73 self._validate = validate
74 self._root = root
74 self._root = root
75 self._sparsematchfn = sparsematchfn
75 self._sparsematchfn = sparsematchfn
76 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
76 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
77 # UNC path pointing to root share (issue4557)
77 # UNC path pointing to root share (issue4557)
78 self._rootdir = pathutil.normasprefix(root)
78 self._rootdir = pathutil.normasprefix(root)
79 self._dirty = False
79 self._dirty = False
80 self._lastnormaltime = 0
80 self._lastnormaltime = 0
81 self._ui = ui
81 self._ui = ui
82 self._filecache = {}
82 self._filecache = {}
83 self._parentwriters = 0
83 self._parentwriters = 0
84 self._filename = 'dirstate'
84 self._filename = 'dirstate'
85 self._pendingfilename = '%s.pending' % self._filename
85 self._pendingfilename = '%s.pending' % self._filename
86 self._plchangecallbacks = {}
86 self._plchangecallbacks = {}
87 self._origpl = None
87 self._origpl = None
88 self._updatedfiles = set()
88 self._updatedfiles = set()
89 self._mapcls = dirstatemap
89 self._mapcls = dirstatemap
90 # Access and cache cwd early, so we don't access it for the first time
90 # Access and cache cwd early, so we don't access it for the first time
91 # after a working-copy update caused it to not exist (accessing it then
91 # after a working-copy update caused it to not exist (accessing it then
92 # raises an exception).
92 # raises an exception).
93 self._cwd
93 self._cwd
94
94
95 @contextlib.contextmanager
95 @contextlib.contextmanager
96 def parentchange(self):
96 def parentchange(self):
97 '''Context manager for handling dirstate parents.
97 '''Context manager for handling dirstate parents.
98
98
99 If an exception occurs in the scope of the context manager,
99 If an exception occurs in the scope of the context manager,
100 the incoherent dirstate won't be written when wlock is
100 the incoherent dirstate won't be written when wlock is
101 released.
101 released.
102 '''
102 '''
103 self._parentwriters += 1
103 self._parentwriters += 1
104 yield
104 yield
105 # Typically we want the "undo" step of a context manager in a
105 # Typically we want the "undo" step of a context manager in a
106 # finally block so it happens even when an exception
106 # finally block so it happens even when an exception
107 # occurs. In this case, however, we only want to decrement
107 # occurs. In this case, however, we only want to decrement
108 # parentwriters if the code in the with statement exits
108 # parentwriters if the code in the with statement exits
109 # normally, so we don't have a try/finally here on purpose.
109 # normally, so we don't have a try/finally here on purpose.
110 self._parentwriters -= 1
110 self._parentwriters -= 1
111
111
112 def pendingparentchange(self):
112 def pendingparentchange(self):
113 '''Returns true if the dirstate is in the middle of a set of changes
113 '''Returns true if the dirstate is in the middle of a set of changes
114 that modify the dirstate parent.
114 that modify the dirstate parent.
115 '''
115 '''
116 return self._parentwriters > 0
116 return self._parentwriters > 0
117
117
118 @propertycache
118 @propertycache
119 def _map(self):
119 def _map(self):
120 """Return the dirstate contents (see documentation for dirstatemap)."""
120 """Return the dirstate contents (see documentation for dirstatemap)."""
121 self._map = self._mapcls(self._ui, self._opener, self._root)
121 self._map = self._mapcls(self._ui, self._opener, self._root)
122 return self._map
122 return self._map
123
123
124 @property
124 @property
125 def _sparsematcher(self):
125 def _sparsematcher(self):
126 """The matcher for the sparse checkout.
126 """The matcher for the sparse checkout.
127
127
128 The working directory may not include every file from a manifest. The
128 The working directory may not include every file from a manifest. The
129 matcher obtained by this property will match a path if it is to be
129 matcher obtained by this property will match a path if it is to be
130 included in the working directory.
130 included in the working directory.
131 """
131 """
132 # TODO there is potential to cache this property. For now, the matcher
132 # TODO there is potential to cache this property. For now, the matcher
133 # is resolved on every access. (But the called function does use a
133 # is resolved on every access. (But the called function does use a
134 # cache to keep the lookup fast.)
134 # cache to keep the lookup fast.)
135 return self._sparsematchfn()
135 return self._sparsematchfn()
136
136
137 @repocache('branch')
137 @repocache('branch')
138 def _branch(self):
138 def _branch(self):
139 try:
139 try:
140 return self._opener.read("branch").strip() or "default"
140 return self._opener.read("branch").strip() or "default"
141 except IOError as inst:
141 except IOError as inst:
142 if inst.errno != errno.ENOENT:
142 if inst.errno != errno.ENOENT:
143 raise
143 raise
144 return "default"
144 return "default"
145
145
146 @property
146 @property
147 def _pl(self):
147 def _pl(self):
148 return self._map.parents()
148 return self._map.parents()
149
149
150 def hasdir(self, d):
150 def hasdir(self, d):
151 return self._map.hastrackeddir(d)
151 return self._map.hastrackeddir(d)
152
152
153 @rootcache('.hgignore')
153 @rootcache('.hgignore')
154 def _ignore(self):
154 def _ignore(self):
155 files = self._ignorefiles()
155 files = self._ignorefiles()
156 if not files:
156 if not files:
157 return matchmod.never()
157 return matchmod.never()
158
158
159 pats = ['include:%s' % f for f in files]
159 pats = ['include:%s' % f for f in files]
160 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
160 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
161
161
162 @propertycache
162 @propertycache
163 def _slash(self):
163 def _slash(self):
164 return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'
164 return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'
165
165
166 @propertycache
166 @propertycache
167 def _checklink(self):
167 def _checklink(self):
168 return util.checklink(self._root)
168 return util.checklink(self._root)
169
169
170 @propertycache
170 @propertycache
171 def _checkexec(self):
171 def _checkexec(self):
172 return util.checkexec(self._root)
172 return util.checkexec(self._root)
173
173
174 @propertycache
174 @propertycache
175 def _checkcase(self):
175 def _checkcase(self):
176 return not util.fscasesensitive(self._join('.hg'))
176 return not util.fscasesensitive(self._join('.hg'))
177
177
178 def _join(self, f):
178 def _join(self, f):
179 # much faster than os.path.join()
179 # much faster than os.path.join()
180 # it's safe because f is always a relative path
180 # it's safe because f is always a relative path
181 return self._rootdir + f
181 return self._rootdir + f
182
182
183 def flagfunc(self, buildfallback):
183 def flagfunc(self, buildfallback):
184 if self._checklink and self._checkexec:
184 if self._checklink and self._checkexec:
185 def f(x):
185 def f(x):
186 try:
186 try:
187 st = os.lstat(self._join(x))
187 st = os.lstat(self._join(x))
188 if util.statislink(st):
188 if util.statislink(st):
189 return 'l'
189 return 'l'
190 if util.statisexec(st):
190 if util.statisexec(st):
191 return 'x'
191 return 'x'
192 except OSError:
192 except OSError:
193 pass
193 pass
194 return ''
194 return ''
195 return f
195 return f
196
196
197 fallback = buildfallback()
197 fallback = buildfallback()
198 if self._checklink:
198 if self._checklink:
199 def f(x):
199 def f(x):
200 if os.path.islink(self._join(x)):
200 if os.path.islink(self._join(x)):
201 return 'l'
201 return 'l'
202 if 'x' in fallback(x):
202 if 'x' in fallback(x):
203 return 'x'
203 return 'x'
204 return ''
204 return ''
205 return f
205 return f
206 if self._checkexec:
206 if self._checkexec:
207 def f(x):
207 def f(x):
208 if 'l' in fallback(x):
208 if 'l' in fallback(x):
209 return 'l'
209 return 'l'
210 if util.isexec(self._join(x)):
210 if util.isexec(self._join(x)):
211 return 'x'
211 return 'x'
212 return ''
212 return ''
213 return f
213 return f
214 else:
214 else:
215 return fallback
215 return fallback
216
216
217 @propertycache
217 @propertycache
218 def _cwd(self):
218 def _cwd(self):
219 # internal config: ui.forcecwd
219 # internal config: ui.forcecwd
220 forcecwd = self._ui.config('ui', 'forcecwd')
220 forcecwd = self._ui.config('ui', 'forcecwd')
221 if forcecwd:
221 if forcecwd:
222 return forcecwd
222 return forcecwd
223 return encoding.getcwd()
223 return encoding.getcwd()
224
224
225 def getcwd(self):
225 def getcwd(self):
226 '''Return the path from which a canonical path is calculated.
226 '''Return the path from which a canonical path is calculated.
227
227
228 This path should be used to resolve file patterns or to convert
228 This path should be used to resolve file patterns or to convert
229 canonical paths back to file paths for display. It shouldn't be
229 canonical paths back to file paths for display. It shouldn't be
230 used to get real file paths. Use vfs functions instead.
230 used to get real file paths. Use vfs functions instead.
231 '''
231 '''
232 cwd = self._cwd
232 cwd = self._cwd
233 if cwd == self._root:
233 if cwd == self._root:
234 return ''
234 return ''
235 # self._root ends with a path separator if self._root is '/' or 'C:\'
235 # self._root ends with a path separator if self._root is '/' or 'C:\'
236 rootsep = self._root
236 rootsep = self._root
237 if not util.endswithsep(rootsep):
237 if not util.endswithsep(rootsep):
238 rootsep += pycompat.ossep
238 rootsep += pycompat.ossep
239 if cwd.startswith(rootsep):
239 if cwd.startswith(rootsep):
240 return cwd[len(rootsep):]
240 return cwd[len(rootsep):]
241 else:
241 else:
242 # we're outside the repo. return an absolute path.
242 # we're outside the repo. return an absolute path.
243 return cwd
243 return cwd
244
244
245 def pathto(self, f, cwd=None):
245 def pathto(self, f, cwd=None):
246 if cwd is None:
246 if cwd is None:
247 cwd = self.getcwd()
247 cwd = self.getcwd()
248 path = util.pathto(self._root, cwd, f)
248 path = util.pathto(self._root, cwd, f)
249 if self._slash:
249 if self._slash:
250 return util.pconvert(path)
250 return util.pconvert(path)
251 return path
251 return path
252
252
253 def __getitem__(self, key):
253 def __getitem__(self, key):
254 '''Return the current state of key (a filename) in the dirstate.
254 '''Return the current state of key (a filename) in the dirstate.
255
255
256 States are:
256 States are:
257 n normal
257 n normal
258 m needs merging
258 m needs merging
259 r marked for removal
259 r marked for removal
260 a marked for addition
260 a marked for addition
261 ? not tracked
261 ? not tracked
262 '''
262 '''
263 return self._map.get(key, ("?",))[0]
263 return self._map.get(key, ("?",))[0]
264
264
265 def __contains__(self, key):
265 def __contains__(self, key):
266 return key in self._map
266 return key in self._map
267
267
268 def __iter__(self):
268 def __iter__(self):
269 return iter(sorted(self._map))
269 return iter(sorted(self._map))
270
270
271 def items(self):
271 def items(self):
272 return self._map.iteritems()
272 return self._map.iteritems()
273
273
274 iteritems = items
274 iteritems = items
275
275
276 def parents(self):
276 def parents(self):
277 return [self._validate(p) for p in self._pl]
277 return [self._validate(p) for p in self._pl]
278
278
279 def p1(self):
279 def p1(self):
280 return self._validate(self._pl[0])
280 return self._validate(self._pl[0])
281
281
282 def p2(self):
282 def p2(self):
283 return self._validate(self._pl[1])
283 return self._validate(self._pl[1])
284
284
285 def branch(self):
285 def branch(self):
286 return encoding.tolocal(self._branch)
286 return encoding.tolocal(self._branch)
287
287
288 def setparents(self, p1, p2=nullid):
288 def setparents(self, p1, p2=nullid):
289 """Set dirstate parents to p1 and p2.
289 """Set dirstate parents to p1 and p2.
290
290
291 When moving from two parents to one, 'm' merged entries a
291 When moving from two parents to one, 'm' merged entries a
292 adjusted to normal and previous copy records discarded and
292 adjusted to normal and previous copy records discarded and
293 returned by the call.
293 returned by the call.
294
294
295 See localrepo.setparents()
295 See localrepo.setparents()
296 """
296 """
297 if self._parentwriters == 0:
297 if self._parentwriters == 0:
298 raise ValueError("cannot set dirstate parent outside of "
298 raise ValueError("cannot set dirstate parent outside of "
299 "dirstate.parentchange context manager")
299 "dirstate.parentchange context manager")
300
300
301 self._dirty = True
301 self._dirty = True
302 oldp2 = self._pl[1]
302 oldp2 = self._pl[1]
303 if self._origpl is None:
303 if self._origpl is None:
304 self._origpl = self._pl
304 self._origpl = self._pl
305 self._map.setparents(p1, p2)
305 self._map.setparents(p1, p2)
306 copies = {}
306 copies = {}
307 if oldp2 != nullid and p2 == nullid:
307 if oldp2 != nullid and p2 == nullid:
308 candidatefiles = self._map.nonnormalset.union(
308 candidatefiles = self._map.nonnormalset.union(
309 self._map.otherparentset)
309 self._map.otherparentset)
310 for f in candidatefiles:
310 for f in candidatefiles:
311 s = self._map.get(f)
311 s = self._map.get(f)
312 if s is None:
312 if s is None:
313 continue
313 continue
314
314
315 # Discard 'm' markers when moving away from a merge state
315 # Discard 'm' markers when moving away from a merge state
316 if s[0] == 'm':
316 if s[0] == 'm':
317 source = self._map.copymap.get(f)
317 source = self._map.copymap.get(f)
318 if source:
318 if source:
319 copies[f] = source
319 copies[f] = source
320 self.normallookup(f)
320 self.normallookup(f)
321 # Also fix up otherparent markers
321 # Also fix up otherparent markers
322 elif s[0] == 'n' and s[2] == -2:
322 elif s[0] == 'n' and s[2] == -2:
323 source = self._map.copymap.get(f)
323 source = self._map.copymap.get(f)
324 if source:
324 if source:
325 copies[f] = source
325 copies[f] = source
326 self.add(f)
326 self.add(f)
327 return copies
327 return copies
328
328
329 def setbranch(self, branch):
329 def setbranch(self, branch):
330 self.__class__._branch.set(self, encoding.fromlocal(branch))
330 self.__class__._branch.set(self, encoding.fromlocal(branch))
331 f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
331 f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
332 try:
332 try:
333 f.write(self._branch + '\n')
333 f.write(self._branch + '\n')
334 f.close()
334 f.close()
335
335
336 # make sure filecache has the correct stat info for _branch after
336 # make sure filecache has the correct stat info for _branch after
337 # replacing the underlying file
337 # replacing the underlying file
338 ce = self._filecache['_branch']
338 ce = self._filecache['_branch']
339 if ce:
339 if ce:
340 ce.refresh()
340 ce.refresh()
341 except: # re-raises
341 except: # re-raises
342 f.discard()
342 f.discard()
343 raise
343 raise
344
344
345 def invalidate(self):
345 def invalidate(self):
346 '''Causes the next access to reread the dirstate.
346 '''Causes the next access to reread the dirstate.
347
347
348 This is different from localrepo.invalidatedirstate() because it always
348 This is different from localrepo.invalidatedirstate() because it always
349 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
349 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
350 check whether the dirstate has changed before rereading it.'''
350 check whether the dirstate has changed before rereading it.'''
351
351
352 for a in (r"_map", r"_branch", r"_ignore"):
352 for a in (r"_map", r"_branch", r"_ignore"):
353 if a in self.__dict__:
353 if a in self.__dict__:
354 delattr(self, a)
354 delattr(self, a)
355 self._lastnormaltime = 0
355 self._lastnormaltime = 0
356 self._dirty = False
356 self._dirty = False
357 self._updatedfiles.clear()
357 self._updatedfiles.clear()
358 self._parentwriters = 0
358 self._parentwriters = 0
359 self._origpl = None
359 self._origpl = None
360
360
361 def copy(self, source, dest):
361 def copy(self, source, dest):
362 """Mark dest as a copy of source. Unmark dest if source is None."""
362 """Mark dest as a copy of source. Unmark dest if source is None."""
363 if source == dest:
363 if source == dest:
364 return
364 return
365 self._dirty = True
365 self._dirty = True
366 if source is not None:
366 if source is not None:
367 self._map.copymap[dest] = source
367 self._map.copymap[dest] = source
368 self._updatedfiles.add(source)
368 self._updatedfiles.add(source)
369 self._updatedfiles.add(dest)
369 self._updatedfiles.add(dest)
370 elif self._map.copymap.pop(dest, None):
370 elif self._map.copymap.pop(dest, None):
371 self._updatedfiles.add(dest)
371 self._updatedfiles.add(dest)
372
372
373 def copied(self, file):
373 def copied(self, file):
374 return self._map.copymap.get(file, None)
374 return self._map.copymap.get(file, None)
375
375
376 def copies(self):
376 def copies(self):
377 return self._map.copymap
377 return self._map.copymap
378
378
379 def _addpath(self, f, state, mode, size, mtime):
379 def _addpath(self, f, state, mode, size, mtime):
380 oldstate = self[f]
380 oldstate = self[f]
381 if state == 'a' or oldstate == 'r':
381 if state == 'a' or oldstate == 'r':
382 scmutil.checkfilename(f)
382 scmutil.checkfilename(f)
383 if self._map.hastrackeddir(f):
383 if self._map.hastrackeddir(f):
384 raise error.Abort(_('directory %r already in dirstate') %
384 raise error.Abort(_('directory %r already in dirstate') %
385 pycompat.bytestr(f))
385 pycompat.bytestr(f))
386 # shadows
386 # shadows
387 for d in util.finddirs(f):
387 for d in util.finddirs(f):
388 if self._map.hastrackeddir(d):
388 if self._map.hastrackeddir(d):
389 break
389 break
390 entry = self._map.get(d)
390 entry = self._map.get(d)
391 if entry is not None and entry[0] != 'r':
391 if entry is not None and entry[0] != 'r':
392 raise error.Abort(
392 raise error.Abort(
393 _('file %r in dirstate clashes with %r') %
393 _('file %r in dirstate clashes with %r') %
394 (pycompat.bytestr(d), pycompat.bytestr(f)))
394 (pycompat.bytestr(d), pycompat.bytestr(f)))
395 self._dirty = True
395 self._dirty = True
396 self._updatedfiles.add(f)
396 self._updatedfiles.add(f)
397 self._map.addfile(f, oldstate, state, mode, size, mtime)
397 self._map.addfile(f, oldstate, state, mode, size, mtime)
398
398
399 def normal(self, f):
399 def normal(self, f):
400 '''Mark a file normal and clean.'''
400 '''Mark a file normal and clean.'''
401 s = os.lstat(self._join(f))
401 s = os.lstat(self._join(f))
402 mtime = s[stat.ST_MTIME]
402 mtime = s[stat.ST_MTIME]
403 self._addpath(f, 'n', s.st_mode,
403 self._addpath(f, 'n', s.st_mode,
404 s.st_size & _rangemask, mtime & _rangemask)
404 s.st_size & _rangemask, mtime & _rangemask)
405 self._map.copymap.pop(f, None)
405 self._map.copymap.pop(f, None)
406 if f in self._map.nonnormalset:
406 if f in self._map.nonnormalset:
407 self._map.nonnormalset.remove(f)
407 self._map.nonnormalset.remove(f)
408 if mtime > self._lastnormaltime:
408 if mtime > self._lastnormaltime:
409 # Remember the most recent modification timeslot for status(),
409 # Remember the most recent modification timeslot for status(),
410 # to make sure we won't miss future size-preserving file content
410 # to make sure we won't miss future size-preserving file content
411 # modifications that happen within the same timeslot.
411 # modifications that happen within the same timeslot.
412 self._lastnormaltime = mtime
412 self._lastnormaltime = mtime
413
413
414 def normallookup(self, f):
414 def normallookup(self, f):
415 '''Mark a file normal, but possibly dirty.'''
415 '''Mark a file normal, but possibly dirty.'''
416 if self._pl[1] != nullid:
416 if self._pl[1] != nullid:
417 # if there is a merge going on and the file was either
417 # if there is a merge going on and the file was either
418 # in state 'm' (-1) or coming from other parent (-2) before
418 # in state 'm' (-1) or coming from other parent (-2) before
419 # being removed, restore that state.
419 # being removed, restore that state.
420 entry = self._map.get(f)
420 entry = self._map.get(f)
421 if entry is not None:
421 if entry is not None:
422 if entry[0] == 'r' and entry[2] in (-1, -2):
422 if entry[0] == 'r' and entry[2] in (-1, -2):
423 source = self._map.copymap.get(f)
423 source = self._map.copymap.get(f)
424 if entry[2] == -1:
424 if entry[2] == -1:
425 self.merge(f)
425 self.merge(f)
426 elif entry[2] == -2:
426 elif entry[2] == -2:
427 self.otherparent(f)
427 self.otherparent(f)
428 if source:
428 if source:
429 self.copy(source, f)
429 self.copy(source, f)
430 return
430 return
431 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
431 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
432 return
432 return
433 self._addpath(f, 'n', 0, -1, -1)
433 self._addpath(f, 'n', 0, -1, -1)
434 self._map.copymap.pop(f, None)
434 self._map.copymap.pop(f, None)
435
435
436 def otherparent(self, f):
436 def otherparent(self, f):
437 '''Mark as coming from the other parent, always dirty.'''
437 '''Mark as coming from the other parent, always dirty.'''
438 if self._pl[1] == nullid:
438 if self._pl[1] == nullid:
439 raise error.Abort(_("setting %r to other parent "
439 raise error.Abort(_("setting %r to other parent "
440 "only allowed in merges") % f)
440 "only allowed in merges") % f)
441 if f in self and self[f] == 'n':
441 if f in self and self[f] == 'n':
442 # merge-like
442 # merge-like
443 self._addpath(f, 'm', 0, -2, -1)
443 self._addpath(f, 'm', 0, -2, -1)
444 else:
444 else:
445 # add-like
445 # add-like
446 self._addpath(f, 'n', 0, -2, -1)
446 self._addpath(f, 'n', 0, -2, -1)
447 self._map.copymap.pop(f, None)
447 self._map.copymap.pop(f, None)
448
448
449 def add(self, f):
449 def add(self, f):
450 '''Mark a file added.'''
450 '''Mark a file added.'''
451 self._addpath(f, 'a', 0, -1, -1)
451 self._addpath(f, 'a', 0, -1, -1)
452 self._map.copymap.pop(f, None)
452 self._map.copymap.pop(f, None)
453
453
454 def remove(self, f):
454 def remove(self, f):
455 '''Mark a file removed.'''
455 '''Mark a file removed.'''
456 self._dirty = True
456 self._dirty = True
457 oldstate = self[f]
457 oldstate = self[f]
458 size = 0
458 size = 0
459 if self._pl[1] != nullid:
459 if self._pl[1] != nullid:
460 entry = self._map.get(f)
460 entry = self._map.get(f)
461 if entry is not None:
461 if entry is not None:
462 # backup the previous state
462 # backup the previous state
463 if entry[0] == 'm': # merge
463 if entry[0] == 'm': # merge
464 size = -1
464 size = -1
465 elif entry[0] == 'n' and entry[2] == -2: # other parent
465 elif entry[0] == 'n' and entry[2] == -2: # other parent
466 size = -2
466 size = -2
467 self._map.otherparentset.add(f)
467 self._map.otherparentset.add(f)
468 self._updatedfiles.add(f)
468 self._updatedfiles.add(f)
469 self._map.removefile(f, oldstate, size)
469 self._map.removefile(f, oldstate, size)
470 if size == 0:
470 if size == 0:
471 self._map.copymap.pop(f, None)
471 self._map.copymap.pop(f, None)
472
472
473 def merge(self, f):
473 def merge(self, f):
474 '''Mark a file merged.'''
474 '''Mark a file merged.'''
475 if self._pl[1] == nullid:
475 if self._pl[1] == nullid:
476 return self.normallookup(f)
476 return self.normallookup(f)
477 return self.otherparent(f)
477 return self.otherparent(f)
478
478
479 def drop(self, f):
479 def drop(self, f):
480 '''Drop a file from the dirstate'''
480 '''Drop a file from the dirstate'''
481 oldstate = self[f]
481 oldstate = self[f]
482 if self._map.dropfile(f, oldstate):
482 if self._map.dropfile(f, oldstate):
483 self._dirty = True
483 self._dirty = True
484 self._updatedfiles.add(f)
484 self._updatedfiles.add(f)
485 self._map.copymap.pop(f, None)
485 self._map.copymap.pop(f, None)
486
486
487 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
487 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
488 if exists is None:
488 if exists is None:
489 exists = os.path.lexists(os.path.join(self._root, path))
489 exists = os.path.lexists(os.path.join(self._root, path))
490 if not exists:
490 if not exists:
491 # Maybe a path component exists
491 # Maybe a path component exists
492 if not ignoremissing and '/' in path:
492 if not ignoremissing and '/' in path:
493 d, f = path.rsplit('/', 1)
493 d, f = path.rsplit('/', 1)
494 d = self._normalize(d, False, ignoremissing, None)
494 d = self._normalize(d, False, ignoremissing, None)
495 folded = d + "/" + f
495 folded = d + "/" + f
496 else:
496 else:
497 # No path components, preserve original case
497 # No path components, preserve original case
498 folded = path
498 folded = path
499 else:
499 else:
500 # recursively normalize leading directory components
500 # recursively normalize leading directory components
501 # against dirstate
501 # against dirstate
502 if '/' in normed:
502 if '/' in normed:
503 d, f = normed.rsplit('/', 1)
503 d, f = normed.rsplit('/', 1)
504 d = self._normalize(d, False, ignoremissing, True)
504 d = self._normalize(d, False, ignoremissing, True)
505 r = self._root + "/" + d
505 r = self._root + "/" + d
506 folded = d + "/" + util.fspath(f, r)
506 folded = d + "/" + util.fspath(f, r)
507 else:
507 else:
508 folded = util.fspath(normed, self._root)
508 folded = util.fspath(normed, self._root)
509 storemap[normed] = folded
509 storemap[normed] = folded
510
510
511 return folded
511 return folded
512
512
513 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
513 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
514 normed = util.normcase(path)
514 normed = util.normcase(path)
515 folded = self._map.filefoldmap.get(normed, None)
515 folded = self._map.filefoldmap.get(normed, None)
516 if folded is None:
516 if folded is None:
517 if isknown:
517 if isknown:
518 folded = path
518 folded = path
519 else:
519 else:
520 folded = self._discoverpath(path, normed, ignoremissing, exists,
520 folded = self._discoverpath(path, normed, ignoremissing, exists,
521 self._map.filefoldmap)
521 self._map.filefoldmap)
522 return folded
522 return folded
523
523
524 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
524 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
525 normed = util.normcase(path)
525 normed = util.normcase(path)
526 folded = self._map.filefoldmap.get(normed, None)
526 folded = self._map.filefoldmap.get(normed, None)
527 if folded is None:
527 if folded is None:
528 folded = self._map.dirfoldmap.get(normed, None)
528 folded = self._map.dirfoldmap.get(normed, None)
529 if folded is None:
529 if folded is None:
530 if isknown:
530 if isknown:
531 folded = path
531 folded = path
532 else:
532 else:
533 # store discovered result in dirfoldmap so that future
533 # store discovered result in dirfoldmap so that future
534 # normalizefile calls don't start matching directories
534 # normalizefile calls don't start matching directories
535 folded = self._discoverpath(path, normed, ignoremissing, exists,
535 folded = self._discoverpath(path, normed, ignoremissing, exists,
536 self._map.dirfoldmap)
536 self._map.dirfoldmap)
537 return folded
537 return folded
538
538
539 def normalize(self, path, isknown=False, ignoremissing=False):
539 def normalize(self, path, isknown=False, ignoremissing=False):
540 '''
540 '''
541 normalize the case of a pathname when on a casefolding filesystem
541 normalize the case of a pathname when on a casefolding filesystem
542
542
543 isknown specifies whether the filename came from walking the
543 isknown specifies whether the filename came from walking the
544 disk, to avoid extra filesystem access.
544 disk, to avoid extra filesystem access.
545
545
546 If ignoremissing is True, missing path are returned
546 If ignoremissing is True, missing path are returned
547 unchanged. Otherwise, we try harder to normalize possibly
547 unchanged. Otherwise, we try harder to normalize possibly
548 existing path components.
548 existing path components.
549
549
550 The normalized case is determined based on the following precedence:
550 The normalized case is determined based on the following precedence:
551
551
552 - version of name already stored in the dirstate
552 - version of name already stored in the dirstate
553 - version of name stored on disk
553 - version of name stored on disk
554 - version provided via command arguments
554 - version provided via command arguments
555 '''
555 '''
556
556
557 if self._checkcase:
557 if self._checkcase:
558 return self._normalize(path, isknown, ignoremissing)
558 return self._normalize(path, isknown, ignoremissing)
559 return path
559 return path
560
560
561 def clear(self):
561 def clear(self):
562 self._map.clear()
562 self._map.clear()
563 self._lastnormaltime = 0
563 self._lastnormaltime = 0
564 self._updatedfiles.clear()
564 self._updatedfiles.clear()
565 self._dirty = True
565 self._dirty = True
566
566
567 def rebuild(self, parent, allfiles, changedfiles=None):
567 def rebuild(self, parent, allfiles, changedfiles=None):
568 if changedfiles is None:
568 if changedfiles is None:
569 # Rebuild entire dirstate
569 # Rebuild entire dirstate
570 changedfiles = allfiles
570 changedfiles = allfiles
571 lastnormaltime = self._lastnormaltime
571 lastnormaltime = self._lastnormaltime
572 self.clear()
572 self.clear()
573 self._lastnormaltime = lastnormaltime
573 self._lastnormaltime = lastnormaltime
574
574
575 if self._origpl is None:
575 if self._origpl is None:
576 self._origpl = self._pl
576 self._origpl = self._pl
577 self._map.setparents(parent, nullid)
577 self._map.setparents(parent, nullid)
578 for f in changedfiles:
578 for f in changedfiles:
579 if f in allfiles:
579 if f in allfiles:
580 self.normallookup(f)
580 self.normallookup(f)
581 else:
581 else:
582 self.drop(f)
582 self.drop(f)
583
583
584 self._dirty = True
584 self._dirty = True
585
585
586 def identity(self):
586 def identity(self):
587 '''Return identity of dirstate itself to detect changing in storage
587 '''Return identity of dirstate itself to detect changing in storage
588
588
589 If identity of previous dirstate is equal to this, writing
589 If identity of previous dirstate is equal to this, writing
590 changes based on the former dirstate out can keep consistency.
590 changes based on the former dirstate out can keep consistency.
591 '''
591 '''
592 return self._map.identity
592 return self._map.identity
593
593
594 def write(self, tr):
594 def write(self, tr):
595 if not self._dirty:
595 if not self._dirty:
596 return
596 return
597
597
598 filename = self._filename
598 filename = self._filename
599 if tr:
599 if tr:
600 # 'dirstate.write()' is not only for writing in-memory
600 # 'dirstate.write()' is not only for writing in-memory
601 # changes out, but also for dropping ambiguous timestamp.
601 # changes out, but also for dropping ambiguous timestamp.
602 # delayed writing re-raise "ambiguous timestamp issue".
602 # delayed writing re-raise "ambiguous timestamp issue".
603 # See also the wiki page below for detail:
603 # See also the wiki page below for detail:
604 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
604 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
605
605
606 # emulate dropping timestamp in 'parsers.pack_dirstate'
606 # emulate dropping timestamp in 'parsers.pack_dirstate'
607 now = _getfsnow(self._opener)
607 now = _getfsnow(self._opener)
608 self._map.clearambiguoustimes(self._updatedfiles, now)
608 self._map.clearambiguoustimes(self._updatedfiles, now)
609
609
610 # emulate that all 'dirstate.normal' results are written out
610 # emulate that all 'dirstate.normal' results are written out
611 self._lastnormaltime = 0
611 self._lastnormaltime = 0
612 self._updatedfiles.clear()
612 self._updatedfiles.clear()
613
613
614 # delay writing in-memory changes out
614 # delay writing in-memory changes out
615 tr.addfilegenerator('dirstate', (self._filename,),
615 tr.addfilegenerator('dirstate', (self._filename,),
616 self._writedirstate, location='plain')
616 self._writedirstate, location='plain')
617 return
617 return
618
618
619 st = self._opener(filename, "w", atomictemp=True, checkambig=True)
619 st = self._opener(filename, "w", atomictemp=True, checkambig=True)
620 self._writedirstate(st)
620 self._writedirstate(st)
621
621
622 def addparentchangecallback(self, category, callback):
622 def addparentchangecallback(self, category, callback):
623 """add a callback to be called when the wd parents are changed
623 """add a callback to be called when the wd parents are changed
624
624
625 Callback will be called with the following arguments:
625 Callback will be called with the following arguments:
626 dirstate, (oldp1, oldp2), (newp1, newp2)
626 dirstate, (oldp1, oldp2), (newp1, newp2)
627
627
628 Category is a unique identifier to allow overwriting an old callback
628 Category is a unique identifier to allow overwriting an old callback
629 with a newer callback.
629 with a newer callback.
630 """
630 """
631 self._plchangecallbacks[category] = callback
631 self._plchangecallbacks[category] = callback
632
632
633 def _writedirstate(self, st):
633 def _writedirstate(self, st):
634 # notify callbacks about parents change
634 # notify callbacks about parents change
635 if self._origpl is not None and self._origpl != self._pl:
635 if self._origpl is not None and self._origpl != self._pl:
636 for c, callback in sorted(self._plchangecallbacks.iteritems()):
636 for c, callback in sorted(self._plchangecallbacks.iteritems()):
637 callback(self, self._origpl, self._pl)
637 callback(self, self._origpl, self._pl)
638 self._origpl = None
638 self._origpl = None
639 # use the modification time of the newly created temporary file as the
639 # use the modification time of the newly created temporary file as the
640 # filesystem's notion of 'now'
640 # filesystem's notion of 'now'
641 now = util.fstat(st)[stat.ST_MTIME] & _rangemask
641 now = util.fstat(st)[stat.ST_MTIME] & _rangemask
642
642
643 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
643 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
644 # timestamp of each entries in dirstate, because of 'now > mtime'
644 # timestamp of each entries in dirstate, because of 'now > mtime'
645 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite')
645 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite')
646 if delaywrite > 0:
646 if delaywrite > 0:
647 # do we have any files to delay for?
647 # do we have any files to delay for?
648 for f, e in self._map.iteritems():
648 for f, e in self._map.iteritems():
649 if e[0] == 'n' and e[3] == now:
649 if e[0] == 'n' and e[3] == now:
650 import time # to avoid useless import
650 import time # to avoid useless import
651 # rather than sleep n seconds, sleep until the next
651 # rather than sleep n seconds, sleep until the next
652 # multiple of n seconds
652 # multiple of n seconds
653 clock = time.time()
653 clock = time.time()
654 start = int(clock) - (int(clock) % delaywrite)
654 start = int(clock) - (int(clock) % delaywrite)
655 end = start + delaywrite
655 end = start + delaywrite
656 time.sleep(end - clock)
656 time.sleep(end - clock)
657 now = end # trust our estimate that the end is near now
657 now = end # trust our estimate that the end is near now
658 break
658 break
659
659
660 self._map.write(st, now)
660 self._map.write(st, now)
661 self._lastnormaltime = 0
661 self._lastnormaltime = 0
662 self._dirty = False
662 self._dirty = False
663
663
664 def _dirignore(self, f):
664 def _dirignore(self, f):
665 if f == '.':
665 if f == '.':
666 return False
666 return False
667 if self._ignore(f):
667 if self._ignore(f):
668 return True
668 return True
669 for p in util.finddirs(f):
669 for p in util.finddirs(f):
670 if self._ignore(p):
670 if self._ignore(p):
671 return True
671 return True
672 return False
672 return False
673
673
674 def _ignorefiles(self):
674 def _ignorefiles(self):
675 files = []
675 files = []
676 if os.path.exists(self._join('.hgignore')):
676 if os.path.exists(self._join('.hgignore')):
677 files.append(self._join('.hgignore'))
677 files.append(self._join('.hgignore'))
678 for name, path in self._ui.configitems("ui"):
678 for name, path in self._ui.configitems("ui"):
679 if name == 'ignore' or name.startswith('ignore.'):
679 if name == 'ignore' or name.startswith('ignore.'):
680 # we need to use os.path.join here rather than self._join
680 # we need to use os.path.join here rather than self._join
681 # because path is arbitrary and user-specified
681 # because path is arbitrary and user-specified
682 files.append(os.path.join(self._rootdir, util.expandpath(path)))
682 files.append(os.path.join(self._rootdir, util.expandpath(path)))
683 return files
683 return files
684
684
685 def _ignorefileandline(self, f):
685 def _ignorefileandline(self, f):
686 files = collections.deque(self._ignorefiles())
686 files = collections.deque(self._ignorefiles())
687 visited = set()
687 visited = set()
688 while files:
688 while files:
689 i = files.popleft()
689 i = files.popleft()
690 patterns = matchmod.readpatternfile(i, self._ui.warn,
690 patterns = matchmod.readpatternfile(i, self._ui.warn,
691 sourceinfo=True)
691 sourceinfo=True)
692 for pattern, lineno, line in patterns:
692 for pattern, lineno, line in patterns:
693 kind, p = matchmod._patsplit(pattern, 'glob')
693 kind, p = matchmod._patsplit(pattern, 'glob')
694 if kind == "subinclude":
694 if kind == "subinclude":
695 if p not in visited:
695 if p not in visited:
696 files.append(p)
696 files.append(p)
697 continue
697 continue
698 m = matchmod.match(self._root, '', [], [pattern],
698 m = matchmod.match(self._root, '', [], [pattern],
699 warn=self._ui.warn)
699 warn=self._ui.warn)
700 if m(f):
700 if m(f):
701 return (i, lineno, line)
701 return (i, lineno, line)
702 visited.add(i)
702 visited.add(i)
703 return (None, -1, "")
703 return (None, -1, "")
704
704
705 def _walkexplicit(self, match, subrepos):
705 def _walkexplicit(self, match, subrepos):
706 '''Get stat data about the files explicitly specified by match.
706 '''Get stat data about the files explicitly specified by match.
707
707
708 Return a triple (results, dirsfound, dirsnotfound).
708 Return a triple (results, dirsfound, dirsnotfound).
709 - results is a mapping from filename to stat result. It also contains
709 - results is a mapping from filename to stat result. It also contains
710 listings mapping subrepos and .hg to None.
710 listings mapping subrepos and .hg to None.
711 - dirsfound is a list of files found to be directories.
711 - dirsfound is a list of files found to be directories.
712 - dirsnotfound is a list of files that the dirstate thinks are
712 - dirsnotfound is a list of files that the dirstate thinks are
713 directories and that were not found.'''
713 directories and that were not found.'''
714
714
715 def badtype(mode):
715 def badtype(mode):
716 kind = _('unknown')
716 kind = _('unknown')
717 if stat.S_ISCHR(mode):
717 if stat.S_ISCHR(mode):
718 kind = _('character device')
718 kind = _('character device')
719 elif stat.S_ISBLK(mode):
719 elif stat.S_ISBLK(mode):
720 kind = _('block device')
720 kind = _('block device')
721 elif stat.S_ISFIFO(mode):
721 elif stat.S_ISFIFO(mode):
722 kind = _('fifo')
722 kind = _('fifo')
723 elif stat.S_ISSOCK(mode):
723 elif stat.S_ISSOCK(mode):
724 kind = _('socket')
724 kind = _('socket')
725 elif stat.S_ISDIR(mode):
725 elif stat.S_ISDIR(mode):
726 kind = _('directory')
726 kind = _('directory')
727 return _('unsupported file type (type is %s)') % kind
727 return _('unsupported file type (type is %s)') % kind
728
728
729 matchedir = match.explicitdir
729 matchedir = match.explicitdir
730 badfn = match.bad
730 badfn = match.bad
731 dmap = self._map
731 dmap = self._map
732 lstat = os.lstat
732 lstat = os.lstat
733 getkind = stat.S_IFMT
733 getkind = stat.S_IFMT
734 dirkind = stat.S_IFDIR
734 dirkind = stat.S_IFDIR
735 regkind = stat.S_IFREG
735 regkind = stat.S_IFREG
736 lnkkind = stat.S_IFLNK
736 lnkkind = stat.S_IFLNK
737 join = self._join
737 join = self._join
738 dirsfound = []
738 dirsfound = []
739 foundadd = dirsfound.append
739 foundadd = dirsfound.append
740 dirsnotfound = []
740 dirsnotfound = []
741 notfoundadd = dirsnotfound.append
741 notfoundadd = dirsnotfound.append
742
742
743 if not match.isexact() and self._checkcase:
743 if not match.isexact() and self._checkcase:
744 normalize = self._normalize
744 normalize = self._normalize
745 else:
745 else:
746 normalize = None
746 normalize = None
747
747
748 files = sorted(match.files())
748 files = sorted(match.files())
749 subrepos.sort()
749 subrepos.sort()
750 i, j = 0, 0
750 i, j = 0, 0
751 while i < len(files) and j < len(subrepos):
751 while i < len(files) and j < len(subrepos):
752 subpath = subrepos[j] + "/"
752 subpath = subrepos[j] + "/"
753 if files[i] < subpath:
753 if files[i] < subpath:
754 i += 1
754 i += 1
755 continue
755 continue
756 while i < len(files) and files[i].startswith(subpath):
756 while i < len(files) and files[i].startswith(subpath):
757 del files[i]
757 del files[i]
758 j += 1
758 j += 1
759
759
760 if not files or '.' in files:
760 if not files or '.' in files:
761 files = ['.']
761 files = ['.']
762 # constructing the foldmap is expensive, so don't do it for the
763 # common case where files is ['.']
764 normalize = None
762 results = dict.fromkeys(subrepos)
765 results = dict.fromkeys(subrepos)
763 results['.hg'] = None
766 results['.hg'] = None
764
767
765 for ff in files:
768 for ff in files:
766 # constructing the foldmap is expensive, so don't do it for the
769 if normalize:
767 # common case where files is ['.']
768 if normalize and ff != '.':
769 nf = normalize(ff, False, True)
770 nf = normalize(ff, False, True)
770 else:
771 else:
771 nf = ff
772 nf = ff
772 if nf in results:
773 if nf in results:
773 continue
774 continue
774
775
775 try:
776 try:
776 st = lstat(join(nf))
777 st = lstat(join(nf))
777 kind = getkind(st.st_mode)
778 kind = getkind(st.st_mode)
778 if kind == dirkind:
779 if kind == dirkind:
779 if nf in dmap:
780 if nf in dmap:
780 # file replaced by dir on disk but still in dirstate
781 # file replaced by dir on disk but still in dirstate
781 results[nf] = None
782 results[nf] = None
782 if matchedir:
783 if matchedir:
783 matchedir(nf)
784 matchedir(nf)
784 foundadd((nf, ff))
785 foundadd((nf, ff))
785 elif kind == regkind or kind == lnkkind:
786 elif kind == regkind or kind == lnkkind:
786 results[nf] = st
787 results[nf] = st
787 else:
788 else:
788 badfn(ff, badtype(kind))
789 badfn(ff, badtype(kind))
789 if nf in dmap:
790 if nf in dmap:
790 results[nf] = None
791 results[nf] = None
791 except OSError as inst: # nf not found on disk - it is dirstate only
792 except OSError as inst: # nf not found on disk - it is dirstate only
792 if nf in dmap: # does it exactly match a missing file?
793 if nf in dmap: # does it exactly match a missing file?
793 results[nf] = None
794 results[nf] = None
794 else: # does it match a missing directory?
795 else: # does it match a missing directory?
795 if self._map.hasdir(nf):
796 if self._map.hasdir(nf):
796 if matchedir:
797 if matchedir:
797 matchedir(nf)
798 matchedir(nf)
798 notfoundadd(nf)
799 notfoundadd(nf)
799 else:
800 else:
800 badfn(ff, encoding.strtolocal(inst.strerror))
801 badfn(ff, encoding.strtolocal(inst.strerror))
801
802
802 # match.files() may contain explicitly-specified paths that shouldn't
803 # match.files() may contain explicitly-specified paths that shouldn't
803 # be taken; drop them from the list of files found. dirsfound/notfound
804 # be taken; drop them from the list of files found. dirsfound/notfound
804 # aren't filtered here because they will be tested later.
805 # aren't filtered here because they will be tested later.
805 if match.anypats():
806 if match.anypats():
806 for f in list(results):
807 for f in list(results):
807 if f == '.hg' or f in subrepos:
808 if f == '.hg' or f in subrepos:
808 # keep sentinel to disable further out-of-repo walks
809 # keep sentinel to disable further out-of-repo walks
809 continue
810 continue
810 if not match(f):
811 if not match(f):
811 del results[f]
812 del results[f]
812
813
813 # Case insensitive filesystems cannot rely on lstat() failing to detect
814 # Case insensitive filesystems cannot rely on lstat() failing to detect
814 # a case-only rename. Prune the stat object for any file that does not
815 # a case-only rename. Prune the stat object for any file that does not
815 # match the case in the filesystem, if there are multiple files that
816 # match the case in the filesystem, if there are multiple files that
816 # normalize to the same path.
817 # normalize to the same path.
817 if match.isexact() and self._checkcase:
818 if match.isexact() and self._checkcase:
818 normed = {}
819 normed = {}
819
820
820 for f, st in results.iteritems():
821 for f, st in results.iteritems():
821 if st is None:
822 if st is None:
822 continue
823 continue
823
824
824 nc = util.normcase(f)
825 nc = util.normcase(f)
825 paths = normed.get(nc)
826 paths = normed.get(nc)
826
827
827 if paths is None:
828 if paths is None:
828 paths = set()
829 paths = set()
829 normed[nc] = paths
830 normed[nc] = paths
830
831
831 paths.add(f)
832 paths.add(f)
832
833
833 for norm, paths in normed.iteritems():
834 for norm, paths in normed.iteritems():
834 if len(paths) > 1:
835 if len(paths) > 1:
835 for path in paths:
836 for path in paths:
836 folded = self._discoverpath(path, norm, True, None,
837 folded = self._discoverpath(path, norm, True, None,
837 self._map.dirfoldmap)
838 self._map.dirfoldmap)
838 if path != folded:
839 if path != folded:
839 results[path] = None
840 results[path] = None
840
841
841 return results, dirsfound, dirsnotfound
842 return results, dirsfound, dirsnotfound
842
843
843 def walk(self, match, subrepos, unknown, ignored, full=True):
844 def walk(self, match, subrepos, unknown, ignored, full=True):
844 '''
845 '''
845 Walk recursively through the directory tree, finding all files
846 Walk recursively through the directory tree, finding all files
846 matched by match.
847 matched by match.
847
848
848 If full is False, maybe skip some known-clean files.
849 If full is False, maybe skip some known-clean files.
849
850
850 Return a dict mapping filename to stat-like object (either
851 Return a dict mapping filename to stat-like object (either
851 mercurial.osutil.stat instance or return value of os.stat()).
852 mercurial.osutil.stat instance or return value of os.stat()).
852
853
853 '''
854 '''
854 # full is a flag that extensions that hook into walk can use -- this
855 # full is a flag that extensions that hook into walk can use -- this
855 # implementation doesn't use it at all. This satisfies the contract
856 # implementation doesn't use it at all. This satisfies the contract
856 # because we only guarantee a "maybe".
857 # because we only guarantee a "maybe".
857
858
858 if ignored:
859 if ignored:
859 ignore = util.never
860 ignore = util.never
860 dirignore = util.never
861 dirignore = util.never
861 elif unknown:
862 elif unknown:
862 ignore = self._ignore
863 ignore = self._ignore
863 dirignore = self._dirignore
864 dirignore = self._dirignore
864 else:
865 else:
865 # if not unknown and not ignored, drop dir recursion and step 2
866 # if not unknown and not ignored, drop dir recursion and step 2
866 ignore = util.always
867 ignore = util.always
867 dirignore = util.always
868 dirignore = util.always
868
869
869 matchfn = match.matchfn
870 matchfn = match.matchfn
870 matchalways = match.always()
871 matchalways = match.always()
871 matchtdir = match.traversedir
872 matchtdir = match.traversedir
872 dmap = self._map
873 dmap = self._map
873 listdir = util.listdir
874 listdir = util.listdir
874 lstat = os.lstat
875 lstat = os.lstat
875 dirkind = stat.S_IFDIR
876 dirkind = stat.S_IFDIR
876 regkind = stat.S_IFREG
877 regkind = stat.S_IFREG
877 lnkkind = stat.S_IFLNK
878 lnkkind = stat.S_IFLNK
878 join = self._join
879 join = self._join
879
880
880 exact = skipstep3 = False
881 exact = skipstep3 = False
881 if match.isexact(): # match.exact
882 if match.isexact(): # match.exact
882 exact = True
883 exact = True
883 dirignore = util.always # skip step 2
884 dirignore = util.always # skip step 2
884 elif match.prefix(): # match.match, no patterns
885 elif match.prefix(): # match.match, no patterns
885 skipstep3 = True
886 skipstep3 = True
886
887
887 if not exact and self._checkcase:
888 if not exact and self._checkcase:
888 normalize = self._normalize
889 normalize = self._normalize
889 normalizefile = self._normalizefile
890 normalizefile = self._normalizefile
890 skipstep3 = False
891 skipstep3 = False
891 else:
892 else:
892 normalize = self._normalize
893 normalize = self._normalize
893 normalizefile = None
894 normalizefile = None
894
895
895 # step 1: find all explicit files
896 # step 1: find all explicit files
896 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
897 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
897
898
898 skipstep3 = skipstep3 and not (work or dirsnotfound)
899 skipstep3 = skipstep3 and not (work or dirsnotfound)
899 work = [d for d in work if not dirignore(d[0])]
900 work = [d for d in work if not dirignore(d[0])]
900
901
901 # step 2: visit subdirectories
902 # step 2: visit subdirectories
902 def traverse(work, alreadynormed):
903 def traverse(work, alreadynormed):
903 wadd = work.append
904 wadd = work.append
904 while work:
905 while work:
905 nd = work.pop()
906 nd = work.pop()
906 visitentries = match.visitchildrenset(nd)
907 visitentries = match.visitchildrenset(nd)
907 if not visitentries:
908 if not visitentries:
908 continue
909 continue
909 if visitentries == 'this' or visitentries == 'all':
910 if visitentries == 'this' or visitentries == 'all':
910 visitentries = None
911 visitentries = None
911 skip = None
912 skip = None
912 if nd == '.':
913 if nd == '.':
913 nd = ''
914 nd = ''
914 else:
915 else:
915 skip = '.hg'
916 skip = '.hg'
916 try:
917 try:
917 entries = listdir(join(nd), stat=True, skip=skip)
918 entries = listdir(join(nd), stat=True, skip=skip)
918 except OSError as inst:
919 except OSError as inst:
919 if inst.errno in (errno.EACCES, errno.ENOENT):
920 if inst.errno in (errno.EACCES, errno.ENOENT):
920 match.bad(self.pathto(nd),
921 match.bad(self.pathto(nd),
921 encoding.strtolocal(inst.strerror))
922 encoding.strtolocal(inst.strerror))
922 continue
923 continue
923 raise
924 raise
924 for f, kind, st in entries:
925 for f, kind, st in entries:
925 # Some matchers may return files in the visitentries set,
926 # Some matchers may return files in the visitentries set,
926 # instead of 'this', if the matcher explicitly mentions them
927 # instead of 'this', if the matcher explicitly mentions them
927 # and is not an exactmatcher. This is acceptable; we do not
928 # and is not an exactmatcher. This is acceptable; we do not
928 # make any hard assumptions about file-or-directory below
929 # make any hard assumptions about file-or-directory below
929 # based on the presence of `f` in visitentries. If
930 # based on the presence of `f` in visitentries. If
930 # visitchildrenset returned a set, we can always skip the
931 # visitchildrenset returned a set, we can always skip the
931 # entries *not* in the set it provided regardless of whether
932 # entries *not* in the set it provided regardless of whether
932 # they're actually a file or a directory.
933 # they're actually a file or a directory.
933 if visitentries and f not in visitentries:
934 if visitentries and f not in visitentries:
934 continue
935 continue
935 if normalizefile:
936 if normalizefile:
936 # even though f might be a directory, we're only
937 # even though f might be a directory, we're only
937 # interested in comparing it to files currently in the
938 # interested in comparing it to files currently in the
938 # dmap -- therefore normalizefile is enough
939 # dmap -- therefore normalizefile is enough
939 nf = normalizefile(nd and (nd + "/" + f) or f, True,
940 nf = normalizefile(nd and (nd + "/" + f) or f, True,
940 True)
941 True)
941 else:
942 else:
942 nf = nd and (nd + "/" + f) or f
943 nf = nd and (nd + "/" + f) or f
943 if nf not in results:
944 if nf not in results:
944 if kind == dirkind:
945 if kind == dirkind:
945 if not ignore(nf):
946 if not ignore(nf):
946 if matchtdir:
947 if matchtdir:
947 matchtdir(nf)
948 matchtdir(nf)
948 wadd(nf)
949 wadd(nf)
949 if nf in dmap and (matchalways or matchfn(nf)):
950 if nf in dmap and (matchalways or matchfn(nf)):
950 results[nf] = None
951 results[nf] = None
951 elif kind == regkind or kind == lnkkind:
952 elif kind == regkind or kind == lnkkind:
952 if nf in dmap:
953 if nf in dmap:
953 if matchalways or matchfn(nf):
954 if matchalways or matchfn(nf):
954 results[nf] = st
955 results[nf] = st
955 elif ((matchalways or matchfn(nf))
956 elif ((matchalways or matchfn(nf))
956 and not ignore(nf)):
957 and not ignore(nf)):
957 # unknown file -- normalize if necessary
958 # unknown file -- normalize if necessary
958 if not alreadynormed:
959 if not alreadynormed:
959 nf = normalize(nf, False, True)
960 nf = normalize(nf, False, True)
960 results[nf] = st
961 results[nf] = st
961 elif nf in dmap and (matchalways or matchfn(nf)):
962 elif nf in dmap and (matchalways or matchfn(nf)):
962 results[nf] = None
963 results[nf] = None
963
964
964 for nd, d in work:
965 for nd, d in work:
965 # alreadynormed means that processwork doesn't have to do any
966 # alreadynormed means that processwork doesn't have to do any
966 # expensive directory normalization
967 # expensive directory normalization
967 alreadynormed = not normalize or nd == d
968 alreadynormed = not normalize or nd == d
968 traverse([d], alreadynormed)
969 traverse([d], alreadynormed)
969
970
970 for s in subrepos:
971 for s in subrepos:
971 del results[s]
972 del results[s]
972 del results['.hg']
973 del results['.hg']
973
974
974 # step 3: visit remaining files from dmap
975 # step 3: visit remaining files from dmap
975 if not skipstep3 and not exact:
976 if not skipstep3 and not exact:
976 # If a dmap file is not in results yet, it was either
977 # If a dmap file is not in results yet, it was either
977 # a) not matching matchfn b) ignored, c) missing, or d) under a
978 # a) not matching matchfn b) ignored, c) missing, or d) under a
978 # symlink directory.
979 # symlink directory.
979 if not results and matchalways:
980 if not results and matchalways:
980 visit = [f for f in dmap]
981 visit = [f for f in dmap]
981 else:
982 else:
982 visit = [f for f in dmap if f not in results and matchfn(f)]
983 visit = [f for f in dmap if f not in results and matchfn(f)]
983 visit.sort()
984 visit.sort()
984
985
985 if unknown:
986 if unknown:
986 # unknown == True means we walked all dirs under the roots
987 # unknown == True means we walked all dirs under the roots
987 # that wasn't ignored, and everything that matched was stat'ed
988 # that wasn't ignored, and everything that matched was stat'ed
988 # and is already in results.
989 # and is already in results.
989 # The rest must thus be ignored or under a symlink.
990 # The rest must thus be ignored or under a symlink.
990 audit_path = pathutil.pathauditor(self._root, cached=True)
991 audit_path = pathutil.pathauditor(self._root, cached=True)
991
992
992 for nf in iter(visit):
993 for nf in iter(visit):
993 # If a stat for the same file was already added with a
994 # If a stat for the same file was already added with a
994 # different case, don't add one for this, since that would
995 # different case, don't add one for this, since that would
995 # make it appear as if the file exists under both names
996 # make it appear as if the file exists under both names
996 # on disk.
997 # on disk.
997 if (normalizefile and
998 if (normalizefile and
998 normalizefile(nf, True, True) in results):
999 normalizefile(nf, True, True) in results):
999 results[nf] = None
1000 results[nf] = None
1000 # Report ignored items in the dmap as long as they are not
1001 # Report ignored items in the dmap as long as they are not
1001 # under a symlink directory.
1002 # under a symlink directory.
1002 elif audit_path.check(nf):
1003 elif audit_path.check(nf):
1003 try:
1004 try:
1004 results[nf] = lstat(join(nf))
1005 results[nf] = lstat(join(nf))
1005 # file was just ignored, no links, and exists
1006 # file was just ignored, no links, and exists
1006 except OSError:
1007 except OSError:
1007 # file doesn't exist
1008 # file doesn't exist
1008 results[nf] = None
1009 results[nf] = None
1009 else:
1010 else:
1010 # It's either missing or under a symlink directory
1011 # It's either missing or under a symlink directory
1011 # which we in this case report as missing
1012 # which we in this case report as missing
1012 results[nf] = None
1013 results[nf] = None
1013 else:
1014 else:
1014 # We may not have walked the full directory tree above,
1015 # We may not have walked the full directory tree above,
1015 # so stat and check everything we missed.
1016 # so stat and check everything we missed.
1016 iv = iter(visit)
1017 iv = iter(visit)
1017 for st in util.statfiles([join(i) for i in visit]):
1018 for st in util.statfiles([join(i) for i in visit]):
1018 results[next(iv)] = st
1019 results[next(iv)] = st
1019 return results
1020 return results
1020
1021
1021 def status(self, match, subrepos, ignored, clean, unknown):
1022 def status(self, match, subrepos, ignored, clean, unknown):
1022 '''Determine the status of the working copy relative to the
1023 '''Determine the status of the working copy relative to the
1023 dirstate and return a pair of (unsure, status), where status is of type
1024 dirstate and return a pair of (unsure, status), where status is of type
1024 scmutil.status and:
1025 scmutil.status and:
1025
1026
1026 unsure:
1027 unsure:
1027 files that might have been modified since the dirstate was
1028 files that might have been modified since the dirstate was
1028 written, but need to be read to be sure (size is the same
1029 written, but need to be read to be sure (size is the same
1029 but mtime differs)
1030 but mtime differs)
1030 status.modified:
1031 status.modified:
1031 files that have definitely been modified since the dirstate
1032 files that have definitely been modified since the dirstate
1032 was written (different size or mode)
1033 was written (different size or mode)
1033 status.clean:
1034 status.clean:
1034 files that have definitely not been modified since the
1035 files that have definitely not been modified since the
1035 dirstate was written
1036 dirstate was written
1036 '''
1037 '''
1037 listignored, listclean, listunknown = ignored, clean, unknown
1038 listignored, listclean, listunknown = ignored, clean, unknown
1038 lookup, modified, added, unknown, ignored = [], [], [], [], []
1039 lookup, modified, added, unknown, ignored = [], [], [], [], []
1039 removed, deleted, clean = [], [], []
1040 removed, deleted, clean = [], [], []
1040
1041
1041 dmap = self._map
1042 dmap = self._map
1042 dmap.preload()
1043 dmap.preload()
1043 dcontains = dmap.__contains__
1044 dcontains = dmap.__contains__
1044 dget = dmap.__getitem__
1045 dget = dmap.__getitem__
1045 ladd = lookup.append # aka "unsure"
1046 ladd = lookup.append # aka "unsure"
1046 madd = modified.append
1047 madd = modified.append
1047 aadd = added.append
1048 aadd = added.append
1048 uadd = unknown.append
1049 uadd = unknown.append
1049 iadd = ignored.append
1050 iadd = ignored.append
1050 radd = removed.append
1051 radd = removed.append
1051 dadd = deleted.append
1052 dadd = deleted.append
1052 cadd = clean.append
1053 cadd = clean.append
1053 mexact = match.exact
1054 mexact = match.exact
1054 dirignore = self._dirignore
1055 dirignore = self._dirignore
1055 checkexec = self._checkexec
1056 checkexec = self._checkexec
1056 copymap = self._map.copymap
1057 copymap = self._map.copymap
1057 lastnormaltime = self._lastnormaltime
1058 lastnormaltime = self._lastnormaltime
1058
1059
1059 # We need to do full walks when either
1060 # We need to do full walks when either
1060 # - we're listing all clean files, or
1061 # - we're listing all clean files, or
1061 # - match.traversedir does something, because match.traversedir should
1062 # - match.traversedir does something, because match.traversedir should
1062 # be called for every dir in the working dir
1063 # be called for every dir in the working dir
1063 full = listclean or match.traversedir is not None
1064 full = listclean or match.traversedir is not None
1064 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1065 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1065 full=full).iteritems():
1066 full=full).iteritems():
1066 if not dcontains(fn):
1067 if not dcontains(fn):
1067 if (listignored or mexact(fn)) and dirignore(fn):
1068 if (listignored or mexact(fn)) and dirignore(fn):
1068 if listignored:
1069 if listignored:
1069 iadd(fn)
1070 iadd(fn)
1070 else:
1071 else:
1071 uadd(fn)
1072 uadd(fn)
1072 continue
1073 continue
1073
1074
1074 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1075 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1075 # written like that for performance reasons. dmap[fn] is not a
1076 # written like that for performance reasons. dmap[fn] is not a
1076 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1077 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1077 # opcode has fast paths when the value to be unpacked is a tuple or
1078 # opcode has fast paths when the value to be unpacked is a tuple or
1078 # a list, but falls back to creating a full-fledged iterator in
1079 # a list, but falls back to creating a full-fledged iterator in
1079 # general. That is much slower than simply accessing and storing the
1080 # general. That is much slower than simply accessing and storing the
1080 # tuple members one by one.
1081 # tuple members one by one.
1081 t = dget(fn)
1082 t = dget(fn)
1082 state = t[0]
1083 state = t[0]
1083 mode = t[1]
1084 mode = t[1]
1084 size = t[2]
1085 size = t[2]
1085 time = t[3]
1086 time = t[3]
1086
1087
1087 if not st and state in "nma":
1088 if not st and state in "nma":
1088 dadd(fn)
1089 dadd(fn)
1089 elif state == 'n':
1090 elif state == 'n':
1090 if (size >= 0 and
1091 if (size >= 0 and
1091 ((size != st.st_size and size != st.st_size & _rangemask)
1092 ((size != st.st_size and size != st.st_size & _rangemask)
1092 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1093 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1093 or size == -2 # other parent
1094 or size == -2 # other parent
1094 or fn in copymap):
1095 or fn in copymap):
1095 madd(fn)
1096 madd(fn)
1096 elif (time != st[stat.ST_MTIME]
1097 elif (time != st[stat.ST_MTIME]
1097 and time != st[stat.ST_MTIME] & _rangemask):
1098 and time != st[stat.ST_MTIME] & _rangemask):
1098 ladd(fn)
1099 ladd(fn)
1099 elif st[stat.ST_MTIME] == lastnormaltime:
1100 elif st[stat.ST_MTIME] == lastnormaltime:
1100 # fn may have just been marked as normal and it may have
1101 # fn may have just been marked as normal and it may have
1101 # changed in the same second without changing its size.
1102 # changed in the same second without changing its size.
1102 # This can happen if we quickly do multiple commits.
1103 # This can happen if we quickly do multiple commits.
1103 # Force lookup, so we don't miss such a racy file change.
1104 # Force lookup, so we don't miss such a racy file change.
1104 ladd(fn)
1105 ladd(fn)
1105 elif listclean:
1106 elif listclean:
1106 cadd(fn)
1107 cadd(fn)
1107 elif state == 'm':
1108 elif state == 'm':
1108 madd(fn)
1109 madd(fn)
1109 elif state == 'a':
1110 elif state == 'a':
1110 aadd(fn)
1111 aadd(fn)
1111 elif state == 'r':
1112 elif state == 'r':
1112 radd(fn)
1113 radd(fn)
1113
1114
1114 return (lookup, scmutil.status(modified, added, removed, deleted,
1115 return (lookup, scmutil.status(modified, added, removed, deleted,
1115 unknown, ignored, clean))
1116 unknown, ignored, clean))
1116
1117
def matches(self, match):
    """Return every file in the dirstate (in whatever state) that *match*
    selects.
    """
    entries = self._map
    # Matching everything: the dirstate's own key collection is the answer.
    if match.always():
        return entries.keys()
    wanted = match.files()
    if match.isexact():
        # Fast path: an exact matcher names each file explicitly, and the
        # pattern list is typically far smaller than the dirstate, so
        # filter the patterns against the map rather than the other way
        # around.
        return [name for name in wanted if name in entries]
    if match.prefix() and all(name in entries for name in wanted):
        # Fast path: every pattern is known to be a tracked file, so the
        # pattern list itself is already the result.
        return list(wanted)
    # Slow path: run the matcher over every file in the dirstate.
    return [name for name in entries if match(name)]
1134
1135
1135 def _actualfilename(self, tr):
1136 def _actualfilename(self, tr):
1136 if tr:
1137 if tr:
1137 return self._pendingfilename
1138 return self._pendingfilename
1138 else:
1139 else:
1139 return self._filename
1140 return self._filename
1140
1141
def savebackup(self, tr, backupname):
    '''Save current dirstate into backup file'''
    target = self._actualfilename(tr)
    assert backupname != target

    # Write with '_writedirstate' rather than 'write': the latter skips
    # writing while a transaction is running, but the backup must capture
    # the dirstate as it is at this very point.
    if self._dirty or not self._opener.exists(target):
        self._writedirstate(self._opener(target, "w", atomictemp=True,
                                         checkambig=True))

    if tr:
        # Make subsequent tr.writepending return True for the changes
        # written out above, even if the dirstate never changes again
        # during this transaction.
        tr.addfilegenerator('dirstate', (self._filename,),
                            self._writedirstate, location='plain')

        # Ensure the pending file written above is unlinked on failure,
        # even if tr.writepending is not invoked until the end of the
        # transaction.
        tr.registertmp(target, location='plain')

    self._opener.tryunlink(backupname)
    # A hardlink backup is safe because _writedirstate always writes via
    # an "atomictemp=True" file, so the original is never mutated in place.
    util.copyfile(self._opener.join(target),
                  self._opener.join(backupname), hardlink=True)
1170
1171
def restorebackup(self, tr, backupname):
    '''Restore dirstate by backup file'''
    # Invalidate first: this prevents "wlock.release()" from writing the
    # (now stale) in-memory dirstate out after the on-disk restore below.
    self.invalidate()
    target = self._actualfilename(tr)
    opener = self._opener
    if util.samefile(opener.join(backupname), opener.join(target)):
        # backup and live file are already the same file: just drop the
        # backup name
        opener.unlink(backupname)
    else:
        opener.rename(backupname, target, checkambig=True)
1182
1183
def clearbackup(self, tr, backupname):
    '''Clear backup file'''
    # The transaction argument is unused; the backup is simply deleted.
    self._opener.unlink(backupname)
1186
1187
class dirstatemap(object):
    """Container for the dirstate's contents.

    State held here:

    - `identity`: identity of the dirstate file, usable to detect when the
      file has been changed behind our back.

    - `parents`: pair of working-copy parents, updated via `setparents`.

    - the map itself: filename -> (state, mode, size, mtime) tuple, where
      state is one character ('n'ormal, 'a'dded, 'r'emoved or 'm'erged).
      Read it by treating this object as a dict; update it through the
      `addfile`, `removefile` and `dropfile` methods.

    - `copymap`: copy/rename destination filename -> source filename.

    Derived views onto the state:

    - `nonnormalset`: filenames whose state is not 'n', plus those that
      are 'n' but carry an mtime of -1 ('normallookup').

    - `otherparentset`: filenames recorded as coming from the second
      parent while a merge is in progress.

    - `filefoldmap`: case-normalized filename -> filename as it appears
      in the dirstate.

    - `dirfoldmap`: case-normalized directory name -> directory name as
      it appears in the dirstate.
    """

    def __init__(self, ui, opener, root):
        self._ui = ui
        self._opener = opener
        self._root = root
        self._filename = 'dirstate'

        self._parents = None
        self._dirtyparents = False

        # for consistent view between _pl() and _read() invocations
        self._pendingmode = None

    @propertycache
    def _map(self):
        # Assign first so that read() fills the very dict we return.
        self._map = {}
        self.read()
        return self._map

    @propertycache
    def copymap(self):
        # Assign first; touching self._map triggers read(), which
        # populates this dict as a side effect.
        self.copymap = {}
        self._map
        return self.copymap

    def clear(self):
        """Forget every entry and reset the parents to null."""
        self._map.clear()
        self.copymap.clear()
        self.setparents(nullid, nullid)
        # Drop each derived/cached view so it is rebuilt on next access.
        for cached in ("_dirs", "_alldirs", "filefoldmap", "dirfoldmap",
                       "nonnormalset", "otherparentset"):
            util.clearcachedproperty(self, cached)

    def items(self):
        return self._map.iteritems()

    # forward for python2,3 compat
    iteritems = items

    def __len__(self):
        return len(self._map)

    def __iter__(self):
        return iter(self._map)

    def get(self, key, default=None):
        return self._map.get(key, default)

    def __contains__(self, key):
        return key in self._map

    def __getitem__(self, key):
        return self._map[key]

    def keys(self):
        return self._map.keys()

    def preload(self):
        """Load the underlying data if it has not been loaded yet."""
        self._map

    def addfile(self, f, oldstate, state, mode, size, mtime):
        """Add a tracked file to the dirstate."""
        # Keep the cached directory sets in sync, but only when they have
        # actually been materialized (i.e. are present in __dict__).
        if oldstate in "?r" and r"_dirs" in self.__dict__:
            self._dirs.addpath(f)
        if oldstate == "?" and r"_alldirs" in self.__dict__:
            self._alldirs.addpath(f)
        self._map[f] = dirstatetuple(state, mode, size, mtime)
        if state != 'n' or mtime == -1:
            self.nonnormalset.add(f)
        if size == -2:
            # size sentinel: content comes from the other merge parent
            self.otherparentset.add(f)

    def removefile(self, f, oldstate, size):
        """
        Mark a file as removed in the dirstate.

        The `size` parameter is used to store sentinel values that indicate
        the file's previous state. In the future, we should refactor this
        to be more explicit about what that state is.
        """
        if oldstate not in "?r" and r"_dirs" in self.__dict__:
            self._dirs.delpath(f)
        if oldstate == "?" and r"_alldirs" in self.__dict__:
            self._alldirs.addpath(f)
        if r"filefoldmap" in self.__dict__:
            self.filefoldmap.pop(util.normcase(f), None)
        self._map[f] = dirstatetuple('r', 0, size, 0)
        self.nonnormalset.add(f)

    def dropfile(self, f, oldstate):
        """
        Remove a file from the dirstate entirely.

        Returns True if the file was previously recorded.
        """
        existed = self._map.pop(f, None) is not None
        if existed:
            if oldstate != "r" and r"_dirs" in self.__dict__:
                self._dirs.delpath(f)
            if r"_alldirs" in self.__dict__:
                self._alldirs.delpath(f)
        if r"filefoldmap" in self.__dict__:
            self.filefoldmap.pop(util.normcase(f), None)
        self.nonnormalset.discard(f)
        return existed

    def clearambiguoustimes(self, files, now):
        # Entries whose mtime equals the write time are ambiguous: a file
        # could change again within the same second without the entry
        # noticing.  Force such entries into the lookup set by storing an
        # mtime of -1.
        for f in files:
            e = self.get(f)
            if e is not None and e[0] == 'n' and e[3] == now:
                self._map[f] = dirstatetuple(e[0], e[1], e[2], -1)
                self.nonnormalset.add(f)

    def nonnormalentries(self):
        '''Compute the nonnormal dirstate entries from the dmap'''
        try:
            # C fast path, when the compiled parsers module provides it
            return parsers.nonnormalotherparententries(self._map)
        except AttributeError:
            nonnormal = set()
            fromp2 = set()
            for fname, e in self._map.iteritems():
                if e[0] != 'n' or e[3] == -1:
                    nonnormal.add(fname)
                if e[0] == 'n' and e[2] == -2:
                    fromp2.add(fname)
            return nonnormal, fromp2

    @propertycache
    def filefoldmap(self):
        """Returns a dictionary mapping normalized case paths to their
        non-normalized versions.
        """
        makefilefoldmap = getattr(parsers, r'make_file_foldmap', None)
        if makefilefoldmap is not None:
            # C fast path
            return makefilefoldmap(self._map, util.normcasespec,
                                   util.normcasefallback)

        table = {}
        normcase = util.normcase
        for name, entry in self._map.iteritems():
            if entry[0] != 'r':
                table[normcase(name)] = name
        table['.'] = '.'  # prevents useless util.fspath() invocation
        return table

    def hastrackeddir(self, d):
        """
        Returns True if the dirstate contains a tracked (not removed) file
        in this directory.
        """
        return d in self._dirs

    def hasdir(self, d):
        """
        Returns True if the dirstate contains a file (tracked or removed)
        in this directory.
        """
        return d in self._alldirs

    @propertycache
    def _dirs(self):
        # directories holding at least one tracked (non-'r') file
        return util.dirs(self._map, 'r')

    @propertycache
    def _alldirs(self):
        # directories holding any file at all, removed or not
        return util.dirs(self._map)

    def _opendirstatefile(self):
        fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
        if self._pendingmode is not None and self._pendingmode != mode:
            # mixing pending and non-pending reads would give an
            # inconsistent view; refuse
            fp.close()
            raise error.Abort(_('working directory state may be '
                                'changed parallelly'))
        self._pendingmode = mode
        return fp

    def parents(self):
        if not self._parents:
            try:
                fp = self._opendirstatefile()
                data = fp.read(40)
                fp.close()
            except IOError as err:
                if err.errno != errno.ENOENT:
                    raise
                # File doesn't exist, so the current state is empty
                data = ''

            if len(data) == 40:
                # two 20-byte binary nodeids back to back
                self._parents = (data[:20], data[20:40])
            elif not data:
                self._parents = (nullid, nullid)
            else:
                raise error.Abort(_('working directory state appears '
                                    'damaged!'))

        return self._parents

    def setparents(self, p1, p2):
        self._parents = (p1, p2)
        self._dirtyparents = True

    def read(self):
        # ignore HG_PENDING because identity is used only for writing
        self.identity = util.filestat.frompath(
            self._opener.join(self._filename))

        try:
            fp = self._opendirstatefile()
            try:
                data = fp.read()
            finally:
                fp.close()
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            return
        if not data:
            return

        if util.safehasattr(parsers, 'dict_new_presized'):
            # Presize the map using an empirical ~71 bytes/entry estimate:
            # a linear regression over real-world repos (all over 10,000
            # files) gives ~85 bytes per entry, minus ~20% because
            # overfilling a presized dict is much cheaper than resizing.
            # This heuristic is imperfect; a future dirstate format should
            # simply record the entry count on write.
            self._map = parsers.dict_new_presized(len(data) // 71)

        # parse_dirstate creates one tuple per file, and every container
        # allocation advances CPython's GC-threshold counter, so parsing
        # would trigger O(number of files) garbage collections.  Disable
        # the collector for the duration via util.nogc — the function
        # itself lives in a C/Rust module and cannot be decorated there.
        if rustext is not None:
            parse_dirstate = rustext.dirstate.parse_dirstate
        else:
            parse_dirstate = parsers.parse_dirstate

        parse_dirstate = util.nogc(parse_dirstate)
        p = parse_dirstate(self._map, self.copymap, data)
        if not self._dirtyparents:
            self.setparents(*p)

        # Avoid excess attribute lookups by fast pathing certain checks
        self.__contains__ = self._map.__contains__
        self.__getitem__ = self._map.__getitem__
        self.get = self._map.get

    def write(self, st, now):
        if rustext is not None:
            pack_dirstate = rustext.dirstate.pack_dirstate
        else:
            pack_dirstate = parsers.pack_dirstate

        st.write(pack_dirstate(self._map, self.copymap,
                               self.parents(), now))
        st.close()
        self._dirtyparents = False
        # packing may normalize entries in place, so refresh the views
        self.nonnormalset, self.otherparentset = self.nonnormalentries()

    @propertycache
    def nonnormalset(self):
        nonnormal, fromp2 = self.nonnormalentries()
        # both sets come from the same pass; cache the sibling too
        self.otherparentset = fromp2
        return nonnormal

    @propertycache
    def otherparentset(self):
        nonnormal, fromp2 = self.nonnormalentries()
        # both sets come from the same pass; cache the sibling too
        self.nonnormalset = nonnormal
        return fromp2

    @propertycache
    def identity(self):
        # reading the map records the file identity as a side effect
        self._map
        return self.identity

    @propertycache
    def dirfoldmap(self):
        table = {}
        normcase = util.normcase
        for name in self._dirs:
            table[normcase(name)] = name
        return table
General Comments 0
You need to be logged in to leave comments. Login now