##// END OF EJS Templates
dirstate: drop workaround for '.' matching root directory...
Martin von Zweigbergk -
r42529:448486e1 default
parent child Browse files
Show More
@@ -1,1523 +1,1521
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import nullid
17 from .node import nullid
18 from . import (
18 from . import (
19 encoding,
19 encoding,
20 error,
20 error,
21 match as matchmod,
21 match as matchmod,
22 pathutil,
22 pathutil,
23 policy,
23 policy,
24 pycompat,
24 pycompat,
25 scmutil,
25 scmutil,
26 txnutil,
26 txnutil,
27 util,
27 util,
28 )
28 )
29
29
30 try:
30 try:
31 from . import rustext
31 from . import rustext
32 rustext.__name__ # force actual import (see hgdemandimport)
32 rustext.__name__ # force actual import (see hgdemandimport)
33 except ImportError:
33 except ImportError:
34 rustext = None
34 rustext = None
35
35
36 parsers = policy.importmod(r'parsers')
36 parsers = policy.importmod(r'parsers')
37
37
38 propertycache = util.propertycache
38 propertycache = util.propertycache
39 filecache = scmutil.filecache
39 filecache = scmutil.filecache
40 _rangemask = 0x7fffffff
40 _rangemask = 0x7fffffff
41
41
42 dirstatetuple = parsers.dirstatetuple
42 dirstatetuple = parsers.dirstatetuple
43
43
class repocache(filecache):
    """filecache for files in .hg/"""
    def join(self, obj, fname):
        # Resolve fname relative to the repository's .hg directory.
        return obj._opener.join(fname)
48
48
class rootcache(filecache):
    """filecache for files in the repository root"""
    def join(self, obj, fname):
        # Resolve fname relative to the working-directory root.
        return obj._join(fname)
53
53
def _getfsnow(vfs):
    '''Get "now" timestamp on filesystem'''
    # Create a scratch file and read its mtime back: this reflects the
    # filesystem's own clock and timestamp granularity rather than the
    # system clock, which is what dirstate comparisons need.
    fd, scratch = vfs.mkstemp()
    try:
        return os.fstat(fd)[stat.ST_MTIME]
    finally:
        os.close(fd)
        vfs.unlink(scratch)
62
62
63 class dirstate(object):
63 class dirstate(object):
64
64
    def __init__(self, opener, ui, root, validate, sparsematchfn):
        '''Create a new dirstate object.

        opener is an open()-like callable that can be used to open the
        dirstate file; root is the root of the directory tracked by
        the dirstate.

        validate is applied to parent nodeids before they are returned;
        sparsematchfn returns the matcher for the sparse checkout.
        '''
        self._opener = opener
        self._validate = validate
        self._root = root
        self._sparsematchfn = sparsematchfn
        # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
        # UNC path pointing to root share (issue4557)
        self._rootdir = pathutil.normasprefix(root)
        # True when in-memory state differs from what is on disk
        self._dirty = False
        self._lastnormaltime = 0
        self._ui = ui
        self._filecache = {}
        # nesting depth of parentchange() context managers
        self._parentwriters = 0
        self._filename = 'dirstate'
        self._pendingfilename = '%s.pending' % self._filename
        self._plchangecallbacks = {}
        self._origpl = None
        # files touched since the last write, used for delayed writes
        self._updatedfiles = set()
        self._mapcls = dirstatemap
        # Access and cache cwd early, so we don't access it for the first time
        # after a working-copy update caused it to not exist (accessing it then
        # raises an exception).
        self._cwd
94
94
    @contextlib.contextmanager
    def parentchange(self):
        '''Context manager for handling dirstate parents.

        If an exception occurs in the scope of the context manager,
        the incoherent dirstate won't be written when wlock is
        released.
        '''
        self._parentwriters += 1
        yield
        # Typically we want the "undo" step of a context manager in a
        # finally block so it happens even when an exception
        # occurs. In this case, however, we only want to decrement
        # parentwriters if the code in the with statement exits
        # normally, so we don't have a try/finally here on purpose.
        self._parentwriters -= 1
111
111
112 def pendingparentchange(self):
112 def pendingparentchange(self):
113 '''Returns true if the dirstate is in the middle of a set of changes
113 '''Returns true if the dirstate is in the middle of a set of changes
114 that modify the dirstate parent.
114 that modify the dirstate parent.
115 '''
115 '''
116 return self._parentwriters > 0
116 return self._parentwriters > 0
117
117
    @propertycache
    def _map(self):
        """Return the dirstate contents (see documentation for dirstatemap)."""
        # Assigning to self._map replaces the propertycache descriptor on
        # this instance, so the map is only built once per dirstate object.
        self._map = self._mapcls(self._ui, self._opener, self._root)
        return self._map
123
123
    @property
    def _sparsematcher(self):
        """The matcher for the sparse checkout.

        The working directory may not include every file from a manifest. The
        matcher obtained by this property will match a path if it is to be
        included in the working directory.
        """
        # TODO there is potential to cache this property. For now, the matcher
        # is resolved on every access. (But the called function does use a
        # cache to keep the lookup fast.)
        return self._sparsematchfn()
136
136
137 @repocache('branch')
137 @repocache('branch')
138 def _branch(self):
138 def _branch(self):
139 try:
139 try:
140 return self._opener.read("branch").strip() or "default"
140 return self._opener.read("branch").strip() or "default"
141 except IOError as inst:
141 except IOError as inst:
142 if inst.errno != errno.ENOENT:
142 if inst.errno != errno.ENOENT:
143 raise
143 raise
144 return "default"
144 return "default"
145
145
    @property
    def _pl(self):
        # Pair of parent nodeids of the working directory.
        return self._map.parents()

    def hasdir(self, d):
        # True if d is a directory containing tracked files.
        return self._map.hastrackeddir(d)
152
152
153 @rootcache('.hgignore')
153 @rootcache('.hgignore')
154 def _ignore(self):
154 def _ignore(self):
155 files = self._ignorefiles()
155 files = self._ignorefiles()
156 if not files:
156 if not files:
157 return matchmod.never()
157 return matchmod.never()
158
158
159 pats = ['include:%s' % f for f in files]
159 pats = ['include:%s' % f for f in files]
160 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
160 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
161
161
    @propertycache
    def _slash(self):
        # True when paths should be displayed with '/' on this platform.
        return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'

    @propertycache
    def _checklink(self):
        # True if the filesystem supports symlinks.
        return util.checklink(self._root)

    @propertycache
    def _checkexec(self):
        # True if the filesystem supports the executable bit.
        return util.checkexec(self._root)

    @propertycache
    def _checkcase(self):
        # True on case-insensitive filesystems (probed against '.hg').
        return not util.fscasesensitive(self._join('.hg'))
177
177
    def _join(self, f):
        # much faster than os.path.join()
        # it's safe because f is always a relative path
        return self._rootdir + f
182
182
    def flagfunc(self, buildfallback):
        """Return a callable mapping a tracked path to its flags.

        The returned function yields 'l' for symlinks, 'x' for executable
        files and '' otherwise.  When the filesystem cannot express symlinks
        and/or exec bits, buildfallback() supplies a function that answers
        for the missing dimension(s).
        """
        if self._checklink and self._checkexec:
            # Filesystem answers both questions; one lstat per path.
            def f(x):
                try:
                    st = os.lstat(self._join(x))
                    if util.statislink(st):
                        return 'l'
                    if util.statisexec(st):
                        return 'x'
                except OSError:
                    pass
                return ''
            return f

        # At least one dimension must come from the fallback.
        fallback = buildfallback()
        if self._checklink:
            def f(x):
                if os.path.islink(self._join(x)):
                    return 'l'
                if 'x' in fallback(x):
                    return 'x'
                return ''
            return f
        if self._checkexec:
            def f(x):
                if 'l' in fallback(x):
                    return 'l'
                if util.isexec(self._join(x)):
                    return 'x'
                return ''
            return f
        else:
            return fallback
216
216
217 @propertycache
217 @propertycache
218 def _cwd(self):
218 def _cwd(self):
219 # internal config: ui.forcecwd
219 # internal config: ui.forcecwd
220 forcecwd = self._ui.config('ui', 'forcecwd')
220 forcecwd = self._ui.config('ui', 'forcecwd')
221 if forcecwd:
221 if forcecwd:
222 return forcecwd
222 return forcecwd
223 return encoding.getcwd()
223 return encoding.getcwd()
224
224
225 def getcwd(self):
225 def getcwd(self):
226 '''Return the path from which a canonical path is calculated.
226 '''Return the path from which a canonical path is calculated.
227
227
228 This path should be used to resolve file patterns or to convert
228 This path should be used to resolve file patterns or to convert
229 canonical paths back to file paths for display. It shouldn't be
229 canonical paths back to file paths for display. It shouldn't be
230 used to get real file paths. Use vfs functions instead.
230 used to get real file paths. Use vfs functions instead.
231 '''
231 '''
232 cwd = self._cwd
232 cwd = self._cwd
233 if cwd == self._root:
233 if cwd == self._root:
234 return ''
234 return ''
235 # self._root ends with a path separator if self._root is '/' or 'C:\'
235 # self._root ends with a path separator if self._root is '/' or 'C:\'
236 rootsep = self._root
236 rootsep = self._root
237 if not util.endswithsep(rootsep):
237 if not util.endswithsep(rootsep):
238 rootsep += pycompat.ossep
238 rootsep += pycompat.ossep
239 if cwd.startswith(rootsep):
239 if cwd.startswith(rootsep):
240 return cwd[len(rootsep):]
240 return cwd[len(rootsep):]
241 else:
241 else:
242 # we're outside the repo. return an absolute path.
242 # we're outside the repo. return an absolute path.
243 return cwd
243 return cwd
244
244
245 def pathto(self, f, cwd=None):
245 def pathto(self, f, cwd=None):
246 if cwd is None:
246 if cwd is None:
247 cwd = self.getcwd()
247 cwd = self.getcwd()
248 path = util.pathto(self._root, cwd, f)
248 path = util.pathto(self._root, cwd, f)
249 if self._slash:
249 if self._slash:
250 return util.pconvert(path)
250 return util.pconvert(path)
251 return path
251 return path
252
252
    def __getitem__(self, key):
        '''Return the current state of key (a filename) in the dirstate.

        States are:
          n  normal
          m  needs merging
          r  marked for removal
          a  marked for addition
          ?  not tracked
        '''
        # Untracked files fall back to the one-element tuple ("?",).
        return self._map.get(key, ("?",))[0]
264
264
    def __contains__(self, key):
        # True if the file is present in the dirstate (any state).
        return key in self._map

    def __iter__(self):
        # Iterate tracked filenames in sorted order.
        return iter(sorted(self._map))

    def items(self):
        # Iterate (filename, dirstatetuple) pairs, unsorted.
        return self._map.iteritems()

    iteritems = items
275
275
    def parents(self):
        # Both working-directory parents, run through the validate hook.
        return [self._validate(p) for p in self._pl]

    def p1(self):
        # First working-directory parent.
        return self._validate(self._pl[0])

    def p2(self):
        # Second working-directory parent (nullid unless merging).
        return self._validate(self._pl[1])

    def branch(self):
        # Working-directory branch name, in the local encoding.
        return encoding.tolocal(self._branch)
287
287
    def setparents(self, p1, p2=nullid):
        """Set dirstate parents to p1 and p2.

        When moving from two parents to one, 'm' merged entries are
        adjusted to normal and previous copy records discarded and
        returned by the call.

        See localrepo.setparents()
        """
        if self._parentwriters == 0:
            raise ValueError("cannot set dirstate parent outside of "
                             "dirstate.parentchange context manager")

        self._dirty = True
        oldp2 = self._pl[1]
        if self._origpl is None:
            self._origpl = self._pl
        self._map.setparents(p1, p2)
        copies = {}
        if oldp2 != nullid and p2 == nullid:
            # Leaving a merge: any nonnormal or other-parent entry may carry
            # merge-only state that must be folded back to single-parent form.
            candidatefiles = self._map.nonnormalset.union(
                self._map.otherparentset)
            for f in candidatefiles:
                s = self._map.get(f)
                if s is None:
                    continue

                # Discard 'm' markers when moving away from a merge state
                if s[0] == 'm':
                    source = self._map.copymap.get(f)
                    if source:
                        copies[f] = source
                    self.normallookup(f)
                # Also fix up otherparent markers
                elif s[0] == 'n' and s[2] == -2:
                    source = self._map.copymap.get(f)
                    if source:
                        copies[f] = source
                    self.add(f)
        return copies
328
328
    def setbranch(self, branch):
        """Persist the working-directory branch name to .hg/branch."""
        # Update the cached value first, then write it out atomically.
        self.__class__._branch.set(self, encoding.fromlocal(branch))
        f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
        try:
            f.write(self._branch + '\n')
            f.close()

            # make sure filecache has the correct stat info for _branch after
            # replacing the underlying file
            ce = self._filecache['_branch']
            if ce:
                ce.refresh()
        except: # re-raises
            f.discard()
            raise
344
344
345 def invalidate(self):
345 def invalidate(self):
346 '''Causes the next access to reread the dirstate.
346 '''Causes the next access to reread the dirstate.
347
347
348 This is different from localrepo.invalidatedirstate() because it always
348 This is different from localrepo.invalidatedirstate() because it always
349 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
349 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
350 check whether the dirstate has changed before rereading it.'''
350 check whether the dirstate has changed before rereading it.'''
351
351
352 for a in (r"_map", r"_branch", r"_ignore"):
352 for a in (r"_map", r"_branch", r"_ignore"):
353 if a in self.__dict__:
353 if a in self.__dict__:
354 delattr(self, a)
354 delattr(self, a)
355 self._lastnormaltime = 0
355 self._lastnormaltime = 0
356 self._dirty = False
356 self._dirty = False
357 self._updatedfiles.clear()
357 self._updatedfiles.clear()
358 self._parentwriters = 0
358 self._parentwriters = 0
359 self._origpl = None
359 self._origpl = None
360
360
    def copy(self, source, dest):
        """Mark dest as a copy of source. Unmark dest if source is None."""
        if source == dest:
            return
        self._dirty = True
        if source is not None:
            self._map.copymap[dest] = source
            self._updatedfiles.add(source)
            self._updatedfiles.add(dest)
        # pop() both removes the record and tells us whether one existed;
        # only a previously-recorded copy counts as an update.
        elif self._map.copymap.pop(dest, None):
            self._updatedfiles.add(dest)
372
372
    def copied(self, file):
        # Copy source of file, or None if it is not a recorded copy.
        return self._map.copymap.get(file, None)

    def copies(self):
        # Mapping of dest -> source for all recorded copies.
        return self._map.copymap
378
378
    def _addpath(self, f, state, mode, size, mtime):
        """Add or update f in the dirstate with the given state and stat data.

        For newly tracked files, validates the filename and rejects paths
        that clash with tracked directories or files before delegating to
        the map.
        """
        oldstate = self[f]
        if state == 'a' or oldstate == 'r':
            scmutil.checkfilename(f)
            if self._map.hastrackeddir(f):
                raise error.Abort(_('directory %r already in dirstate') %
                                  pycompat.bytestr(f))
            # shadows
            for d in util.finddirs(f):
                if self._map.hastrackeddir(d):
                    break
                entry = self._map.get(d)
                if entry is not None and entry[0] != 'r':
                    raise error.Abort(
                        _('file %r in dirstate clashes with %r') %
                        (pycompat.bytestr(d), pycompat.bytestr(f)))
        self._dirty = True
        self._updatedfiles.add(f)
        self._map.addfile(f, oldstate, state, mode, size, mtime)
398
398
    def normal(self, f):
        '''Mark a file normal and clean.'''
        s = os.lstat(self._join(f))
        mtime = s[stat.ST_MTIME]
        # size and mtime are masked to fit the on-disk 32-bit fields
        self._addpath(f, 'n', s.st_mode,
                      s.st_size & _rangemask, mtime & _rangemask)
        self._map.copymap.pop(f, None)
        if f in self._map.nonnormalset:
            self._map.nonnormalset.remove(f)
        if mtime > self._lastnormaltime:
            # Remember the most recent modification timeslot for status(),
            # to make sure we won't miss future size-preserving file content
            # modifications that happen within the same timeslot.
            self._lastnormaltime = mtime
413
413
    def normallookup(self, f):
        '''Mark a file normal, but possibly dirty.'''
        if self._pl[1] != nullid:
            # if there is a merge going on and the file was either
            # in state 'm' (-1) or coming from other parent (-2) before
            # being removed, restore that state.
            entry = self._map.get(f)
            if entry is not None:
                if entry[0] == 'r' and entry[2] in (-1, -2):
                    source = self._map.copymap.get(f)
                    if entry[2] == -1:
                        self.merge(f)
                    elif entry[2] == -2:
                        self.otherparent(f)
                    if source:
                        self.copy(source, f)
                    return
                # already in a merge-related state: nothing to do
                if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
                    return
        # mode 0, size -1 and mtime -1 force a later status() to re-check
        # the file contents ("lookup")
        self._addpath(f, 'n', 0, -1, -1)
        self._map.copymap.pop(f, None)
435
435
    def otherparent(self, f):
        '''Mark as coming from the other parent, always dirty.'''
        if self._pl[1] == nullid:
            raise error.Abort(_("setting %r to other parent "
                                "only allowed in merges") % f)
        if f in self and self[f] == 'n':
            # merge-like
            self._addpath(f, 'm', 0, -2, -1)
        else:
            # add-like
            self._addpath(f, 'n', 0, -2, -1)
        self._map.copymap.pop(f, None)
448
448
    def add(self, f):
        '''Mark a file added.'''
        # mode 0, size -1, mtime -1: stat data is meaningless for 'a' entries
        self._addpath(f, 'a', 0, -1, -1)
        self._map.copymap.pop(f, None)
453
453
    def remove(self, f):
        '''Mark a file removed.'''
        self._dirty = True
        oldstate = self[f]
        size = 0
        if self._pl[1] != nullid:
            entry = self._map.get(f)
            if entry is not None:
                # backup the previous state in the size field so it can be
                # restored by normallookup() if the merge is backed out
                if entry[0] == 'm': # merge
                    size = -1
                elif entry[0] == 'n' and entry[2] == -2: # other parent
                    size = -2
                    self._map.otherparentset.add(f)
        self._updatedfiles.add(f)
        self._map.removefile(f, oldstate, size)
        if size == 0:
            self._map.copymap.pop(f, None)
472
472
473 def merge(self, f):
473 def merge(self, f):
474 '''Mark a file merged.'''
474 '''Mark a file merged.'''
475 if self._pl[1] == nullid:
475 if self._pl[1] == nullid:
476 return self.normallookup(f)
476 return self.normallookup(f)
477 return self.otherparent(f)
477 return self.otherparent(f)
478
478
    def drop(self, f):
        '''Drop a file from the dirstate'''
        oldstate = self[f]
        # dropfile() returns True only if the file was actually tracked
        if self._map.dropfile(f, oldstate):
            self._dirty = True
            self._updatedfiles.add(f)
            self._map.copymap.pop(f, None)
486
486
    def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
        """Discover the on-disk case folding of path and cache it in storemap.

        normed is the case-normalized form of path; exists, when not None,
        says whether the path is already known to exist on disk.  Only
        existing paths are cached, since missing ones have no disk spelling.
        """
        if exists is None:
            exists = os.path.lexists(os.path.join(self._root, path))
        if not exists:
            # Maybe a path component exists
            if not ignoremissing and '/' in path:
                d, f = path.rsplit('/', 1)
                d = self._normalize(d, False, ignoremissing, None)
                folded = d + "/" + f
            else:
                # No path components, preserve original case
                folded = path
        else:
            # recursively normalize leading directory components
            # against dirstate
            if '/' in normed:
                d, f = normed.rsplit('/', 1)
                d = self._normalize(d, False, ignoremissing, True)
                r = self._root + "/" + d
                folded = d + "/" + util.fspath(f, r)
            else:
                folded = util.fspath(normed, self._root)
            storemap[normed] = folded

        return folded
512
512
513 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
513 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
514 normed = util.normcase(path)
514 normed = util.normcase(path)
515 folded = self._map.filefoldmap.get(normed, None)
515 folded = self._map.filefoldmap.get(normed, None)
516 if folded is None:
516 if folded is None:
517 if isknown:
517 if isknown:
518 folded = path
518 folded = path
519 else:
519 else:
520 folded = self._discoverpath(path, normed, ignoremissing, exists,
520 folded = self._discoverpath(path, normed, ignoremissing, exists,
521 self._map.filefoldmap)
521 self._map.filefoldmap)
522 return folded
522 return folded
523
523
    def _normalize(self, path, isknown, ignoremissing=False, exists=None):
        """Case-normalize a path that may name a file or a directory."""
        normed = util.normcase(path)
        # file entries take precedence over directory entries
        folded = self._map.filefoldmap.get(normed, None)
        if folded is None:
            folded = self._map.dirfoldmap.get(normed, None)
        if folded is None:
            if isknown:
                folded = path
            else:
                # store discovered result in dirfoldmap so that future
                # normalizefile calls don't start matching directories
                folded = self._discoverpath(path, normed, ignoremissing, exists,
                                            self._map.dirfoldmap)
        return folded
538
538
    def normalize(self, path, isknown=False, ignoremissing=False):
        '''
        normalize the case of a pathname when on a casefolding filesystem

        isknown specifies whether the filename came from walking the
        disk, to avoid extra filesystem access.

        If ignoremissing is True, missing path are returned
        unchanged. Otherwise, we try harder to normalize possibly
        existing path components.

        The normalized case is determined based on the following precedence:

        - version of name already stored in the dirstate
        - version of name stored on disk
        - version provided via command arguments
        '''

        # On case-sensitive filesystems the path is already canonical.
        if self._checkcase:
            return self._normalize(path, isknown, ignoremissing)
        return path
560
560
561 def clear(self):
561 def clear(self):
562 self._map.clear()
562 self._map.clear()
563 self._lastnormaltime = 0
563 self._lastnormaltime = 0
564 self._updatedfiles.clear()
564 self._updatedfiles.clear()
565 self._dirty = True
565 self._dirty = True
566
566
567 def rebuild(self, parent, allfiles, changedfiles=None):
567 def rebuild(self, parent, allfiles, changedfiles=None):
568 if changedfiles is None:
568 if changedfiles is None:
569 # Rebuild entire dirstate
569 # Rebuild entire dirstate
570 changedfiles = allfiles
570 changedfiles = allfiles
571 lastnormaltime = self._lastnormaltime
571 lastnormaltime = self._lastnormaltime
572 self.clear()
572 self.clear()
573 self._lastnormaltime = lastnormaltime
573 self._lastnormaltime = lastnormaltime
574
574
575 if self._origpl is None:
575 if self._origpl is None:
576 self._origpl = self._pl
576 self._origpl = self._pl
577 self._map.setparents(parent, nullid)
577 self._map.setparents(parent, nullid)
578 for f in changedfiles:
578 for f in changedfiles:
579 if f in allfiles:
579 if f in allfiles:
580 self.normallookup(f)
580 self.normallookup(f)
581 else:
581 else:
582 self.drop(f)
582 self.drop(f)
583
583
584 self._dirty = True
584 self._dirty = True
585
585
586 def identity(self):
586 def identity(self):
587 '''Return identity of dirstate itself to detect changing in storage
587 '''Return identity of dirstate itself to detect changing in storage
588
588
589 If identity of previous dirstate is equal to this, writing
589 If identity of previous dirstate is equal to this, writing
590 changes based on the former dirstate out can keep consistency.
590 changes based on the former dirstate out can keep consistency.
591 '''
591 '''
592 return self._map.identity
592 return self._map.identity
593
593
594 def write(self, tr):
594 def write(self, tr):
595 if not self._dirty:
595 if not self._dirty:
596 return
596 return
597
597
598 filename = self._filename
598 filename = self._filename
599 if tr:
599 if tr:
600 # 'dirstate.write()' is not only for writing in-memory
600 # 'dirstate.write()' is not only for writing in-memory
601 # changes out, but also for dropping ambiguous timestamp.
601 # changes out, but also for dropping ambiguous timestamp.
602 # delayed writing re-raise "ambiguous timestamp issue".
602 # delayed writing re-raise "ambiguous timestamp issue".
603 # See also the wiki page below for detail:
603 # See also the wiki page below for detail:
604 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
604 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
605
605
606 # emulate dropping timestamp in 'parsers.pack_dirstate'
606 # emulate dropping timestamp in 'parsers.pack_dirstate'
607 now = _getfsnow(self._opener)
607 now = _getfsnow(self._opener)
608 self._map.clearambiguoustimes(self._updatedfiles, now)
608 self._map.clearambiguoustimes(self._updatedfiles, now)
609
609
610 # emulate that all 'dirstate.normal' results are written out
610 # emulate that all 'dirstate.normal' results are written out
611 self._lastnormaltime = 0
611 self._lastnormaltime = 0
612 self._updatedfiles.clear()
612 self._updatedfiles.clear()
613
613
614 # delay writing in-memory changes out
614 # delay writing in-memory changes out
615 tr.addfilegenerator('dirstate', (self._filename,),
615 tr.addfilegenerator('dirstate', (self._filename,),
616 self._writedirstate, location='plain')
616 self._writedirstate, location='plain')
617 return
617 return
618
618
619 st = self._opener(filename, "w", atomictemp=True, checkambig=True)
619 st = self._opener(filename, "w", atomictemp=True, checkambig=True)
620 self._writedirstate(st)
620 self._writedirstate(st)
621
621
622 def addparentchangecallback(self, category, callback):
622 def addparentchangecallback(self, category, callback):
623 """add a callback to be called when the wd parents are changed
623 """add a callback to be called when the wd parents are changed
624
624
625 Callback will be called with the following arguments:
625 Callback will be called with the following arguments:
626 dirstate, (oldp1, oldp2), (newp1, newp2)
626 dirstate, (oldp1, oldp2), (newp1, newp2)
627
627
628 Category is a unique identifier to allow overwriting an old callback
628 Category is a unique identifier to allow overwriting an old callback
629 with a newer callback.
629 with a newer callback.
630 """
630 """
631 self._plchangecallbacks[category] = callback
631 self._plchangecallbacks[category] = callback
632
632
633 def _writedirstate(self, st):
633 def _writedirstate(self, st):
634 # notify callbacks about parents change
634 # notify callbacks about parents change
635 if self._origpl is not None and self._origpl != self._pl:
635 if self._origpl is not None and self._origpl != self._pl:
636 for c, callback in sorted(self._plchangecallbacks.iteritems()):
636 for c, callback in sorted(self._plchangecallbacks.iteritems()):
637 callback(self, self._origpl, self._pl)
637 callback(self, self._origpl, self._pl)
638 self._origpl = None
638 self._origpl = None
639 # use the modification time of the newly created temporary file as the
639 # use the modification time of the newly created temporary file as the
640 # filesystem's notion of 'now'
640 # filesystem's notion of 'now'
641 now = util.fstat(st)[stat.ST_MTIME] & _rangemask
641 now = util.fstat(st)[stat.ST_MTIME] & _rangemask
642
642
643 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
643 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
644 # timestamp of each entries in dirstate, because of 'now > mtime'
644 # timestamp of each entries in dirstate, because of 'now > mtime'
645 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite')
645 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite')
646 if delaywrite > 0:
646 if delaywrite > 0:
647 # do we have any files to delay for?
647 # do we have any files to delay for?
648 for f, e in self._map.iteritems():
648 for f, e in self._map.iteritems():
649 if e[0] == 'n' and e[3] == now:
649 if e[0] == 'n' and e[3] == now:
650 import time # to avoid useless import
650 import time # to avoid useless import
651 # rather than sleep n seconds, sleep until the next
651 # rather than sleep n seconds, sleep until the next
652 # multiple of n seconds
652 # multiple of n seconds
653 clock = time.time()
653 clock = time.time()
654 start = int(clock) - (int(clock) % delaywrite)
654 start = int(clock) - (int(clock) % delaywrite)
655 end = start + delaywrite
655 end = start + delaywrite
656 time.sleep(end - clock)
656 time.sleep(end - clock)
657 now = end # trust our estimate that the end is near now
657 now = end # trust our estimate that the end is near now
658 break
658 break
659
659
660 self._map.write(st, now)
660 self._map.write(st, now)
661 self._lastnormaltime = 0
661 self._lastnormaltime = 0
662 self._dirty = False
662 self._dirty = False
663
663
664 def _dirignore(self, f):
664 def _dirignore(self, f):
665 if f == '.':
666 return False
667 if self._ignore(f):
665 if self._ignore(f):
668 return True
666 return True
669 for p in util.finddirs(f):
667 for p in util.finddirs(f):
670 if self._ignore(p):
668 if self._ignore(p):
671 return True
669 return True
672 return False
670 return False
673
671
674 def _ignorefiles(self):
672 def _ignorefiles(self):
675 files = []
673 files = []
676 if os.path.exists(self._join('.hgignore')):
674 if os.path.exists(self._join('.hgignore')):
677 files.append(self._join('.hgignore'))
675 files.append(self._join('.hgignore'))
678 for name, path in self._ui.configitems("ui"):
676 for name, path in self._ui.configitems("ui"):
679 if name == 'ignore' or name.startswith('ignore.'):
677 if name == 'ignore' or name.startswith('ignore.'):
680 # we need to use os.path.join here rather than self._join
678 # we need to use os.path.join here rather than self._join
681 # because path is arbitrary and user-specified
679 # because path is arbitrary and user-specified
682 files.append(os.path.join(self._rootdir, util.expandpath(path)))
680 files.append(os.path.join(self._rootdir, util.expandpath(path)))
683 return files
681 return files
684
682
685 def _ignorefileandline(self, f):
683 def _ignorefileandline(self, f):
686 files = collections.deque(self._ignorefiles())
684 files = collections.deque(self._ignorefiles())
687 visited = set()
685 visited = set()
688 while files:
686 while files:
689 i = files.popleft()
687 i = files.popleft()
690 patterns = matchmod.readpatternfile(i, self._ui.warn,
688 patterns = matchmod.readpatternfile(i, self._ui.warn,
691 sourceinfo=True)
689 sourceinfo=True)
692 for pattern, lineno, line in patterns:
690 for pattern, lineno, line in patterns:
693 kind, p = matchmod._patsplit(pattern, 'glob')
691 kind, p = matchmod._patsplit(pattern, 'glob')
694 if kind == "subinclude":
692 if kind == "subinclude":
695 if p not in visited:
693 if p not in visited:
696 files.append(p)
694 files.append(p)
697 continue
695 continue
698 m = matchmod.match(self._root, '', [], [pattern],
696 m = matchmod.match(self._root, '', [], [pattern],
699 warn=self._ui.warn)
697 warn=self._ui.warn)
700 if m(f):
698 if m(f):
701 return (i, lineno, line)
699 return (i, lineno, line)
702 visited.add(i)
700 visited.add(i)
703 return (None, -1, "")
701 return (None, -1, "")
704
702
705 def _walkexplicit(self, match, subrepos):
703 def _walkexplicit(self, match, subrepos):
706 '''Get stat data about the files explicitly specified by match.
704 '''Get stat data about the files explicitly specified by match.
707
705
708 Return a triple (results, dirsfound, dirsnotfound).
706 Return a triple (results, dirsfound, dirsnotfound).
709 - results is a mapping from filename to stat result. It also contains
707 - results is a mapping from filename to stat result. It also contains
710 listings mapping subrepos and .hg to None.
708 listings mapping subrepos and .hg to None.
711 - dirsfound is a list of files found to be directories.
709 - dirsfound is a list of files found to be directories.
712 - dirsnotfound is a list of files that the dirstate thinks are
710 - dirsnotfound is a list of files that the dirstate thinks are
713 directories and that were not found.'''
711 directories and that were not found.'''
714
712
715 def badtype(mode):
713 def badtype(mode):
716 kind = _('unknown')
714 kind = _('unknown')
717 if stat.S_ISCHR(mode):
715 if stat.S_ISCHR(mode):
718 kind = _('character device')
716 kind = _('character device')
719 elif stat.S_ISBLK(mode):
717 elif stat.S_ISBLK(mode):
720 kind = _('block device')
718 kind = _('block device')
721 elif stat.S_ISFIFO(mode):
719 elif stat.S_ISFIFO(mode):
722 kind = _('fifo')
720 kind = _('fifo')
723 elif stat.S_ISSOCK(mode):
721 elif stat.S_ISSOCK(mode):
724 kind = _('socket')
722 kind = _('socket')
725 elif stat.S_ISDIR(mode):
723 elif stat.S_ISDIR(mode):
726 kind = _('directory')
724 kind = _('directory')
727 return _('unsupported file type (type is %s)') % kind
725 return _('unsupported file type (type is %s)') % kind
728
726
729 matchedir = match.explicitdir
727 matchedir = match.explicitdir
730 badfn = match.bad
728 badfn = match.bad
731 dmap = self._map
729 dmap = self._map
732 lstat = os.lstat
730 lstat = os.lstat
733 getkind = stat.S_IFMT
731 getkind = stat.S_IFMT
734 dirkind = stat.S_IFDIR
732 dirkind = stat.S_IFDIR
735 regkind = stat.S_IFREG
733 regkind = stat.S_IFREG
736 lnkkind = stat.S_IFLNK
734 lnkkind = stat.S_IFLNK
737 join = self._join
735 join = self._join
738 dirsfound = []
736 dirsfound = []
739 foundadd = dirsfound.append
737 foundadd = dirsfound.append
740 dirsnotfound = []
738 dirsnotfound = []
741 notfoundadd = dirsnotfound.append
739 notfoundadd = dirsnotfound.append
742
740
743 if not match.isexact() and self._checkcase:
741 if not match.isexact() and self._checkcase:
744 normalize = self._normalize
742 normalize = self._normalize
745 else:
743 else:
746 normalize = None
744 normalize = None
747
745
748 files = sorted(match.files())
746 files = sorted(match.files())
749 subrepos.sort()
747 subrepos.sort()
750 i, j = 0, 0
748 i, j = 0, 0
751 while i < len(files) and j < len(subrepos):
749 while i < len(files) and j < len(subrepos):
752 subpath = subrepos[j] + "/"
750 subpath = subrepos[j] + "/"
753 if files[i] < subpath:
751 if files[i] < subpath:
754 i += 1
752 i += 1
755 continue
753 continue
756 while i < len(files) and files[i].startswith(subpath):
754 while i < len(files) and files[i].startswith(subpath):
757 del files[i]
755 del files[i]
758 j += 1
756 j += 1
759
757
760 if not files or '' in files:
758 if not files or '' in files:
761 files = ['']
759 files = ['']
762 # constructing the foldmap is expensive, so don't do it for the
760 # constructing the foldmap is expensive, so don't do it for the
763 # common case where files is ['']
761 # common case where files is ['']
764 normalize = None
762 normalize = None
765 results = dict.fromkeys(subrepos)
763 results = dict.fromkeys(subrepos)
766 results['.hg'] = None
764 results['.hg'] = None
767
765
768 for ff in files:
766 for ff in files:
769 if normalize:
767 if normalize:
770 nf = normalize(ff, False, True)
768 nf = normalize(ff, False, True)
771 else:
769 else:
772 nf = ff
770 nf = ff
773 if nf in results:
771 if nf in results:
774 continue
772 continue
775
773
776 try:
774 try:
777 st = lstat(join(nf))
775 st = lstat(join(nf))
778 kind = getkind(st.st_mode)
776 kind = getkind(st.st_mode)
779 if kind == dirkind:
777 if kind == dirkind:
780 if nf in dmap:
778 if nf in dmap:
781 # file replaced by dir on disk but still in dirstate
779 # file replaced by dir on disk but still in dirstate
782 results[nf] = None
780 results[nf] = None
783 if matchedir:
781 if matchedir:
784 matchedir(nf)
782 matchedir(nf)
785 foundadd((nf, ff))
783 foundadd((nf, ff))
786 elif kind == regkind or kind == lnkkind:
784 elif kind == regkind or kind == lnkkind:
787 results[nf] = st
785 results[nf] = st
788 else:
786 else:
789 badfn(ff, badtype(kind))
787 badfn(ff, badtype(kind))
790 if nf in dmap:
788 if nf in dmap:
791 results[nf] = None
789 results[nf] = None
792 except OSError as inst: # nf not found on disk - it is dirstate only
790 except OSError as inst: # nf not found on disk - it is dirstate only
793 if nf in dmap: # does it exactly match a missing file?
791 if nf in dmap: # does it exactly match a missing file?
794 results[nf] = None
792 results[nf] = None
795 else: # does it match a missing directory?
793 else: # does it match a missing directory?
796 if self._map.hasdir(nf):
794 if self._map.hasdir(nf):
797 if matchedir:
795 if matchedir:
798 matchedir(nf)
796 matchedir(nf)
799 notfoundadd(nf)
797 notfoundadd(nf)
800 else:
798 else:
801 badfn(ff, encoding.strtolocal(inst.strerror))
799 badfn(ff, encoding.strtolocal(inst.strerror))
802
800
803 # match.files() may contain explicitly-specified paths that shouldn't
801 # match.files() may contain explicitly-specified paths that shouldn't
804 # be taken; drop them from the list of files found. dirsfound/notfound
802 # be taken; drop them from the list of files found. dirsfound/notfound
805 # aren't filtered here because they will be tested later.
803 # aren't filtered here because they will be tested later.
806 if match.anypats():
804 if match.anypats():
807 for f in list(results):
805 for f in list(results):
808 if f == '.hg' or f in subrepos:
806 if f == '.hg' or f in subrepos:
809 # keep sentinel to disable further out-of-repo walks
807 # keep sentinel to disable further out-of-repo walks
810 continue
808 continue
811 if not match(f):
809 if not match(f):
812 del results[f]
810 del results[f]
813
811
814 # Case insensitive filesystems cannot rely on lstat() failing to detect
812 # Case insensitive filesystems cannot rely on lstat() failing to detect
815 # a case-only rename. Prune the stat object for any file that does not
813 # a case-only rename. Prune the stat object for any file that does not
816 # match the case in the filesystem, if there are multiple files that
814 # match the case in the filesystem, if there are multiple files that
817 # normalize to the same path.
815 # normalize to the same path.
818 if match.isexact() and self._checkcase:
816 if match.isexact() and self._checkcase:
819 normed = {}
817 normed = {}
820
818
821 for f, st in results.iteritems():
819 for f, st in results.iteritems():
822 if st is None:
820 if st is None:
823 continue
821 continue
824
822
825 nc = util.normcase(f)
823 nc = util.normcase(f)
826 paths = normed.get(nc)
824 paths = normed.get(nc)
827
825
828 if paths is None:
826 if paths is None:
829 paths = set()
827 paths = set()
830 normed[nc] = paths
828 normed[nc] = paths
831
829
832 paths.add(f)
830 paths.add(f)
833
831
834 for norm, paths in normed.iteritems():
832 for norm, paths in normed.iteritems():
835 if len(paths) > 1:
833 if len(paths) > 1:
836 for path in paths:
834 for path in paths:
837 folded = self._discoverpath(path, norm, True, None,
835 folded = self._discoverpath(path, norm, True, None,
838 self._map.dirfoldmap)
836 self._map.dirfoldmap)
839 if path != folded:
837 if path != folded:
840 results[path] = None
838 results[path] = None
841
839
842 return results, dirsfound, dirsnotfound
840 return results, dirsfound, dirsnotfound
843
841
844 def walk(self, match, subrepos, unknown, ignored, full=True):
842 def walk(self, match, subrepos, unknown, ignored, full=True):
845 '''
843 '''
846 Walk recursively through the directory tree, finding all files
844 Walk recursively through the directory tree, finding all files
847 matched by match.
845 matched by match.
848
846
849 If full is False, maybe skip some known-clean files.
847 If full is False, maybe skip some known-clean files.
850
848
851 Return a dict mapping filename to stat-like object (either
849 Return a dict mapping filename to stat-like object (either
852 mercurial.osutil.stat instance or return value of os.stat()).
850 mercurial.osutil.stat instance or return value of os.stat()).
853
851
854 '''
852 '''
855 # full is a flag that extensions that hook into walk can use -- this
853 # full is a flag that extensions that hook into walk can use -- this
856 # implementation doesn't use it at all. This satisfies the contract
854 # implementation doesn't use it at all. This satisfies the contract
857 # because we only guarantee a "maybe".
855 # because we only guarantee a "maybe".
858
856
859 if ignored:
857 if ignored:
860 ignore = util.never
858 ignore = util.never
861 dirignore = util.never
859 dirignore = util.never
862 elif unknown:
860 elif unknown:
863 ignore = self._ignore
861 ignore = self._ignore
864 dirignore = self._dirignore
862 dirignore = self._dirignore
865 else:
863 else:
866 # if not unknown and not ignored, drop dir recursion and step 2
864 # if not unknown and not ignored, drop dir recursion and step 2
867 ignore = util.always
865 ignore = util.always
868 dirignore = util.always
866 dirignore = util.always
869
867
870 matchfn = match.matchfn
868 matchfn = match.matchfn
871 matchalways = match.always()
869 matchalways = match.always()
872 matchtdir = match.traversedir
870 matchtdir = match.traversedir
873 dmap = self._map
871 dmap = self._map
874 listdir = util.listdir
872 listdir = util.listdir
875 lstat = os.lstat
873 lstat = os.lstat
876 dirkind = stat.S_IFDIR
874 dirkind = stat.S_IFDIR
877 regkind = stat.S_IFREG
875 regkind = stat.S_IFREG
878 lnkkind = stat.S_IFLNK
876 lnkkind = stat.S_IFLNK
879 join = self._join
877 join = self._join
880
878
881 exact = skipstep3 = False
879 exact = skipstep3 = False
882 if match.isexact(): # match.exact
880 if match.isexact(): # match.exact
883 exact = True
881 exact = True
884 dirignore = util.always # skip step 2
882 dirignore = util.always # skip step 2
885 elif match.prefix(): # match.match, no patterns
883 elif match.prefix(): # match.match, no patterns
886 skipstep3 = True
884 skipstep3 = True
887
885
888 if not exact and self._checkcase:
886 if not exact and self._checkcase:
889 normalize = self._normalize
887 normalize = self._normalize
890 normalizefile = self._normalizefile
888 normalizefile = self._normalizefile
891 skipstep3 = False
889 skipstep3 = False
892 else:
890 else:
893 normalize = self._normalize
891 normalize = self._normalize
894 normalizefile = None
892 normalizefile = None
895
893
896 # step 1: find all explicit files
894 # step 1: find all explicit files
897 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
895 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
898
896
899 skipstep3 = skipstep3 and not (work or dirsnotfound)
897 skipstep3 = skipstep3 and not (work or dirsnotfound)
900 work = [d for d in work if not dirignore(d[0])]
898 work = [d for d in work if not dirignore(d[0])]
901
899
902 # step 2: visit subdirectories
900 # step 2: visit subdirectories
903 def traverse(work, alreadynormed):
901 def traverse(work, alreadynormed):
904 wadd = work.append
902 wadd = work.append
905 while work:
903 while work:
906 nd = work.pop()
904 nd = work.pop()
907 visitentries = match.visitchildrenset(nd)
905 visitentries = match.visitchildrenset(nd)
908 if not visitentries:
906 if not visitentries:
909 continue
907 continue
910 if visitentries == 'this' or visitentries == 'all':
908 if visitentries == 'this' or visitentries == 'all':
911 visitentries = None
909 visitentries = None
912 skip = None
910 skip = None
913 if nd != '':
911 if nd != '':
914 skip = '.hg'
912 skip = '.hg'
915 try:
913 try:
916 entries = listdir(join(nd), stat=True, skip=skip)
914 entries = listdir(join(nd), stat=True, skip=skip)
917 except OSError as inst:
915 except OSError as inst:
918 if inst.errno in (errno.EACCES, errno.ENOENT):
916 if inst.errno in (errno.EACCES, errno.ENOENT):
919 match.bad(self.pathto(nd),
917 match.bad(self.pathto(nd),
920 encoding.strtolocal(inst.strerror))
918 encoding.strtolocal(inst.strerror))
921 continue
919 continue
922 raise
920 raise
923 for f, kind, st in entries:
921 for f, kind, st in entries:
924 # Some matchers may return files in the visitentries set,
922 # Some matchers may return files in the visitentries set,
925 # instead of 'this', if the matcher explicitly mentions them
923 # instead of 'this', if the matcher explicitly mentions them
926 # and is not an exactmatcher. This is acceptable; we do not
924 # and is not an exactmatcher. This is acceptable; we do not
927 # make any hard assumptions about file-or-directory below
925 # make any hard assumptions about file-or-directory below
928 # based on the presence of `f` in visitentries. If
926 # based on the presence of `f` in visitentries. If
929 # visitchildrenset returned a set, we can always skip the
927 # visitchildrenset returned a set, we can always skip the
930 # entries *not* in the set it provided regardless of whether
928 # entries *not* in the set it provided regardless of whether
931 # they're actually a file or a directory.
929 # they're actually a file or a directory.
932 if visitentries and f not in visitentries:
930 if visitentries and f not in visitentries:
933 continue
931 continue
934 if normalizefile:
932 if normalizefile:
935 # even though f might be a directory, we're only
933 # even though f might be a directory, we're only
936 # interested in comparing it to files currently in the
934 # interested in comparing it to files currently in the
937 # dmap -- therefore normalizefile is enough
935 # dmap -- therefore normalizefile is enough
938 nf = normalizefile(nd and (nd + "/" + f) or f, True,
936 nf = normalizefile(nd and (nd + "/" + f) or f, True,
939 True)
937 True)
940 else:
938 else:
941 nf = nd and (nd + "/" + f) or f
939 nf = nd and (nd + "/" + f) or f
942 if nf not in results:
940 if nf not in results:
943 if kind == dirkind:
941 if kind == dirkind:
944 if not ignore(nf):
942 if not ignore(nf):
945 if matchtdir:
943 if matchtdir:
946 matchtdir(nf)
944 matchtdir(nf)
947 wadd(nf)
945 wadd(nf)
948 if nf in dmap and (matchalways or matchfn(nf)):
946 if nf in dmap and (matchalways or matchfn(nf)):
949 results[nf] = None
947 results[nf] = None
950 elif kind == regkind or kind == lnkkind:
948 elif kind == regkind or kind == lnkkind:
951 if nf in dmap:
949 if nf in dmap:
952 if matchalways or matchfn(nf):
950 if matchalways or matchfn(nf):
953 results[nf] = st
951 results[nf] = st
954 elif ((matchalways or matchfn(nf))
952 elif ((matchalways or matchfn(nf))
955 and not ignore(nf)):
953 and not ignore(nf)):
956 # unknown file -- normalize if necessary
954 # unknown file -- normalize if necessary
957 if not alreadynormed:
955 if not alreadynormed:
958 nf = normalize(nf, False, True)
956 nf = normalize(nf, False, True)
959 results[nf] = st
957 results[nf] = st
960 elif nf in dmap and (matchalways or matchfn(nf)):
958 elif nf in dmap and (matchalways or matchfn(nf)):
961 results[nf] = None
959 results[nf] = None
962
960
963 for nd, d in work:
961 for nd, d in work:
964 # alreadynormed means that processwork doesn't have to do any
962 # alreadynormed means that processwork doesn't have to do any
965 # expensive directory normalization
963 # expensive directory normalization
966 alreadynormed = not normalize or nd == d
964 alreadynormed = not normalize or nd == d
967 traverse([d], alreadynormed)
965 traverse([d], alreadynormed)
968
966
969 for s in subrepos:
967 for s in subrepos:
970 del results[s]
968 del results[s]
971 del results['.hg']
969 del results['.hg']
972
970
973 # step 3: visit remaining files from dmap
971 # step 3: visit remaining files from dmap
974 if not skipstep3 and not exact:
972 if not skipstep3 and not exact:
975 # If a dmap file is not in results yet, it was either
973 # If a dmap file is not in results yet, it was either
976 # a) not matching matchfn b) ignored, c) missing, or d) under a
974 # a) not matching matchfn b) ignored, c) missing, or d) under a
977 # symlink directory.
975 # symlink directory.
978 if not results and matchalways:
976 if not results and matchalways:
979 visit = [f for f in dmap]
977 visit = [f for f in dmap]
980 else:
978 else:
981 visit = [f for f in dmap if f not in results and matchfn(f)]
979 visit = [f for f in dmap if f not in results and matchfn(f)]
982 visit.sort()
980 visit.sort()
983
981
984 if unknown:
982 if unknown:
985 # unknown == True means we walked all dirs under the roots
983 # unknown == True means we walked all dirs under the roots
986 # that wasn't ignored, and everything that matched was stat'ed
984 # that wasn't ignored, and everything that matched was stat'ed
987 # and is already in results.
985 # and is already in results.
988 # The rest must thus be ignored or under a symlink.
986 # The rest must thus be ignored or under a symlink.
989 audit_path = pathutil.pathauditor(self._root, cached=True)
987 audit_path = pathutil.pathauditor(self._root, cached=True)
990
988
991 for nf in iter(visit):
989 for nf in iter(visit):
992 # If a stat for the same file was already added with a
990 # If a stat for the same file was already added with a
993 # different case, don't add one for this, since that would
991 # different case, don't add one for this, since that would
994 # make it appear as if the file exists under both names
992 # make it appear as if the file exists under both names
995 # on disk.
993 # on disk.
996 if (normalizefile and
994 if (normalizefile and
997 normalizefile(nf, True, True) in results):
995 normalizefile(nf, True, True) in results):
998 results[nf] = None
996 results[nf] = None
999 # Report ignored items in the dmap as long as they are not
997 # Report ignored items in the dmap as long as they are not
1000 # under a symlink directory.
998 # under a symlink directory.
1001 elif audit_path.check(nf):
999 elif audit_path.check(nf):
1002 try:
1000 try:
1003 results[nf] = lstat(join(nf))
1001 results[nf] = lstat(join(nf))
1004 # file was just ignored, no links, and exists
1002 # file was just ignored, no links, and exists
1005 except OSError:
1003 except OSError:
1006 # file doesn't exist
1004 # file doesn't exist
1007 results[nf] = None
1005 results[nf] = None
1008 else:
1006 else:
1009 # It's either missing or under a symlink directory
1007 # It's either missing or under a symlink directory
1010 # which we in this case report as missing
1008 # which we in this case report as missing
1011 results[nf] = None
1009 results[nf] = None
1012 else:
1010 else:
1013 # We may not have walked the full directory tree above,
1011 # We may not have walked the full directory tree above,
1014 # so stat and check everything we missed.
1012 # so stat and check everything we missed.
1015 iv = iter(visit)
1013 iv = iter(visit)
1016 for st in util.statfiles([join(i) for i in visit]):
1014 for st in util.statfiles([join(i) for i in visit]):
1017 results[next(iv)] = st
1015 results[next(iv)] = st
1018 return results
1016 return results
1019
1017
1020 def status(self, match, subrepos, ignored, clean, unknown):
1018 def status(self, match, subrepos, ignored, clean, unknown):
1021 '''Determine the status of the working copy relative to the
1019 '''Determine the status of the working copy relative to the
1022 dirstate and return a pair of (unsure, status), where status is of type
1020 dirstate and return a pair of (unsure, status), where status is of type
1023 scmutil.status and:
1021 scmutil.status and:
1024
1022
1025 unsure:
1023 unsure:
1026 files that might have been modified since the dirstate was
1024 files that might have been modified since the dirstate was
1027 written, but need to be read to be sure (size is the same
1025 written, but need to be read to be sure (size is the same
1028 but mtime differs)
1026 but mtime differs)
1029 status.modified:
1027 status.modified:
1030 files that have definitely been modified since the dirstate
1028 files that have definitely been modified since the dirstate
1031 was written (different size or mode)
1029 was written (different size or mode)
1032 status.clean:
1030 status.clean:
1033 files that have definitely not been modified since the
1031 files that have definitely not been modified since the
1034 dirstate was written
1032 dirstate was written
1035 '''
1033 '''
1036 listignored, listclean, listunknown = ignored, clean, unknown
1034 listignored, listclean, listunknown = ignored, clean, unknown
1037 lookup, modified, added, unknown, ignored = [], [], [], [], []
1035 lookup, modified, added, unknown, ignored = [], [], [], [], []
1038 removed, deleted, clean = [], [], []
1036 removed, deleted, clean = [], [], []
1039
1037
1040 dmap = self._map
1038 dmap = self._map
1041 dmap.preload()
1039 dmap.preload()
1042 dcontains = dmap.__contains__
1040 dcontains = dmap.__contains__
1043 dget = dmap.__getitem__
1041 dget = dmap.__getitem__
1044 ladd = lookup.append # aka "unsure"
1042 ladd = lookup.append # aka "unsure"
1045 madd = modified.append
1043 madd = modified.append
1046 aadd = added.append
1044 aadd = added.append
1047 uadd = unknown.append
1045 uadd = unknown.append
1048 iadd = ignored.append
1046 iadd = ignored.append
1049 radd = removed.append
1047 radd = removed.append
1050 dadd = deleted.append
1048 dadd = deleted.append
1051 cadd = clean.append
1049 cadd = clean.append
1052 mexact = match.exact
1050 mexact = match.exact
1053 dirignore = self._dirignore
1051 dirignore = self._dirignore
1054 checkexec = self._checkexec
1052 checkexec = self._checkexec
1055 copymap = self._map.copymap
1053 copymap = self._map.copymap
1056 lastnormaltime = self._lastnormaltime
1054 lastnormaltime = self._lastnormaltime
1057
1055
1058 # We need to do full walks when either
1056 # We need to do full walks when either
1059 # - we're listing all clean files, or
1057 # - we're listing all clean files, or
1060 # - match.traversedir does something, because match.traversedir should
1058 # - match.traversedir does something, because match.traversedir should
1061 # be called for every dir in the working dir
1059 # be called for every dir in the working dir
1062 full = listclean or match.traversedir is not None
1060 full = listclean or match.traversedir is not None
1063 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1061 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1064 full=full).iteritems():
1062 full=full).iteritems():
1065 if not dcontains(fn):
1063 if not dcontains(fn):
1066 if (listignored or mexact(fn)) and dirignore(fn):
1064 if (listignored or mexact(fn)) and dirignore(fn):
1067 if listignored:
1065 if listignored:
1068 iadd(fn)
1066 iadd(fn)
1069 else:
1067 else:
1070 uadd(fn)
1068 uadd(fn)
1071 continue
1069 continue
1072
1070
1073 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1071 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1074 # written like that for performance reasons. dmap[fn] is not a
1072 # written like that for performance reasons. dmap[fn] is not a
1075 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1073 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1076 # opcode has fast paths when the value to be unpacked is a tuple or
1074 # opcode has fast paths when the value to be unpacked is a tuple or
1077 # a list, but falls back to creating a full-fledged iterator in
1075 # a list, but falls back to creating a full-fledged iterator in
1078 # general. That is much slower than simply accessing and storing the
1076 # general. That is much slower than simply accessing and storing the
1079 # tuple members one by one.
1077 # tuple members one by one.
1080 t = dget(fn)
1078 t = dget(fn)
1081 state = t[0]
1079 state = t[0]
1082 mode = t[1]
1080 mode = t[1]
1083 size = t[2]
1081 size = t[2]
1084 time = t[3]
1082 time = t[3]
1085
1083
1086 if not st and state in "nma":
1084 if not st and state in "nma":
1087 dadd(fn)
1085 dadd(fn)
1088 elif state == 'n':
1086 elif state == 'n':
1089 if (size >= 0 and
1087 if (size >= 0 and
1090 ((size != st.st_size and size != st.st_size & _rangemask)
1088 ((size != st.st_size and size != st.st_size & _rangemask)
1091 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1089 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1092 or size == -2 # other parent
1090 or size == -2 # other parent
1093 or fn in copymap):
1091 or fn in copymap):
1094 madd(fn)
1092 madd(fn)
1095 elif (time != st[stat.ST_MTIME]
1093 elif (time != st[stat.ST_MTIME]
1096 and time != st[stat.ST_MTIME] & _rangemask):
1094 and time != st[stat.ST_MTIME] & _rangemask):
1097 ladd(fn)
1095 ladd(fn)
1098 elif st[stat.ST_MTIME] == lastnormaltime:
1096 elif st[stat.ST_MTIME] == lastnormaltime:
1099 # fn may have just been marked as normal and it may have
1097 # fn may have just been marked as normal and it may have
1100 # changed in the same second without changing its size.
1098 # changed in the same second without changing its size.
1101 # This can happen if we quickly do multiple commits.
1099 # This can happen if we quickly do multiple commits.
1102 # Force lookup, so we don't miss such a racy file change.
1100 # Force lookup, so we don't miss such a racy file change.
1103 ladd(fn)
1101 ladd(fn)
1104 elif listclean:
1102 elif listclean:
1105 cadd(fn)
1103 cadd(fn)
1106 elif state == 'm':
1104 elif state == 'm':
1107 madd(fn)
1105 madd(fn)
1108 elif state == 'a':
1106 elif state == 'a':
1109 aadd(fn)
1107 aadd(fn)
1110 elif state == 'r':
1108 elif state == 'r':
1111 radd(fn)
1109 radd(fn)
1112
1110
1113 return (lookup, scmutil.status(modified, added, removed, deleted,
1111 return (lookup, scmutil.status(modified, added, removed, deleted,
1114 unknown, ignored, clean))
1112 unknown, ignored, clean))
1115
1113
1116 def matches(self, match):
1114 def matches(self, match):
1117 '''
1115 '''
1118 return files in the dirstate (in whatever state) filtered by match
1116 return files in the dirstate (in whatever state) filtered by match
1119 '''
1117 '''
1120 dmap = self._map
1118 dmap = self._map
1121 if match.always():
1119 if match.always():
1122 return dmap.keys()
1120 return dmap.keys()
1123 files = match.files()
1121 files = match.files()
1124 if match.isexact():
1122 if match.isexact():
1125 # fast path -- filter the other way around, since typically files is
1123 # fast path -- filter the other way around, since typically files is
1126 # much smaller than dmap
1124 # much smaller than dmap
1127 return [f for f in files if f in dmap]
1125 return [f for f in files if f in dmap]
1128 if match.prefix() and all(fn in dmap for fn in files):
1126 if match.prefix() and all(fn in dmap for fn in files):
1129 # fast path -- all the values are known to be files, so just return
1127 # fast path -- all the values are known to be files, so just return
1130 # that
1128 # that
1131 return list(files)
1129 return list(files)
1132 return [f for f in dmap if match(f)]
1130 return [f for f in dmap if match(f)]
1133
1131
1134 def _actualfilename(self, tr):
1132 def _actualfilename(self, tr):
1135 if tr:
1133 if tr:
1136 return self._pendingfilename
1134 return self._pendingfilename
1137 else:
1135 else:
1138 return self._filename
1136 return self._filename
1139
1137
1140 def savebackup(self, tr, backupname):
1138 def savebackup(self, tr, backupname):
1141 '''Save current dirstate into backup file'''
1139 '''Save current dirstate into backup file'''
1142 filename = self._actualfilename(tr)
1140 filename = self._actualfilename(tr)
1143 assert backupname != filename
1141 assert backupname != filename
1144
1142
1145 # use '_writedirstate' instead of 'write' to write changes certainly,
1143 # use '_writedirstate' instead of 'write' to write changes certainly,
1146 # because the latter omits writing out if transaction is running.
1144 # because the latter omits writing out if transaction is running.
1147 # output file will be used to create backup of dirstate at this point.
1145 # output file will be used to create backup of dirstate at this point.
1148 if self._dirty or not self._opener.exists(filename):
1146 if self._dirty or not self._opener.exists(filename):
1149 self._writedirstate(self._opener(filename, "w", atomictemp=True,
1147 self._writedirstate(self._opener(filename, "w", atomictemp=True,
1150 checkambig=True))
1148 checkambig=True))
1151
1149
1152 if tr:
1150 if tr:
1153 # ensure that subsequent tr.writepending returns True for
1151 # ensure that subsequent tr.writepending returns True for
1154 # changes written out above, even if dirstate is never
1152 # changes written out above, even if dirstate is never
1155 # changed after this
1153 # changed after this
1156 tr.addfilegenerator('dirstate', (self._filename,),
1154 tr.addfilegenerator('dirstate', (self._filename,),
1157 self._writedirstate, location='plain')
1155 self._writedirstate, location='plain')
1158
1156
1159 # ensure that pending file written above is unlinked at
1157 # ensure that pending file written above is unlinked at
1160 # failure, even if tr.writepending isn't invoked until the
1158 # failure, even if tr.writepending isn't invoked until the
1161 # end of this transaction
1159 # end of this transaction
1162 tr.registertmp(filename, location='plain')
1160 tr.registertmp(filename, location='plain')
1163
1161
1164 self._opener.tryunlink(backupname)
1162 self._opener.tryunlink(backupname)
1165 # hardlink backup is okay because _writedirstate is always called
1163 # hardlink backup is okay because _writedirstate is always called
1166 # with an "atomictemp=True" file.
1164 # with an "atomictemp=True" file.
1167 util.copyfile(self._opener.join(filename),
1165 util.copyfile(self._opener.join(filename),
1168 self._opener.join(backupname), hardlink=True)
1166 self._opener.join(backupname), hardlink=True)
1169
1167
1170 def restorebackup(self, tr, backupname):
1168 def restorebackup(self, tr, backupname):
1171 '''Restore dirstate by backup file'''
1169 '''Restore dirstate by backup file'''
1172 # this "invalidate()" prevents "wlock.release()" from writing
1170 # this "invalidate()" prevents "wlock.release()" from writing
1173 # changes of dirstate out after restoring from backup file
1171 # changes of dirstate out after restoring from backup file
1174 self.invalidate()
1172 self.invalidate()
1175 filename = self._actualfilename(tr)
1173 filename = self._actualfilename(tr)
1176 o = self._opener
1174 o = self._opener
1177 if util.samefile(o.join(backupname), o.join(filename)):
1175 if util.samefile(o.join(backupname), o.join(filename)):
1178 o.unlink(backupname)
1176 o.unlink(backupname)
1179 else:
1177 else:
1180 o.rename(backupname, filename, checkambig=True)
1178 o.rename(backupname, filename, checkambig=True)
1181
1179
1182 def clearbackup(self, tr, backupname):
1180 def clearbackup(self, tr, backupname):
1183 '''Clear backup file'''
1181 '''Clear backup file'''
1184 self._opener.unlink(backupname)
1182 self._opener.unlink(backupname)
1185
1183
class dirstatemap(object):
    """Map encapsulating the dirstate's contents.

    The dirstate contains the following state:

    - `identity` is the identity of the dirstate file, which can be used to
      detect when changes have occurred to the dirstate file.

    - `parents` is a pair containing the parents of the working copy. The
      parents are updated by calling `setparents`.

    - the state map maps filenames to tuples of (state, mode, size, mtime),
      where state is a single character representing 'normal', 'added',
      'removed', or 'merged'. It is read by treating the dirstate as a
      dict. File state is updated by calling the `addfile`, `removefile` and
      `dropfile` methods.

    - `copymap` maps destination filenames to their source filename.

    The dirstate also provides the following views onto the state:

    - `nonnormalset` is a set of the filenames that have state other
      than 'normal', or are normal but have an mtime of -1 ('normallookup').

    - `otherparentset` is a set of the filenames that are marked as coming
      from the second parent when the dirstate is currently being merged.

    - `filefoldmap` is a dict mapping normalized filenames to the denormalized
      form that they appear as in the dirstate.

    - `dirfoldmap` is a dict mapping normalized directory names to the
      denormalized form that they appear as in the dirstate.
    """

    def __init__(self, ui, opener, root):
        self._ui = ui
        self._opener = opener
        self._root = root
        self._filename = 'dirstate'

        # working-copy parent nodes; lazily loaded by parents()
        self._parents = None
        self._dirtyparents = False

        # for consistent view between _pl() and _read() invocations
        self._pendingmode = None

    @propertycache
    def _map(self):
        # Replace the propertycache with a plain dict up front so that
        # read() can populate it; subsequent attribute lookups bypass
        # this function entirely.
        self._map = {}
        self.read()
        return self._map

    @propertycache
    def copymap(self):
        self.copymap = {}
        # touching _map forces a read(), which fills copymap as a
        # side effect
        self._map
        return self.copymap

    def clear(self):
        self._map.clear()
        self.copymap.clear()
        self.setparents(nullid, nullid)
        # drop every derived view; they will be lazily rebuilt on demand
        util.clearcachedproperty(self, "_dirs")
        util.clearcachedproperty(self, "_alldirs")
        util.clearcachedproperty(self, "filefoldmap")
        util.clearcachedproperty(self, "dirfoldmap")
        util.clearcachedproperty(self, "nonnormalset")
        util.clearcachedproperty(self, "otherparentset")

    def items(self):
        return self._map.iteritems()

    # forward for python2,3 compat
    iteritems = items

    def __len__(self):
        return len(self._map)

    def __iter__(self):
        return iter(self._map)

    def get(self, key, default=None):
        return self._map.get(key, default)

    def __contains__(self, key):
        return key in self._map

    def __getitem__(self, key):
        return self._map[key]

    def keys(self):
        return self._map.keys()

    def preload(self):
        """Loads the underlying data, if it's not already loaded"""
        self._map

    def addfile(self, f, oldstate, state, mode, size, mtime):
        """Add a tracked file to the dirstate."""
        # keep the cached directory structures in sync, but only if they
        # have already been materialized
        if oldstate in "?r" and r"_dirs" in self.__dict__:
            self._dirs.addpath(f)
        if oldstate == "?" and r"_alldirs" in self.__dict__:
            self._alldirs.addpath(f)
        self._map[f] = dirstatetuple(state, mode, size, mtime)
        if state != 'n' or mtime == -1:
            self.nonnormalset.add(f)
        if size == -2:
            # size -2 marks an entry coming from the second merge parent
            self.otherparentset.add(f)

    def removefile(self, f, oldstate, size):
        """
        Mark a file as removed in the dirstate.

        The `size` parameter is used to store sentinel values that indicate
        the file's previous state. In the future, we should refactor this
        to be more explicit about what that state is.
        """
        if oldstate not in "?r" and r"_dirs" in self.__dict__:
            self._dirs.delpath(f)
        if oldstate == "?" and r"_alldirs" in self.__dict__:
            # a removed entry still counts toward _alldirs
            self._alldirs.addpath(f)
        if r"filefoldmap" in self.__dict__:
            normed = util.normcase(f)
            self.filefoldmap.pop(normed, None)
        self._map[f] = dirstatetuple('r', 0, size, 0)
        self.nonnormalset.add(f)

    def dropfile(self, f, oldstate):
        """
        Remove a file from the dirstate.  Returns True if the file was
        previously recorded.
        """
        existed = self._map.pop(f, None) is not None
        if existed:
            if oldstate != "r" and r"_dirs" in self.__dict__:
                self._dirs.delpath(f)
            if r"_alldirs" in self.__dict__:
                self._alldirs.delpath(f)
        if r"filefoldmap" in self.__dict__:
            normed = util.normcase(f)
            self.filefoldmap.pop(normed, None)
        self.nonnormalset.discard(f)
        return existed

    def clearambiguoustimes(self, files, now):
        # Entries whose mtime equals `now` are ambiguous (the file might
        # still change within the same second); force them into the
        # lookup set by storing an mtime of -1.
        for f in files:
            e = self.get(f)
            if e is not None and e[0] == 'n' and e[3] == now:
                self._map[f] = dirstatetuple(e[0], e[1], e[2], -1)
                self.nonnormalset.add(f)

    def nonnormalentries(self):
        '''Compute the nonnormal dirstate entries from the dmap'''
        try:
            # C fast path, when the compiled parsers module provides it
            return parsers.nonnormalotherparententries(self._map)
        except AttributeError:
            nonnorm = set()
            otherparent = set()
            for fname, e in self._map.iteritems():
                if e[0] != 'n' or e[3] == -1:
                    nonnorm.add(fname)
                if e[0] == 'n' and e[2] == -2:
                    otherparent.add(fname)
            return nonnorm, otherparent

    @propertycache
    def filefoldmap(self):
        """Returns a dictionary mapping normalized case paths to their
        non-normalized versions.
        """
        try:
            makefilefoldmap = parsers.make_file_foldmap
        except AttributeError:
            pass
        else:
            return makefilefoldmap(self._map, util.normcasespec,
                                   util.normcasefallback)

        foldmap = {}
        normcase = util.normcase
        for name, entry in self._map.iteritems():
            # removed files are not present in the working copy, so they
            # must not shadow a same-cased path
            if entry[0] != 'r':
                foldmap[normcase(name)] = name
        foldmap['.'] = '.' # prevents useless util.fspath() invocation
        return foldmap

    def hastrackeddir(self, d):
        """
        Returns True if the dirstate contains a tracked (not removed) file
        in this directory.
        """
        return d in self._dirs

    def hasdir(self, d):
        """
        Returns True if the dirstate contains a file (tracked or removed)
        in this directory.
        """
        return d in self._alldirs

    @propertycache
    def _dirs(self):
        # directories containing tracked files only ('r' entries skipped)
        return util.dirs(self._map, 'r')

    @propertycache
    def _alldirs(self):
        # directories containing any entry, including removed ones
        return util.dirs(self._map)

    def _opendirstatefile(self):
        fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
        if self._pendingmode is not None and self._pendingmode != mode:
            fp.close()
            raise error.Abort(_('working directory state may be '
                                'changed parallelly'))
        self._pendingmode = mode
        return fp

    def parents(self):
        if not self._parents:
            try:
                fp = self._opendirstatefile()
                st = fp.read(40)
                fp.close()
            except IOError as err:
                if err.errno != errno.ENOENT:
                    raise
                # File doesn't exist, so the current state is empty
                st = ''

            l = len(st)
            if l == 40:
                # two 20-byte binary nodes back to back
                self._parents = (st[:20], st[20:40])
            elif l == 0:
                self._parents = (nullid, nullid)
            else:
                raise error.Abort(_('working directory state appears '
                                    'damaged!'))

        return self._parents

    def setparents(self, p1, p2):
        self._parents = (p1, p2)
        self._dirtyparents = True

    def read(self):
        # ignore HG_PENDING because identity is used only for writing
        self.identity = util.filestat.frompath(
            self._opener.join(self._filename))

        try:
            fp = self._opendirstatefile()
            try:
                data = fp.read()
            finally:
                fp.close()
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            return
        if not data:
            return

        if util.safehasattr(parsers, 'dict_new_presized'):
            # Make an estimate of the number of files in the dirstate based on
            # its size. From a linear regression on a set of real-world repos,
            # all over 10,000 files, the size of a dirstate entry is 85
            # bytes. The cost of resizing is significantly higher than the cost
            # of filling in a larger presized dict, so subtract 20% from the
            # size.
            #
            # This heuristic is imperfect in many ways, so in a future dirstate
            # format update it makes sense to just record the number of entries
            # on write.
            self._map = parsers.dict_new_presized(len(data) // 71)

        # Python's garbage collector triggers a GC each time a certain number
        # of container objects (the number being defined by
        # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
        # for each file in the dirstate. The C version then immediately marks
        # them as not to be tracked by the collector. However, this has no
        # effect on when GCs are triggered, only on what objects the GC looks
        # into. This means that O(number of files) GCs are unavoidable.
        # Depending on when in the process's lifetime the dirstate is parsed,
        # this can get very expensive. As a workaround, disable GC while
        # parsing the dirstate.
        #
        # (we cannot decorate the function directly since it is in a C module)
        if rustext is not None:
            parse_dirstate = rustext.dirstate.parse_dirstate
        else:
            parse_dirstate = parsers.parse_dirstate

        parse_dirstate = util.nogc(parse_dirstate)
        p = parse_dirstate(self._map, self.copymap, data)
        if not self._dirtyparents:
            self.setparents(*p)

        # Avoid excess attribute lookups by fast pathing certain checks
        self.__contains__ = self._map.__contains__
        self.__getitem__ = self._map.__getitem__
        self.get = self._map.get

    def write(self, st, now):
        if rustext is not None:
            pack_dirstate = rustext.dirstate.pack_dirstate
        else:
            pack_dirstate = parsers.pack_dirstate

        st.write(pack_dirstate(self._map, self.copymap,
                               self.parents(), now))
        st.close()
        self._dirtyparents = False
        # the on-disk state is now authoritative; recompute derived sets
        self.nonnormalset, self.otherparentset = self.nonnormalentries()

    @propertycache
    def nonnormalset(self):
        nonnorm, otherparents = self.nonnormalentries()
        # both sets come from one pass; cache the sibling as well
        self.otherparentset = otherparents
        return nonnorm

    @propertycache
    def otherparentset(self):
        nonnorm, otherparents = self.nonnormalentries()
        # both sets come from one pass; cache the sibling as well
        self.nonnormalset = nonnorm
        return otherparents

    @propertycache
    def identity(self):
        # reading the map records the file identity as a side effect
        self._map
        return self.identity

    @propertycache
    def dirfoldmap(self):
        foldmap = {}
        normcase = util.normcase
        for name in self._dirs:
            foldmap[normcase(name)] = name
        return foldmap
General Comments 0
You need to be logged in to leave comments. Login now