##// END OF EJS Templates
dirstate: remove obsolete reference to dirstate.beginparentchange...
Martin von Zweigbergk -
r42117:42dd6998 default
parent child Browse files
Show More
@@ -1,1508 +1,1508 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import nullid
17 from .node import nullid
18 from . import (
18 from . import (
19 encoding,
19 encoding,
20 error,
20 error,
21 match as matchmod,
21 match as matchmod,
22 pathutil,
22 pathutil,
23 policy,
23 policy,
24 pycompat,
24 pycompat,
25 scmutil,
25 scmutil,
26 txnutil,
26 txnutil,
27 util,
27 util,
28 )
28 )
29
29
30 parsers = policy.importmod(r'parsers')
30 parsers = policy.importmod(r'parsers')
31
31
32 propertycache = util.propertycache
32 propertycache = util.propertycache
33 filecache = scmutil.filecache
33 filecache = scmutil.filecache
34 _rangemask = 0x7fffffff
34 _rangemask = 0x7fffffff
35
35
36 dirstatetuple = parsers.dirstatetuple
36 dirstatetuple = parsers.dirstatetuple
37
37
38 class repocache(filecache):
38 class repocache(filecache):
39 """filecache for files in .hg/"""
39 """filecache for files in .hg/"""
40 def join(self, obj, fname):
40 def join(self, obj, fname):
41 return obj._opener.join(fname)
41 return obj._opener.join(fname)
42
42
43 class rootcache(filecache):
43 class rootcache(filecache):
44 """filecache for files in the repository root"""
44 """filecache for files in the repository root"""
45 def join(self, obj, fname):
45 def join(self, obj, fname):
46 return obj._join(fname)
46 return obj._join(fname)
47
47
48 def _getfsnow(vfs):
48 def _getfsnow(vfs):
49 '''Get "now" timestamp on filesystem'''
49 '''Get "now" timestamp on filesystem'''
50 tmpfd, tmpname = vfs.mkstemp()
50 tmpfd, tmpname = vfs.mkstemp()
51 try:
51 try:
52 return os.fstat(tmpfd)[stat.ST_MTIME]
52 return os.fstat(tmpfd)[stat.ST_MTIME]
53 finally:
53 finally:
54 os.close(tmpfd)
54 os.close(tmpfd)
55 vfs.unlink(tmpname)
55 vfs.unlink(tmpname)
56
56
57 class dirstate(object):
57 class dirstate(object):
58
58
59 def __init__(self, opener, ui, root, validate, sparsematchfn):
59 def __init__(self, opener, ui, root, validate, sparsematchfn):
60 '''Create a new dirstate object.
60 '''Create a new dirstate object.
61
61
62 opener is an open()-like callable that can be used to open the
62 opener is an open()-like callable that can be used to open the
63 dirstate file; root is the root of the directory tracked by
63 dirstate file; root is the root of the directory tracked by
64 the dirstate.
64 the dirstate.
65 '''
65 '''
66 self._opener = opener
66 self._opener = opener
67 self._validate = validate
67 self._validate = validate
68 self._root = root
68 self._root = root
69 self._sparsematchfn = sparsematchfn
69 self._sparsematchfn = sparsematchfn
70 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
70 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
71 # UNC path pointing to root share (issue4557)
71 # UNC path pointing to root share (issue4557)
72 self._rootdir = pathutil.normasprefix(root)
72 self._rootdir = pathutil.normasprefix(root)
73 self._dirty = False
73 self._dirty = False
74 self._lastnormaltime = 0
74 self._lastnormaltime = 0
75 self._ui = ui
75 self._ui = ui
76 self._filecache = {}
76 self._filecache = {}
77 self._parentwriters = 0
77 self._parentwriters = 0
78 self._filename = 'dirstate'
78 self._filename = 'dirstate'
79 self._pendingfilename = '%s.pending' % self._filename
79 self._pendingfilename = '%s.pending' % self._filename
80 self._plchangecallbacks = {}
80 self._plchangecallbacks = {}
81 self._origpl = None
81 self._origpl = None
82 self._updatedfiles = set()
82 self._updatedfiles = set()
83 self._mapcls = dirstatemap
83 self._mapcls = dirstatemap
84 # Access and cache cwd early, so we don't access it for the first time
84 # Access and cache cwd early, so we don't access it for the first time
85 # after a working-copy update caused it to not exist (accessing it then
85 # after a working-copy update caused it to not exist (accessing it then
86 # raises an exception).
86 # raises an exception).
87 self._cwd
87 self._cwd
88
88
89 @contextlib.contextmanager
89 @contextlib.contextmanager
90 def parentchange(self):
90 def parentchange(self):
91 '''Context manager for handling dirstate parents.
91 '''Context manager for handling dirstate parents.
92
92
93 If an exception occurs in the scope of the context manager,
93 If an exception occurs in the scope of the context manager,
94 the incoherent dirstate won't be written when wlock is
94 the incoherent dirstate won't be written when wlock is
95 released.
95 released.
96 '''
96 '''
97 self._parentwriters += 1
97 self._parentwriters += 1
98 yield
98 yield
99 # Typically we want the "undo" step of a context manager in a
99 # Typically we want the "undo" step of a context manager in a
100 # finally block so it happens even when an exception
100 # finally block so it happens even when an exception
101 # occurs. In this case, however, we only want to decrement
101 # occurs. In this case, however, we only want to decrement
102 # parentwriters if the code in the with statement exits
102 # parentwriters if the code in the with statement exits
103 # normally, so we don't have a try/finally here on purpose.
103 # normally, so we don't have a try/finally here on purpose.
104 self._parentwriters -= 1
104 self._parentwriters -= 1
105
105
106 def pendingparentchange(self):
106 def pendingparentchange(self):
107 '''Returns true if the dirstate is in the middle of a set of changes
107 '''Returns true if the dirstate is in the middle of a set of changes
108 that modify the dirstate parent.
108 that modify the dirstate parent.
109 '''
109 '''
110 return self._parentwriters > 0
110 return self._parentwriters > 0
111
111
112 @propertycache
112 @propertycache
113 def _map(self):
113 def _map(self):
114 """Return the dirstate contents (see documentation for dirstatemap)."""
114 """Return the dirstate contents (see documentation for dirstatemap)."""
115 self._map = self._mapcls(self._ui, self._opener, self._root)
115 self._map = self._mapcls(self._ui, self._opener, self._root)
116 return self._map
116 return self._map
117
117
118 @property
118 @property
119 def _sparsematcher(self):
119 def _sparsematcher(self):
120 """The matcher for the sparse checkout.
120 """The matcher for the sparse checkout.
121
121
122 The working directory may not include every file from a manifest. The
122 The working directory may not include every file from a manifest. The
123 matcher obtained by this property will match a path if it is to be
123 matcher obtained by this property will match a path if it is to be
124 included in the working directory.
124 included in the working directory.
125 """
125 """
126 # TODO there is potential to cache this property. For now, the matcher
126 # TODO there is potential to cache this property. For now, the matcher
127 # is resolved on every access. (But the called function does use a
127 # is resolved on every access. (But the called function does use a
128 # cache to keep the lookup fast.)
128 # cache to keep the lookup fast.)
129 return self._sparsematchfn()
129 return self._sparsematchfn()
130
130
131 @repocache('branch')
131 @repocache('branch')
132 def _branch(self):
132 def _branch(self):
133 try:
133 try:
134 return self._opener.read("branch").strip() or "default"
134 return self._opener.read("branch").strip() or "default"
135 except IOError as inst:
135 except IOError as inst:
136 if inst.errno != errno.ENOENT:
136 if inst.errno != errno.ENOENT:
137 raise
137 raise
138 return "default"
138 return "default"
139
139
140 @property
140 @property
141 def _pl(self):
141 def _pl(self):
142 return self._map.parents()
142 return self._map.parents()
143
143
144 def hasdir(self, d):
144 def hasdir(self, d):
145 return self._map.hastrackeddir(d)
145 return self._map.hastrackeddir(d)
146
146
147 @rootcache('.hgignore')
147 @rootcache('.hgignore')
148 def _ignore(self):
148 def _ignore(self):
149 files = self._ignorefiles()
149 files = self._ignorefiles()
150 if not files:
150 if not files:
151 return matchmod.never()
151 return matchmod.never()
152
152
153 pats = ['include:%s' % f for f in files]
153 pats = ['include:%s' % f for f in files]
154 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
154 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
155
155
156 @propertycache
156 @propertycache
157 def _slash(self):
157 def _slash(self):
158 return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'
158 return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'
159
159
160 @propertycache
160 @propertycache
161 def _checklink(self):
161 def _checklink(self):
162 return util.checklink(self._root)
162 return util.checklink(self._root)
163
163
164 @propertycache
164 @propertycache
165 def _checkexec(self):
165 def _checkexec(self):
166 return util.checkexec(self._root)
166 return util.checkexec(self._root)
167
167
168 @propertycache
168 @propertycache
169 def _checkcase(self):
169 def _checkcase(self):
170 return not util.fscasesensitive(self._join('.hg'))
170 return not util.fscasesensitive(self._join('.hg'))
171
171
172 def _join(self, f):
172 def _join(self, f):
173 # much faster than os.path.join()
173 # much faster than os.path.join()
174 # it's safe because f is always a relative path
174 # it's safe because f is always a relative path
175 return self._rootdir + f
175 return self._rootdir + f
176
176
177 def flagfunc(self, buildfallback):
177 def flagfunc(self, buildfallback):
178 if self._checklink and self._checkexec:
178 if self._checklink and self._checkexec:
179 def f(x):
179 def f(x):
180 try:
180 try:
181 st = os.lstat(self._join(x))
181 st = os.lstat(self._join(x))
182 if util.statislink(st):
182 if util.statislink(st):
183 return 'l'
183 return 'l'
184 if util.statisexec(st):
184 if util.statisexec(st):
185 return 'x'
185 return 'x'
186 except OSError:
186 except OSError:
187 pass
187 pass
188 return ''
188 return ''
189 return f
189 return f
190
190
191 fallback = buildfallback()
191 fallback = buildfallback()
192 if self._checklink:
192 if self._checklink:
193 def f(x):
193 def f(x):
194 if os.path.islink(self._join(x)):
194 if os.path.islink(self._join(x)):
195 return 'l'
195 return 'l'
196 if 'x' in fallback(x):
196 if 'x' in fallback(x):
197 return 'x'
197 return 'x'
198 return ''
198 return ''
199 return f
199 return f
200 if self._checkexec:
200 if self._checkexec:
201 def f(x):
201 def f(x):
202 if 'l' in fallback(x):
202 if 'l' in fallback(x):
203 return 'l'
203 return 'l'
204 if util.isexec(self._join(x)):
204 if util.isexec(self._join(x)):
205 return 'x'
205 return 'x'
206 return ''
206 return ''
207 return f
207 return f
208 else:
208 else:
209 return fallback
209 return fallback
210
210
211 @propertycache
211 @propertycache
212 def _cwd(self):
212 def _cwd(self):
213 # internal config: ui.forcecwd
213 # internal config: ui.forcecwd
214 forcecwd = self._ui.config('ui', 'forcecwd')
214 forcecwd = self._ui.config('ui', 'forcecwd')
215 if forcecwd:
215 if forcecwd:
216 return forcecwd
216 return forcecwd
217 return encoding.getcwd()
217 return encoding.getcwd()
218
218
219 def getcwd(self):
219 def getcwd(self):
220 '''Return the path from which a canonical path is calculated.
220 '''Return the path from which a canonical path is calculated.
221
221
222 This path should be used to resolve file patterns or to convert
222 This path should be used to resolve file patterns or to convert
223 canonical paths back to file paths for display. It shouldn't be
223 canonical paths back to file paths for display. It shouldn't be
224 used to get real file paths. Use vfs functions instead.
224 used to get real file paths. Use vfs functions instead.
225 '''
225 '''
226 cwd = self._cwd
226 cwd = self._cwd
227 if cwd == self._root:
227 if cwd == self._root:
228 return ''
228 return ''
229 # self._root ends with a path separator if self._root is '/' or 'C:\'
229 # self._root ends with a path separator if self._root is '/' or 'C:\'
230 rootsep = self._root
230 rootsep = self._root
231 if not util.endswithsep(rootsep):
231 if not util.endswithsep(rootsep):
232 rootsep += pycompat.ossep
232 rootsep += pycompat.ossep
233 if cwd.startswith(rootsep):
233 if cwd.startswith(rootsep):
234 return cwd[len(rootsep):]
234 return cwd[len(rootsep):]
235 else:
235 else:
236 # we're outside the repo. return an absolute path.
236 # we're outside the repo. return an absolute path.
237 return cwd
237 return cwd
238
238
239 def pathto(self, f, cwd=None):
239 def pathto(self, f, cwd=None):
240 if cwd is None:
240 if cwd is None:
241 cwd = self.getcwd()
241 cwd = self.getcwd()
242 path = util.pathto(self._root, cwd, f)
242 path = util.pathto(self._root, cwd, f)
243 if self._slash:
243 if self._slash:
244 return util.pconvert(path)
244 return util.pconvert(path)
245 return path
245 return path
246
246
247 def __getitem__(self, key):
247 def __getitem__(self, key):
248 '''Return the current state of key (a filename) in the dirstate.
248 '''Return the current state of key (a filename) in the dirstate.
249
249
250 States are:
250 States are:
251 n normal
251 n normal
252 m needs merging
252 m needs merging
253 r marked for removal
253 r marked for removal
254 a marked for addition
254 a marked for addition
255 ? not tracked
255 ? not tracked
256 '''
256 '''
257 return self._map.get(key, ("?",))[0]
257 return self._map.get(key, ("?",))[0]
258
258
259 def __contains__(self, key):
259 def __contains__(self, key):
260 return key in self._map
260 return key in self._map
261
261
262 def __iter__(self):
262 def __iter__(self):
263 return iter(sorted(self._map))
263 return iter(sorted(self._map))
264
264
265 def items(self):
265 def items(self):
266 return self._map.iteritems()
266 return self._map.iteritems()
267
267
268 iteritems = items
268 iteritems = items
269
269
270 def parents(self):
270 def parents(self):
271 return [self._validate(p) for p in self._pl]
271 return [self._validate(p) for p in self._pl]
272
272
273 def p1(self):
273 def p1(self):
274 return self._validate(self._pl[0])
274 return self._validate(self._pl[0])
275
275
276 def p2(self):
276 def p2(self):
277 return self._validate(self._pl[1])
277 return self._validate(self._pl[1])
278
278
279 def branch(self):
279 def branch(self):
280 return encoding.tolocal(self._branch)
280 return encoding.tolocal(self._branch)
281
281
282 def setparents(self, p1, p2=nullid):
282 def setparents(self, p1, p2=nullid):
283 """Set dirstate parents to p1 and p2.
283 """Set dirstate parents to p1 and p2.
284
284
285 When moving from two parents to one, 'm' merged entries a
285 When moving from two parents to one, 'm' merged entries a
286 adjusted to normal and previous copy records discarded and
286 adjusted to normal and previous copy records discarded and
287 returned by the call.
287 returned by the call.
288
288
289 See localrepo.setparents()
289 See localrepo.setparents()
290 """
290 """
291 if self._parentwriters == 0:
291 if self._parentwriters == 0:
292 raise ValueError("cannot set dirstate parent without "
292 raise ValueError("cannot set dirstate parent outside of "
293 "calling dirstate.beginparentchange")
293 "dirstate.parentchange context manager")
294
294
295 self._dirty = True
295 self._dirty = True
296 oldp2 = self._pl[1]
296 oldp2 = self._pl[1]
297 if self._origpl is None:
297 if self._origpl is None:
298 self._origpl = self._pl
298 self._origpl = self._pl
299 self._map.setparents(p1, p2)
299 self._map.setparents(p1, p2)
300 copies = {}
300 copies = {}
301 if oldp2 != nullid and p2 == nullid:
301 if oldp2 != nullid and p2 == nullid:
302 candidatefiles = self._map.nonnormalset.union(
302 candidatefiles = self._map.nonnormalset.union(
303 self._map.otherparentset)
303 self._map.otherparentset)
304 for f in candidatefiles:
304 for f in candidatefiles:
305 s = self._map.get(f)
305 s = self._map.get(f)
306 if s is None:
306 if s is None:
307 continue
307 continue
308
308
309 # Discard 'm' markers when moving away from a merge state
309 # Discard 'm' markers when moving away from a merge state
310 if s[0] == 'm':
310 if s[0] == 'm':
311 source = self._map.copymap.get(f)
311 source = self._map.copymap.get(f)
312 if source:
312 if source:
313 copies[f] = source
313 copies[f] = source
314 self.normallookup(f)
314 self.normallookup(f)
315 # Also fix up otherparent markers
315 # Also fix up otherparent markers
316 elif s[0] == 'n' and s[2] == -2:
316 elif s[0] == 'n' and s[2] == -2:
317 source = self._map.copymap.get(f)
317 source = self._map.copymap.get(f)
318 if source:
318 if source:
319 copies[f] = source
319 copies[f] = source
320 self.add(f)
320 self.add(f)
321 return copies
321 return copies
322
322
323 def setbranch(self, branch):
323 def setbranch(self, branch):
324 self.__class__._branch.set(self, encoding.fromlocal(branch))
324 self.__class__._branch.set(self, encoding.fromlocal(branch))
325 f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
325 f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
326 try:
326 try:
327 f.write(self._branch + '\n')
327 f.write(self._branch + '\n')
328 f.close()
328 f.close()
329
329
330 # make sure filecache has the correct stat info for _branch after
330 # make sure filecache has the correct stat info for _branch after
331 # replacing the underlying file
331 # replacing the underlying file
332 ce = self._filecache['_branch']
332 ce = self._filecache['_branch']
333 if ce:
333 if ce:
334 ce.refresh()
334 ce.refresh()
335 except: # re-raises
335 except: # re-raises
336 f.discard()
336 f.discard()
337 raise
337 raise
338
338
339 def invalidate(self):
339 def invalidate(self):
340 '''Causes the next access to reread the dirstate.
340 '''Causes the next access to reread the dirstate.
341
341
342 This is different from localrepo.invalidatedirstate() because it always
342 This is different from localrepo.invalidatedirstate() because it always
343 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
343 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
344 check whether the dirstate has changed before rereading it.'''
344 check whether the dirstate has changed before rereading it.'''
345
345
346 for a in (r"_map", r"_branch", r"_ignore"):
346 for a in (r"_map", r"_branch", r"_ignore"):
347 if a in self.__dict__:
347 if a in self.__dict__:
348 delattr(self, a)
348 delattr(self, a)
349 self._lastnormaltime = 0
349 self._lastnormaltime = 0
350 self._dirty = False
350 self._dirty = False
351 self._updatedfiles.clear()
351 self._updatedfiles.clear()
352 self._parentwriters = 0
352 self._parentwriters = 0
353 self._origpl = None
353 self._origpl = None
354
354
355 def copy(self, source, dest):
355 def copy(self, source, dest):
356 """Mark dest as a copy of source. Unmark dest if source is None."""
356 """Mark dest as a copy of source. Unmark dest if source is None."""
357 if source == dest:
357 if source == dest:
358 return
358 return
359 self._dirty = True
359 self._dirty = True
360 if source is not None:
360 if source is not None:
361 self._map.copymap[dest] = source
361 self._map.copymap[dest] = source
362 self._updatedfiles.add(source)
362 self._updatedfiles.add(source)
363 self._updatedfiles.add(dest)
363 self._updatedfiles.add(dest)
364 elif self._map.copymap.pop(dest, None):
364 elif self._map.copymap.pop(dest, None):
365 self._updatedfiles.add(dest)
365 self._updatedfiles.add(dest)
366
366
367 def copied(self, file):
367 def copied(self, file):
368 return self._map.copymap.get(file, None)
368 return self._map.copymap.get(file, None)
369
369
370 def copies(self):
370 def copies(self):
371 return self._map.copymap
371 return self._map.copymap
372
372
373 def _addpath(self, f, state, mode, size, mtime):
373 def _addpath(self, f, state, mode, size, mtime):
374 oldstate = self[f]
374 oldstate = self[f]
375 if state == 'a' or oldstate == 'r':
375 if state == 'a' or oldstate == 'r':
376 scmutil.checkfilename(f)
376 scmutil.checkfilename(f)
377 if self._map.hastrackeddir(f):
377 if self._map.hastrackeddir(f):
378 raise error.Abort(_('directory %r already in dirstate') %
378 raise error.Abort(_('directory %r already in dirstate') %
379 pycompat.bytestr(f))
379 pycompat.bytestr(f))
380 # shadows
380 # shadows
381 for d in util.finddirs(f):
381 for d in util.finddirs(f):
382 if self._map.hastrackeddir(d):
382 if self._map.hastrackeddir(d):
383 break
383 break
384 entry = self._map.get(d)
384 entry = self._map.get(d)
385 if entry is not None and entry[0] != 'r':
385 if entry is not None and entry[0] != 'r':
386 raise error.Abort(
386 raise error.Abort(
387 _('file %r in dirstate clashes with %r') %
387 _('file %r in dirstate clashes with %r') %
388 (pycompat.bytestr(d), pycompat.bytestr(f)))
388 (pycompat.bytestr(d), pycompat.bytestr(f)))
389 self._dirty = True
389 self._dirty = True
390 self._updatedfiles.add(f)
390 self._updatedfiles.add(f)
391 self._map.addfile(f, oldstate, state, mode, size, mtime)
391 self._map.addfile(f, oldstate, state, mode, size, mtime)
392
392
393 def normal(self, f):
393 def normal(self, f):
394 '''Mark a file normal and clean.'''
394 '''Mark a file normal and clean.'''
395 s = os.lstat(self._join(f))
395 s = os.lstat(self._join(f))
396 mtime = s[stat.ST_MTIME]
396 mtime = s[stat.ST_MTIME]
397 self._addpath(f, 'n', s.st_mode,
397 self._addpath(f, 'n', s.st_mode,
398 s.st_size & _rangemask, mtime & _rangemask)
398 s.st_size & _rangemask, mtime & _rangemask)
399 self._map.copymap.pop(f, None)
399 self._map.copymap.pop(f, None)
400 if f in self._map.nonnormalset:
400 if f in self._map.nonnormalset:
401 self._map.nonnormalset.remove(f)
401 self._map.nonnormalset.remove(f)
402 if mtime > self._lastnormaltime:
402 if mtime > self._lastnormaltime:
403 # Remember the most recent modification timeslot for status(),
403 # Remember the most recent modification timeslot for status(),
404 # to make sure we won't miss future size-preserving file content
404 # to make sure we won't miss future size-preserving file content
405 # modifications that happen within the same timeslot.
405 # modifications that happen within the same timeslot.
406 self._lastnormaltime = mtime
406 self._lastnormaltime = mtime
407
407
408 def normallookup(self, f):
408 def normallookup(self, f):
409 '''Mark a file normal, but possibly dirty.'''
409 '''Mark a file normal, but possibly dirty.'''
410 if self._pl[1] != nullid:
410 if self._pl[1] != nullid:
411 # if there is a merge going on and the file was either
411 # if there is a merge going on and the file was either
412 # in state 'm' (-1) or coming from other parent (-2) before
412 # in state 'm' (-1) or coming from other parent (-2) before
413 # being removed, restore that state.
413 # being removed, restore that state.
414 entry = self._map.get(f)
414 entry = self._map.get(f)
415 if entry is not None:
415 if entry is not None:
416 if entry[0] == 'r' and entry[2] in (-1, -2):
416 if entry[0] == 'r' and entry[2] in (-1, -2):
417 source = self._map.copymap.get(f)
417 source = self._map.copymap.get(f)
418 if entry[2] == -1:
418 if entry[2] == -1:
419 self.merge(f)
419 self.merge(f)
420 elif entry[2] == -2:
420 elif entry[2] == -2:
421 self.otherparent(f)
421 self.otherparent(f)
422 if source:
422 if source:
423 self.copy(source, f)
423 self.copy(source, f)
424 return
424 return
425 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
425 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
426 return
426 return
427 self._addpath(f, 'n', 0, -1, -1)
427 self._addpath(f, 'n', 0, -1, -1)
428 self._map.copymap.pop(f, None)
428 self._map.copymap.pop(f, None)
429
429
430 def otherparent(self, f):
430 def otherparent(self, f):
431 '''Mark as coming from the other parent, always dirty.'''
431 '''Mark as coming from the other parent, always dirty.'''
432 if self._pl[1] == nullid:
432 if self._pl[1] == nullid:
433 raise error.Abort(_("setting %r to other parent "
433 raise error.Abort(_("setting %r to other parent "
434 "only allowed in merges") % f)
434 "only allowed in merges") % f)
435 if f in self and self[f] == 'n':
435 if f in self and self[f] == 'n':
436 # merge-like
436 # merge-like
437 self._addpath(f, 'm', 0, -2, -1)
437 self._addpath(f, 'm', 0, -2, -1)
438 else:
438 else:
439 # add-like
439 # add-like
440 self._addpath(f, 'n', 0, -2, -1)
440 self._addpath(f, 'n', 0, -2, -1)
441 self._map.copymap.pop(f, None)
441 self._map.copymap.pop(f, None)
442
442
443 def add(self, f):
443 def add(self, f):
444 '''Mark a file added.'''
444 '''Mark a file added.'''
445 self._addpath(f, 'a', 0, -1, -1)
445 self._addpath(f, 'a', 0, -1, -1)
446 self._map.copymap.pop(f, None)
446 self._map.copymap.pop(f, None)
447
447
448 def remove(self, f):
448 def remove(self, f):
449 '''Mark a file removed.'''
449 '''Mark a file removed.'''
450 self._dirty = True
450 self._dirty = True
451 oldstate = self[f]
451 oldstate = self[f]
452 size = 0
452 size = 0
453 if self._pl[1] != nullid:
453 if self._pl[1] != nullid:
454 entry = self._map.get(f)
454 entry = self._map.get(f)
455 if entry is not None:
455 if entry is not None:
456 # backup the previous state
456 # backup the previous state
457 if entry[0] == 'm': # merge
457 if entry[0] == 'm': # merge
458 size = -1
458 size = -1
459 elif entry[0] == 'n' and entry[2] == -2: # other parent
459 elif entry[0] == 'n' and entry[2] == -2: # other parent
460 size = -2
460 size = -2
461 self._map.otherparentset.add(f)
461 self._map.otherparentset.add(f)
462 self._updatedfiles.add(f)
462 self._updatedfiles.add(f)
463 self._map.removefile(f, oldstate, size)
463 self._map.removefile(f, oldstate, size)
464 if size == 0:
464 if size == 0:
465 self._map.copymap.pop(f, None)
465 self._map.copymap.pop(f, None)
466
466
467 def merge(self, f):
467 def merge(self, f):
468 '''Mark a file merged.'''
468 '''Mark a file merged.'''
469 if self._pl[1] == nullid:
469 if self._pl[1] == nullid:
470 return self.normallookup(f)
470 return self.normallookup(f)
471 return self.otherparent(f)
471 return self.otherparent(f)
472
472
473 def drop(self, f):
473 def drop(self, f):
474 '''Drop a file from the dirstate'''
474 '''Drop a file from the dirstate'''
475 oldstate = self[f]
475 oldstate = self[f]
476 if self._map.dropfile(f, oldstate):
476 if self._map.dropfile(f, oldstate):
477 self._dirty = True
477 self._dirty = True
478 self._updatedfiles.add(f)
478 self._updatedfiles.add(f)
479 self._map.copymap.pop(f, None)
479 self._map.copymap.pop(f, None)
480
480
481 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
481 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
482 if exists is None:
482 if exists is None:
483 exists = os.path.lexists(os.path.join(self._root, path))
483 exists = os.path.lexists(os.path.join(self._root, path))
484 if not exists:
484 if not exists:
485 # Maybe a path component exists
485 # Maybe a path component exists
486 if not ignoremissing and '/' in path:
486 if not ignoremissing and '/' in path:
487 d, f = path.rsplit('/', 1)
487 d, f = path.rsplit('/', 1)
488 d = self._normalize(d, False, ignoremissing, None)
488 d = self._normalize(d, False, ignoremissing, None)
489 folded = d + "/" + f
489 folded = d + "/" + f
490 else:
490 else:
491 # No path components, preserve original case
491 # No path components, preserve original case
492 folded = path
492 folded = path
493 else:
493 else:
494 # recursively normalize leading directory components
494 # recursively normalize leading directory components
495 # against dirstate
495 # against dirstate
496 if '/' in normed:
496 if '/' in normed:
497 d, f = normed.rsplit('/', 1)
497 d, f = normed.rsplit('/', 1)
498 d = self._normalize(d, False, ignoremissing, True)
498 d = self._normalize(d, False, ignoremissing, True)
499 r = self._root + "/" + d
499 r = self._root + "/" + d
500 folded = d + "/" + util.fspath(f, r)
500 folded = d + "/" + util.fspath(f, r)
501 else:
501 else:
502 folded = util.fspath(normed, self._root)
502 folded = util.fspath(normed, self._root)
503 storemap[normed] = folded
503 storemap[normed] = folded
504
504
505 return folded
505 return folded
506
506
507 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
507 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
508 normed = util.normcase(path)
508 normed = util.normcase(path)
509 folded = self._map.filefoldmap.get(normed, None)
509 folded = self._map.filefoldmap.get(normed, None)
510 if folded is None:
510 if folded is None:
511 if isknown:
511 if isknown:
512 folded = path
512 folded = path
513 else:
513 else:
514 folded = self._discoverpath(path, normed, ignoremissing, exists,
514 folded = self._discoverpath(path, normed, ignoremissing, exists,
515 self._map.filefoldmap)
515 self._map.filefoldmap)
516 return folded
516 return folded
517
517
518 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
518 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
519 normed = util.normcase(path)
519 normed = util.normcase(path)
520 folded = self._map.filefoldmap.get(normed, None)
520 folded = self._map.filefoldmap.get(normed, None)
521 if folded is None:
521 if folded is None:
522 folded = self._map.dirfoldmap.get(normed, None)
522 folded = self._map.dirfoldmap.get(normed, None)
523 if folded is None:
523 if folded is None:
524 if isknown:
524 if isknown:
525 folded = path
525 folded = path
526 else:
526 else:
527 # store discovered result in dirfoldmap so that future
527 # store discovered result in dirfoldmap so that future
528 # normalizefile calls don't start matching directories
528 # normalizefile calls don't start matching directories
529 folded = self._discoverpath(path, normed, ignoremissing, exists,
529 folded = self._discoverpath(path, normed, ignoremissing, exists,
530 self._map.dirfoldmap)
530 self._map.dirfoldmap)
531 return folded
531 return folded
532
532
533 def normalize(self, path, isknown=False, ignoremissing=False):
533 def normalize(self, path, isknown=False, ignoremissing=False):
534 '''
534 '''
535 normalize the case of a pathname when on a casefolding filesystem
535 normalize the case of a pathname when on a casefolding filesystem
536
536
537 isknown specifies whether the filename came from walking the
537 isknown specifies whether the filename came from walking the
538 disk, to avoid extra filesystem access.
538 disk, to avoid extra filesystem access.
539
539
540 If ignoremissing is True, missing path are returned
540 If ignoremissing is True, missing path are returned
541 unchanged. Otherwise, we try harder to normalize possibly
541 unchanged. Otherwise, we try harder to normalize possibly
542 existing path components.
542 existing path components.
543
543
544 The normalized case is determined based on the following precedence:
544 The normalized case is determined based on the following precedence:
545
545
546 - version of name already stored in the dirstate
546 - version of name already stored in the dirstate
547 - version of name stored on disk
547 - version of name stored on disk
548 - version provided via command arguments
548 - version provided via command arguments
549 '''
549 '''
550
550
551 if self._checkcase:
551 if self._checkcase:
552 return self._normalize(path, isknown, ignoremissing)
552 return self._normalize(path, isknown, ignoremissing)
553 return path
553 return path
554
554
555 def clear(self):
555 def clear(self):
556 self._map.clear()
556 self._map.clear()
557 self._lastnormaltime = 0
557 self._lastnormaltime = 0
558 self._updatedfiles.clear()
558 self._updatedfiles.clear()
559 self._dirty = True
559 self._dirty = True
560
560
561 def rebuild(self, parent, allfiles, changedfiles=None):
561 def rebuild(self, parent, allfiles, changedfiles=None):
562 if changedfiles is None:
562 if changedfiles is None:
563 # Rebuild entire dirstate
563 # Rebuild entire dirstate
564 changedfiles = allfiles
564 changedfiles = allfiles
565 lastnormaltime = self._lastnormaltime
565 lastnormaltime = self._lastnormaltime
566 self.clear()
566 self.clear()
567 self._lastnormaltime = lastnormaltime
567 self._lastnormaltime = lastnormaltime
568
568
569 if self._origpl is None:
569 if self._origpl is None:
570 self._origpl = self._pl
570 self._origpl = self._pl
571 self._map.setparents(parent, nullid)
571 self._map.setparents(parent, nullid)
572 for f in changedfiles:
572 for f in changedfiles:
573 if f in allfiles:
573 if f in allfiles:
574 self.normallookup(f)
574 self.normallookup(f)
575 else:
575 else:
576 self.drop(f)
576 self.drop(f)
577
577
578 self._dirty = True
578 self._dirty = True
579
579
580 def identity(self):
580 def identity(self):
581 '''Return identity of dirstate itself to detect changing in storage
581 '''Return identity of dirstate itself to detect changing in storage
582
582
583 If identity of previous dirstate is equal to this, writing
583 If identity of previous dirstate is equal to this, writing
584 changes based on the former dirstate out can keep consistency.
584 changes based on the former dirstate out can keep consistency.
585 '''
585 '''
586 return self._map.identity
586 return self._map.identity
587
587
588 def write(self, tr):
588 def write(self, tr):
589 if not self._dirty:
589 if not self._dirty:
590 return
590 return
591
591
592 filename = self._filename
592 filename = self._filename
593 if tr:
593 if tr:
594 # 'dirstate.write()' is not only for writing in-memory
594 # 'dirstate.write()' is not only for writing in-memory
595 # changes out, but also for dropping ambiguous timestamp.
595 # changes out, but also for dropping ambiguous timestamp.
596 # delayed writing re-raise "ambiguous timestamp issue".
596 # delayed writing re-raise "ambiguous timestamp issue".
597 # See also the wiki page below for detail:
597 # See also the wiki page below for detail:
598 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
598 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
599
599
600 # emulate dropping timestamp in 'parsers.pack_dirstate'
600 # emulate dropping timestamp in 'parsers.pack_dirstate'
601 now = _getfsnow(self._opener)
601 now = _getfsnow(self._opener)
602 self._map.clearambiguoustimes(self._updatedfiles, now)
602 self._map.clearambiguoustimes(self._updatedfiles, now)
603
603
604 # emulate that all 'dirstate.normal' results are written out
604 # emulate that all 'dirstate.normal' results are written out
605 self._lastnormaltime = 0
605 self._lastnormaltime = 0
606 self._updatedfiles.clear()
606 self._updatedfiles.clear()
607
607
608 # delay writing in-memory changes out
608 # delay writing in-memory changes out
609 tr.addfilegenerator('dirstate', (self._filename,),
609 tr.addfilegenerator('dirstate', (self._filename,),
610 self._writedirstate, location='plain')
610 self._writedirstate, location='plain')
611 return
611 return
612
612
613 st = self._opener(filename, "w", atomictemp=True, checkambig=True)
613 st = self._opener(filename, "w", atomictemp=True, checkambig=True)
614 self._writedirstate(st)
614 self._writedirstate(st)
615
615
616 def addparentchangecallback(self, category, callback):
616 def addparentchangecallback(self, category, callback):
617 """add a callback to be called when the wd parents are changed
617 """add a callback to be called when the wd parents are changed
618
618
619 Callback will be called with the following arguments:
619 Callback will be called with the following arguments:
620 dirstate, (oldp1, oldp2), (newp1, newp2)
620 dirstate, (oldp1, oldp2), (newp1, newp2)
621
621
622 Category is a unique identifier to allow overwriting an old callback
622 Category is a unique identifier to allow overwriting an old callback
623 with a newer callback.
623 with a newer callback.
624 """
624 """
625 self._plchangecallbacks[category] = callback
625 self._plchangecallbacks[category] = callback
626
626
    def _writedirstate(self, st):
        '''Serialize the dirstate map into file-like object *st*.

        Fires registered parent-change callbacks if the working directory
        parents changed since the dirstate was read, then packs the map
        via self._map.write() and clears the dirty flag.

        May sleep when the 'debug.dirstate.delaywrite' option is set, so
        that entries whose mtime equals "now" are not left ambiguous.
        '''
        # notify callbacks about parents change
        if self._origpl is not None and self._origpl != self._pl:
            for c, callback in sorted(self._plchangecallbacks.iteritems()):
                callback(self, self._origpl, self._pl)
            self._origpl = None
        # use the modification time of the newly created temporary file as the
        # filesystem's notion of 'now'
        now = util.fstat(st)[stat.ST_MTIME] & _rangemask

        # enough 'delaywrite' prevents 'pack_dirstate' from dropping
        # timestamp of each entries in dirstate, because of 'now > mtime'
        delaywrite = self._ui.configint('debug', 'dirstate.delaywrite')
        if delaywrite > 0:
            # do we have any files to delay for?
            for f, e in self._map.iteritems():
                # entry layout is (state, mode, size, mtime); only normal
                # ('n') entries stamped exactly "now" are ambiguous
                if e[0] == 'n' and e[3] == now:
                    import time # to avoid useless import
                    # rather than sleep n seconds, sleep until the next
                    # multiple of n seconds
                    clock = time.time()
                    start = int(clock) - (int(clock) % delaywrite)
                    end = start + delaywrite
                    time.sleep(end - clock)
                    now = end # trust our estimate that the end is near now
                    break

        self._map.write(st, now)
        self._lastnormaltime = 0
        self._dirty = False
657
657
658 def _dirignore(self, f):
658 def _dirignore(self, f):
659 if f == '.':
659 if f == '.':
660 return False
660 return False
661 if self._ignore(f):
661 if self._ignore(f):
662 return True
662 return True
663 for p in util.finddirs(f):
663 for p in util.finddirs(f):
664 if self._ignore(p):
664 if self._ignore(p):
665 return True
665 return True
666 return False
666 return False
667
667
668 def _ignorefiles(self):
668 def _ignorefiles(self):
669 files = []
669 files = []
670 if os.path.exists(self._join('.hgignore')):
670 if os.path.exists(self._join('.hgignore')):
671 files.append(self._join('.hgignore'))
671 files.append(self._join('.hgignore'))
672 for name, path in self._ui.configitems("ui"):
672 for name, path in self._ui.configitems("ui"):
673 if name == 'ignore' or name.startswith('ignore.'):
673 if name == 'ignore' or name.startswith('ignore.'):
674 # we need to use os.path.join here rather than self._join
674 # we need to use os.path.join here rather than self._join
675 # because path is arbitrary and user-specified
675 # because path is arbitrary and user-specified
676 files.append(os.path.join(self._rootdir, util.expandpath(path)))
676 files.append(os.path.join(self._rootdir, util.expandpath(path)))
677 return files
677 return files
678
678
679 def _ignorefileandline(self, f):
679 def _ignorefileandline(self, f):
680 files = collections.deque(self._ignorefiles())
680 files = collections.deque(self._ignorefiles())
681 visited = set()
681 visited = set()
682 while files:
682 while files:
683 i = files.popleft()
683 i = files.popleft()
684 patterns = matchmod.readpatternfile(i, self._ui.warn,
684 patterns = matchmod.readpatternfile(i, self._ui.warn,
685 sourceinfo=True)
685 sourceinfo=True)
686 for pattern, lineno, line in patterns:
686 for pattern, lineno, line in patterns:
687 kind, p = matchmod._patsplit(pattern, 'glob')
687 kind, p = matchmod._patsplit(pattern, 'glob')
688 if kind == "subinclude":
688 if kind == "subinclude":
689 if p not in visited:
689 if p not in visited:
690 files.append(p)
690 files.append(p)
691 continue
691 continue
692 m = matchmod.match(self._root, '', [], [pattern],
692 m = matchmod.match(self._root, '', [], [pattern],
693 warn=self._ui.warn)
693 warn=self._ui.warn)
694 if m(f):
694 if m(f):
695 return (i, lineno, line)
695 return (i, lineno, line)
696 visited.add(i)
696 visited.add(i)
697 return (None, -1, "")
697 return (None, -1, "")
698
698
    def _walkexplicit(self, match, subrepos):
        '''Get stat data about the files explicitly specified by match.

        Return a triple (results, dirsfound, dirsnotfound).
        - results is a mapping from filename to stat result. It also contains
          listings mapping subrepos and .hg to None.
        - dirsfound is a list of files found to be directories.
        - dirsnotfound is a list of files that the dirstate thinks are
          directories and that were not found.'''

        def badtype(mode):
            # Build the "unsupported file type" message for a non-regular,
            # non-symlink, non-directory entry (device, fifo, socket, ...).
            kind = _('unknown')
            if stat.S_ISCHR(mode):
                kind = _('character device')
            elif stat.S_ISBLK(mode):
                kind = _('block device')
            elif stat.S_ISFIFO(mode):
                kind = _('fifo')
            elif stat.S_ISSOCK(mode):
                kind = _('socket')
            elif stat.S_ISDIR(mode):
                kind = _('directory')
            return _('unsupported file type (type is %s)') % kind

        # Bind frequently used attributes/methods to locals: this loop can
        # run once per explicitly named file.
        matchedir = match.explicitdir
        badfn = match.bad
        dmap = self._map
        lstat = os.lstat
        getkind = stat.S_IFMT
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join
        dirsfound = []
        foundadd = dirsfound.append
        dirsnotfound = []
        notfoundadd = dirsnotfound.append

        if not match.isexact() and self._checkcase:
            normalize = self._normalize
        else:
            normalize = None

        # Walk files and subrepos in lockstep (both sorted) and delete any
        # explicitly named file that lives inside a subrepo: the walk stops
        # at subrepo boundaries.
        files = sorted(match.files())
        subrepos.sort()
        i, j = 0, 0
        while i < len(files) and j < len(subrepos):
            subpath = subrepos[j] + "/"
            if files[i] < subpath:
                i += 1
                continue
            while i < len(files) and files[i].startswith(subpath):
                del files[i]
            j += 1

        if not files or '.' in files:
            files = ['.']
        # Sentinel None entries for subrepos and .hg disable further
        # walking into those paths.
        results = dict.fromkeys(subrepos)
        results['.hg'] = None

        for ff in files:
            # constructing the foldmap is expensive, so don't do it for the
            # common case where files is ['.']
            if normalize and ff != '.':
                nf = normalize(ff, False, True)
            else:
                nf = ff
            if nf in results:
                continue

            try:
                st = lstat(join(nf))
                kind = getkind(st.st_mode)
                if kind == dirkind:
                    if nf in dmap:
                        # file replaced by dir on disk but still in dirstate
                        results[nf] = None
                    if matchedir:
                        matchedir(nf)
                    foundadd((nf, ff))
                elif kind == regkind or kind == lnkkind:
                    results[nf] = st
                else:
                    badfn(ff, badtype(kind))
                    if nf in dmap:
                        results[nf] = None
            except OSError as inst: # nf not found on disk - it is dirstate only
                if nf in dmap: # does it exactly match a missing file?
                    results[nf] = None
                else: # does it match a missing directory?
                    if self._map.hasdir(nf):
                        if matchedir:
                            matchedir(nf)
                        notfoundadd(nf)
                    else:
                        badfn(ff, encoding.strtolocal(inst.strerror))

        # match.files() may contain explicitly-specified paths that shouldn't
        # be taken; drop them from the list of files found. dirsfound/notfound
        # aren't filtered here because they will be tested later.
        if match.anypats():
            for f in list(results):
                if f == '.hg' or f in subrepos:
                    # keep sentinel to disable further out-of-repo walks
                    continue
                if not match(f):
                    del results[f]

        # Case insensitive filesystems cannot rely on lstat() failing to detect
        # a case-only rename. Prune the stat object for any file that does not
        # match the case in the filesystem, if there are multiple files that
        # normalize to the same path.
        if match.isexact() and self._checkcase:
            normed = {}

            # group the stat'ed files by their case-normalized spelling
            for f, st in results.iteritems():
                if st is None:
                    continue

                nc = util.normcase(f)
                paths = normed.get(nc)

                if paths is None:
                    paths = set()
                    normed[nc] = paths

                paths.add(f)

            # within each group, keep the stat only for the spelling that
            # actually exists on disk
            for norm, paths in normed.iteritems():
                if len(paths) > 1:
                    for path in paths:
                        folded = self._discoverpath(path, norm, True, None,
                                                    self._map.dirfoldmap)
                        if path != folded:
                            results[path] = None

        return results, dirsfound, dirsnotfound
836
836
    def walk(self, match, subrepos, unknown, ignored, full=True):
        '''
        Walk recursively through the directory tree, finding all files
        matched by match.

        If full is False, maybe skip some known-clean files.

        Return a dict mapping filename to stat-like object (either
        mercurial.osutil.stat instance or return value of os.stat()).

        '''
        # full is a flag that extensions that hook into walk can use -- this
        # implementation doesn't use it at all. This satisfies the contract
        # because we only guarantee a "maybe".

        # Pick the file/dir ignore predicates. Listing ignored files
        # disables ignoring entirely (util.never).
        if ignored:
            ignore = util.never
            dirignore = util.never
        elif unknown:
            ignore = self._ignore
            dirignore = self._dirignore
        else:
            # if not unknown and not ignored, drop dir recursion and step 2
            ignore = util.always
            dirignore = util.always

        # Bind hot attributes to locals for the traversal loops below.
        matchfn = match.matchfn
        matchalways = match.always()
        matchtdir = match.traversedir
        dmap = self._map
        listdir = util.listdir
        lstat = os.lstat
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join

        exact = skipstep3 = False
        if match.isexact(): # match.exact
            exact = True
            dirignore = util.always # skip step 2
        elif match.prefix(): # match.match, no patterns
            skipstep3 = True

        if not exact and self._checkcase:
            normalize = self._normalize
            normalizefile = self._normalizefile
            skipstep3 = False
        else:
            normalize = self._normalize
            normalizefile = None

        # step 1: find all explicit files
        results, work, dirsnotfound = self._walkexplicit(match, subrepos)

        skipstep3 = skipstep3 and not (work or dirsnotfound)
        work = [d for d in work if not dirignore(d[0])]

        # step 2: visit subdirectories
        def traverse(work, alreadynormed):
            # Depth-first walk of the (normalized-name) directories in
            # `work`, accumulating matches into `results`.
            wadd = work.append
            while work:
                nd = work.pop()
                visitentries = match.visitchildrenset(nd)
                if not visitentries:
                    continue
                if visitentries == 'this' or visitentries == 'all':
                    visitentries = None
                skip = None
                if nd == '.':
                    nd = ''
                else:
                    skip = '.hg'
                try:
                    entries = listdir(join(nd), stat=True, skip=skip)
                except OSError as inst:
                    if inst.errno in (errno.EACCES, errno.ENOENT):
                        match.bad(self.pathto(nd),
                                  encoding.strtolocal(inst.strerror))
                        continue
                    raise
                for f, kind, st in entries:
                    # Some matchers may return files in the visitentries set,
                    # instead of 'this', if the matcher explicitly mentions them
                    # and is not an exactmatcher. This is acceptable; we do not
                    # make any hard assumptions about file-or-directory below
                    # based on the presence of `f` in visitentries. If
                    # visitchildrenset returned a set, we can always skip the
                    # entries *not* in the set it provided regardless of whether
                    # they're actually a file or a directory.
                    if visitentries and f not in visitentries:
                        continue
                    if normalizefile:
                        # even though f might be a directory, we're only
                        # interested in comparing it to files currently in the
                        # dmap -- therefore normalizefile is enough
                        nf = normalizefile(nd and (nd + "/" + f) or f, True,
                                           True)
                    else:
                        nf = nd and (nd + "/" + f) or f
                    if nf not in results:
                        if kind == dirkind:
                            if not ignore(nf):
                                if matchtdir:
                                    matchtdir(nf)
                                wadd(nf)
                            if nf in dmap and (matchalways or matchfn(nf)):
                                results[nf] = None
                        elif kind == regkind or kind == lnkkind:
                            if nf in dmap:
                                if matchalways or matchfn(nf):
                                    results[nf] = st
                            elif ((matchalways or matchfn(nf))
                                  and not ignore(nf)):
                                # unknown file -- normalize if necessary
                                if not alreadynormed:
                                    nf = normalize(nf, False, True)
                                results[nf] = st
                        elif nf in dmap and (matchalways or matchfn(nf)):
                            results[nf] = None

        for nd, d in work:
            # alreadynormed means that processwork doesn't have to do any
            # expensive directory normalization
            alreadynormed = not normalize or nd == d
            traverse([d], alreadynormed)

        # drop the sentinel entries installed by _walkexplicit
        for s in subrepos:
            del results[s]
        del results['.hg']

        # step 3: visit remaining files from dmap
        if not skipstep3 and not exact:
            # If a dmap file is not in results yet, it was either
            # a) not matching matchfn b) ignored, c) missing, or d) under a
            # symlink directory.
            if not results and matchalways:
                visit = [f for f in dmap]
            else:
                visit = [f for f in dmap if f not in results and matchfn(f)]
            visit.sort()

            if unknown:
                # unknown == True means we walked all dirs under the roots
                # that wasn't ignored, and everything that matched was stat'ed
                # and is already in results.
                # The rest must thus be ignored or under a symlink.
                audit_path = pathutil.pathauditor(self._root, cached=True)

                for nf in iter(visit):
                    # If a stat for the same file was already added with a
                    # different case, don't add one for this, since that would
                    # make it appear as if the file exists under both names
                    # on disk.
                    if (normalizefile and
                        normalizefile(nf, True, True) in results):
                        results[nf] = None
                    # Report ignored items in the dmap as long as they are not
                    # under a symlink directory.
                    elif audit_path.check(nf):
                        try:
                            results[nf] = lstat(join(nf))
                            # file was just ignored, no links, and exists
                        except OSError:
                            # file doesn't exist
                            results[nf] = None
                    else:
                        # It's either missing or under a symlink directory
                        # which we in this case report as missing
                        results[nf] = None
            else:
                # We may not have walked the full directory tree above,
                # so stat and check everything we missed.
                iv = iter(visit)
                for st in util.statfiles([join(i) for i in visit]):
                    results[next(iv)] = st
        return results
1014
1014
1015 def status(self, match, subrepos, ignored, clean, unknown):
1015 def status(self, match, subrepos, ignored, clean, unknown):
1016 '''Determine the status of the working copy relative to the
1016 '''Determine the status of the working copy relative to the
1017 dirstate and return a pair of (unsure, status), where status is of type
1017 dirstate and return a pair of (unsure, status), where status is of type
1018 scmutil.status and:
1018 scmutil.status and:
1019
1019
1020 unsure:
1020 unsure:
1021 files that might have been modified since the dirstate was
1021 files that might have been modified since the dirstate was
1022 written, but need to be read to be sure (size is the same
1022 written, but need to be read to be sure (size is the same
1023 but mtime differs)
1023 but mtime differs)
1024 status.modified:
1024 status.modified:
1025 files that have definitely been modified since the dirstate
1025 files that have definitely been modified since the dirstate
1026 was written (different size or mode)
1026 was written (different size or mode)
1027 status.clean:
1027 status.clean:
1028 files that have definitely not been modified since the
1028 files that have definitely not been modified since the
1029 dirstate was written
1029 dirstate was written
1030 '''
1030 '''
1031 listignored, listclean, listunknown = ignored, clean, unknown
1031 listignored, listclean, listunknown = ignored, clean, unknown
1032 lookup, modified, added, unknown, ignored = [], [], [], [], []
1032 lookup, modified, added, unknown, ignored = [], [], [], [], []
1033 removed, deleted, clean = [], [], []
1033 removed, deleted, clean = [], [], []
1034
1034
1035 dmap = self._map
1035 dmap = self._map
1036 dmap.preload()
1036 dmap.preload()
1037 dcontains = dmap.__contains__
1037 dcontains = dmap.__contains__
1038 dget = dmap.__getitem__
1038 dget = dmap.__getitem__
1039 ladd = lookup.append # aka "unsure"
1039 ladd = lookup.append # aka "unsure"
1040 madd = modified.append
1040 madd = modified.append
1041 aadd = added.append
1041 aadd = added.append
1042 uadd = unknown.append
1042 uadd = unknown.append
1043 iadd = ignored.append
1043 iadd = ignored.append
1044 radd = removed.append
1044 radd = removed.append
1045 dadd = deleted.append
1045 dadd = deleted.append
1046 cadd = clean.append
1046 cadd = clean.append
1047 mexact = match.exact
1047 mexact = match.exact
1048 dirignore = self._dirignore
1048 dirignore = self._dirignore
1049 checkexec = self._checkexec
1049 checkexec = self._checkexec
1050 copymap = self._map.copymap
1050 copymap = self._map.copymap
1051 lastnormaltime = self._lastnormaltime
1051 lastnormaltime = self._lastnormaltime
1052
1052
1053 # We need to do full walks when either
1053 # We need to do full walks when either
1054 # - we're listing all clean files, or
1054 # - we're listing all clean files, or
1055 # - match.traversedir does something, because match.traversedir should
1055 # - match.traversedir does something, because match.traversedir should
1056 # be called for every dir in the working dir
1056 # be called for every dir in the working dir
1057 full = listclean or match.traversedir is not None
1057 full = listclean or match.traversedir is not None
1058 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1058 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1059 full=full).iteritems():
1059 full=full).iteritems():
1060 if not dcontains(fn):
1060 if not dcontains(fn):
1061 if (listignored or mexact(fn)) and dirignore(fn):
1061 if (listignored or mexact(fn)) and dirignore(fn):
1062 if listignored:
1062 if listignored:
1063 iadd(fn)
1063 iadd(fn)
1064 else:
1064 else:
1065 uadd(fn)
1065 uadd(fn)
1066 continue
1066 continue
1067
1067
1068 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1068 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1069 # written like that for performance reasons. dmap[fn] is not a
1069 # written like that for performance reasons. dmap[fn] is not a
1070 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1070 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1071 # opcode has fast paths when the value to be unpacked is a tuple or
1071 # opcode has fast paths when the value to be unpacked is a tuple or
1072 # a list, but falls back to creating a full-fledged iterator in
1072 # a list, but falls back to creating a full-fledged iterator in
1073 # general. That is much slower than simply accessing and storing the
1073 # general. That is much slower than simply accessing and storing the
1074 # tuple members one by one.
1074 # tuple members one by one.
1075 t = dget(fn)
1075 t = dget(fn)
1076 state = t[0]
1076 state = t[0]
1077 mode = t[1]
1077 mode = t[1]
1078 size = t[2]
1078 size = t[2]
1079 time = t[3]
1079 time = t[3]
1080
1080
1081 if not st and state in "nma":
1081 if not st and state in "nma":
1082 dadd(fn)
1082 dadd(fn)
1083 elif state == 'n':
1083 elif state == 'n':
1084 if (size >= 0 and
1084 if (size >= 0 and
1085 ((size != st.st_size and size != st.st_size & _rangemask)
1085 ((size != st.st_size and size != st.st_size & _rangemask)
1086 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1086 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1087 or size == -2 # other parent
1087 or size == -2 # other parent
1088 or fn in copymap):
1088 or fn in copymap):
1089 madd(fn)
1089 madd(fn)
1090 elif (time != st[stat.ST_MTIME]
1090 elif (time != st[stat.ST_MTIME]
1091 and time != st[stat.ST_MTIME] & _rangemask):
1091 and time != st[stat.ST_MTIME] & _rangemask):
1092 ladd(fn)
1092 ladd(fn)
1093 elif st[stat.ST_MTIME] == lastnormaltime:
1093 elif st[stat.ST_MTIME] == lastnormaltime:
1094 # fn may have just been marked as normal and it may have
1094 # fn may have just been marked as normal and it may have
1095 # changed in the same second without changing its size.
1095 # changed in the same second without changing its size.
1096 # This can happen if we quickly do multiple commits.
1096 # This can happen if we quickly do multiple commits.
1097 # Force lookup, so we don't miss such a racy file change.
1097 # Force lookup, so we don't miss such a racy file change.
1098 ladd(fn)
1098 ladd(fn)
1099 elif listclean:
1099 elif listclean:
1100 cadd(fn)
1100 cadd(fn)
1101 elif state == 'm':
1101 elif state == 'm':
1102 madd(fn)
1102 madd(fn)
1103 elif state == 'a':
1103 elif state == 'a':
1104 aadd(fn)
1104 aadd(fn)
1105 elif state == 'r':
1105 elif state == 'r':
1106 radd(fn)
1106 radd(fn)
1107
1107
1108 return (lookup, scmutil.status(modified, added, removed, deleted,
1108 return (lookup, scmutil.status(modified, added, removed, deleted,
1109 unknown, ignored, clean))
1109 unknown, ignored, clean))
1110
1110
1111 def matches(self, match):
1111 def matches(self, match):
1112 '''
1112 '''
1113 return files in the dirstate (in whatever state) filtered by match
1113 return files in the dirstate (in whatever state) filtered by match
1114 '''
1114 '''
1115 dmap = self._map
1115 dmap = self._map
1116 if match.always():
1116 if match.always():
1117 return dmap.keys()
1117 return dmap.keys()
1118 files = match.files()
1118 files = match.files()
1119 if match.isexact():
1119 if match.isexact():
1120 # fast path -- filter the other way around, since typically files is
1120 # fast path -- filter the other way around, since typically files is
1121 # much smaller than dmap
1121 # much smaller than dmap
1122 return [f for f in files if f in dmap]
1122 return [f for f in files if f in dmap]
1123 if match.prefix() and all(fn in dmap for fn in files):
1123 if match.prefix() and all(fn in dmap for fn in files):
1124 # fast path -- all the values are known to be files, so just return
1124 # fast path -- all the values are known to be files, so just return
1125 # that
1125 # that
1126 return list(files)
1126 return list(files)
1127 return [f for f in dmap if match(f)]
1127 return [f for f in dmap if match(f)]
1128
1128
1129 def _actualfilename(self, tr):
1129 def _actualfilename(self, tr):
1130 if tr:
1130 if tr:
1131 return self._pendingfilename
1131 return self._pendingfilename
1132 else:
1132 else:
1133 return self._filename
1133 return self._filename
1134
1134
1135 def savebackup(self, tr, backupname):
1135 def savebackup(self, tr, backupname):
1136 '''Save current dirstate into backup file'''
1136 '''Save current dirstate into backup file'''
1137 filename = self._actualfilename(tr)
1137 filename = self._actualfilename(tr)
1138 assert backupname != filename
1138 assert backupname != filename
1139
1139
1140 # use '_writedirstate' instead of 'write' to write changes certainly,
1140 # use '_writedirstate' instead of 'write' to write changes certainly,
1141 # because the latter omits writing out if transaction is running.
1141 # because the latter omits writing out if transaction is running.
1142 # output file will be used to create backup of dirstate at this point.
1142 # output file will be used to create backup of dirstate at this point.
1143 if self._dirty or not self._opener.exists(filename):
1143 if self._dirty or not self._opener.exists(filename):
1144 self._writedirstate(self._opener(filename, "w", atomictemp=True,
1144 self._writedirstate(self._opener(filename, "w", atomictemp=True,
1145 checkambig=True))
1145 checkambig=True))
1146
1146
1147 if tr:
1147 if tr:
1148 # ensure that subsequent tr.writepending returns True for
1148 # ensure that subsequent tr.writepending returns True for
1149 # changes written out above, even if dirstate is never
1149 # changes written out above, even if dirstate is never
1150 # changed after this
1150 # changed after this
1151 tr.addfilegenerator('dirstate', (self._filename,),
1151 tr.addfilegenerator('dirstate', (self._filename,),
1152 self._writedirstate, location='plain')
1152 self._writedirstate, location='plain')
1153
1153
1154 # ensure that pending file written above is unlinked at
1154 # ensure that pending file written above is unlinked at
1155 # failure, even if tr.writepending isn't invoked until the
1155 # failure, even if tr.writepending isn't invoked until the
1156 # end of this transaction
1156 # end of this transaction
1157 tr.registertmp(filename, location='plain')
1157 tr.registertmp(filename, location='plain')
1158
1158
1159 self._opener.tryunlink(backupname)
1159 self._opener.tryunlink(backupname)
1160 # hardlink backup is okay because _writedirstate is always called
1160 # hardlink backup is okay because _writedirstate is always called
1161 # with an "atomictemp=True" file.
1161 # with an "atomictemp=True" file.
1162 util.copyfile(self._opener.join(filename),
1162 util.copyfile(self._opener.join(filename),
1163 self._opener.join(backupname), hardlink=True)
1163 self._opener.join(backupname), hardlink=True)
1164
1164
1165 def restorebackup(self, tr, backupname):
1165 def restorebackup(self, tr, backupname):
1166 '''Restore dirstate by backup file'''
1166 '''Restore dirstate by backup file'''
1167 # this "invalidate()" prevents "wlock.release()" from writing
1167 # this "invalidate()" prevents "wlock.release()" from writing
1168 # changes of dirstate out after restoring from backup file
1168 # changes of dirstate out after restoring from backup file
1169 self.invalidate()
1169 self.invalidate()
1170 filename = self._actualfilename(tr)
1170 filename = self._actualfilename(tr)
1171 o = self._opener
1171 o = self._opener
1172 if util.samefile(o.join(backupname), o.join(filename)):
1172 if util.samefile(o.join(backupname), o.join(filename)):
1173 o.unlink(backupname)
1173 o.unlink(backupname)
1174 else:
1174 else:
1175 o.rename(backupname, filename, checkambig=True)
1175 o.rename(backupname, filename, checkambig=True)
1176
1176
1177 def clearbackup(self, tr, backupname):
1177 def clearbackup(self, tr, backupname):
1178 '''Clear backup file'''
1178 '''Clear backup file'''
1179 self._opener.unlink(backupname)
1179 self._opener.unlink(backupname)
1180
1180
1181 class dirstatemap(object):
1181 class dirstatemap(object):
1182 """Map encapsulating the dirstate's contents.
1182 """Map encapsulating the dirstate's contents.
1183
1183
1184 The dirstate contains the following state:
1184 The dirstate contains the following state:
1185
1185
1186 - `identity` is the identity of the dirstate file, which can be used to
1186 - `identity` is the identity of the dirstate file, which can be used to
1187 detect when changes have occurred to the dirstate file.
1187 detect when changes have occurred to the dirstate file.
1188
1188
1189 - `parents` is a pair containing the parents of the working copy. The
1189 - `parents` is a pair containing the parents of the working copy. The
1190 parents are updated by calling `setparents`.
1190 parents are updated by calling `setparents`.
1191
1191
1192 - the state map maps filenames to tuples of (state, mode, size, mtime),
1192 - the state map maps filenames to tuples of (state, mode, size, mtime),
1193 where state is a single character representing 'normal', 'added',
1193 where state is a single character representing 'normal', 'added',
1194 'removed', or 'merged'. It is read by treating the dirstate as a
1194 'removed', or 'merged'. It is read by treating the dirstate as a
1195 dict. File state is updated by calling the `addfile`, `removefile` and
1195 dict. File state is updated by calling the `addfile`, `removefile` and
1196 `dropfile` methods.
1196 `dropfile` methods.
1197
1197
1198 - `copymap` maps destination filenames to their source filename.
1198 - `copymap` maps destination filenames to their source filename.
1199
1199
1200 The dirstate also provides the following views onto the state:
1200 The dirstate also provides the following views onto the state:
1201
1201
1202 - `nonnormalset` is a set of the filenames that have state other
1202 - `nonnormalset` is a set of the filenames that have state other
1203 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1203 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1204
1204
1205 - `otherparentset` is a set of the filenames that are marked as coming
1205 - `otherparentset` is a set of the filenames that are marked as coming
1206 from the second parent when the dirstate is currently being merged.
1206 from the second parent when the dirstate is currently being merged.
1207
1207
1208 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1208 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1209 form that they appear as in the dirstate.
1209 form that they appear as in the dirstate.
1210
1210
1211 - `dirfoldmap` is a dict mapping normalized directory names to the
1211 - `dirfoldmap` is a dict mapping normalized directory names to the
1212 denormalized form that they appear as in the dirstate.
1212 denormalized form that they appear as in the dirstate.
1213 """
1213 """
1214
1214
1215 def __init__(self, ui, opener, root):
1215 def __init__(self, ui, opener, root):
1216 self._ui = ui
1216 self._ui = ui
1217 self._opener = opener
1217 self._opener = opener
1218 self._root = root
1218 self._root = root
1219 self._filename = 'dirstate'
1219 self._filename = 'dirstate'
1220
1220
1221 self._parents = None
1221 self._parents = None
1222 self._dirtyparents = False
1222 self._dirtyparents = False
1223
1223
1224 # for consistent view between _pl() and _read() invocations
1224 # for consistent view between _pl() and _read() invocations
1225 self._pendingmode = None
1225 self._pendingmode = None
1226
1226
1227 @propertycache
1227 @propertycache
1228 def _map(self):
1228 def _map(self):
1229 self._map = {}
1229 self._map = {}
1230 self.read()
1230 self.read()
1231 return self._map
1231 return self._map
1232
1232
1233 @propertycache
1233 @propertycache
1234 def copymap(self):
1234 def copymap(self):
1235 self.copymap = {}
1235 self.copymap = {}
1236 self._map
1236 self._map
1237 return self.copymap
1237 return self.copymap
1238
1238
1239 def clear(self):
1239 def clear(self):
1240 self._map.clear()
1240 self._map.clear()
1241 self.copymap.clear()
1241 self.copymap.clear()
1242 self.setparents(nullid, nullid)
1242 self.setparents(nullid, nullid)
1243 util.clearcachedproperty(self, "_dirs")
1243 util.clearcachedproperty(self, "_dirs")
1244 util.clearcachedproperty(self, "_alldirs")
1244 util.clearcachedproperty(self, "_alldirs")
1245 util.clearcachedproperty(self, "filefoldmap")
1245 util.clearcachedproperty(self, "filefoldmap")
1246 util.clearcachedproperty(self, "dirfoldmap")
1246 util.clearcachedproperty(self, "dirfoldmap")
1247 util.clearcachedproperty(self, "nonnormalset")
1247 util.clearcachedproperty(self, "nonnormalset")
1248 util.clearcachedproperty(self, "otherparentset")
1248 util.clearcachedproperty(self, "otherparentset")
1249
1249
1250 def items(self):
1250 def items(self):
1251 return self._map.iteritems()
1251 return self._map.iteritems()
1252
1252
1253 # forward for python2,3 compat
1253 # forward for python2,3 compat
1254 iteritems = items
1254 iteritems = items
1255
1255
1256 def __len__(self):
1256 def __len__(self):
1257 return len(self._map)
1257 return len(self._map)
1258
1258
1259 def __iter__(self):
1259 def __iter__(self):
1260 return iter(self._map)
1260 return iter(self._map)
1261
1261
1262 def get(self, key, default=None):
1262 def get(self, key, default=None):
1263 return self._map.get(key, default)
1263 return self._map.get(key, default)
1264
1264
1265 def __contains__(self, key):
1265 def __contains__(self, key):
1266 return key in self._map
1266 return key in self._map
1267
1267
1268 def __getitem__(self, key):
1268 def __getitem__(self, key):
1269 return self._map[key]
1269 return self._map[key]
1270
1270
1271 def keys(self):
1271 def keys(self):
1272 return self._map.keys()
1272 return self._map.keys()
1273
1273
1274 def preload(self):
1274 def preload(self):
1275 """Loads the underlying data, if it's not already loaded"""
1275 """Loads the underlying data, if it's not already loaded"""
1276 self._map
1276 self._map
1277
1277
1278 def addfile(self, f, oldstate, state, mode, size, mtime):
1278 def addfile(self, f, oldstate, state, mode, size, mtime):
1279 """Add a tracked file to the dirstate."""
1279 """Add a tracked file to the dirstate."""
1280 if oldstate in "?r" and r"_dirs" in self.__dict__:
1280 if oldstate in "?r" and r"_dirs" in self.__dict__:
1281 self._dirs.addpath(f)
1281 self._dirs.addpath(f)
1282 if oldstate == "?" and r"_alldirs" in self.__dict__:
1282 if oldstate == "?" and r"_alldirs" in self.__dict__:
1283 self._alldirs.addpath(f)
1283 self._alldirs.addpath(f)
1284 self._map[f] = dirstatetuple(state, mode, size, mtime)
1284 self._map[f] = dirstatetuple(state, mode, size, mtime)
1285 if state != 'n' or mtime == -1:
1285 if state != 'n' or mtime == -1:
1286 self.nonnormalset.add(f)
1286 self.nonnormalset.add(f)
1287 if size == -2:
1287 if size == -2:
1288 self.otherparentset.add(f)
1288 self.otherparentset.add(f)
1289
1289
1290 def removefile(self, f, oldstate, size):
1290 def removefile(self, f, oldstate, size):
1291 """
1291 """
1292 Mark a file as removed in the dirstate.
1292 Mark a file as removed in the dirstate.
1293
1293
1294 The `size` parameter is used to store sentinel values that indicate
1294 The `size` parameter is used to store sentinel values that indicate
1295 the file's previous state. In the future, we should refactor this
1295 the file's previous state. In the future, we should refactor this
1296 to be more explicit about what that state is.
1296 to be more explicit about what that state is.
1297 """
1297 """
1298 if oldstate not in "?r" and r"_dirs" in self.__dict__:
1298 if oldstate not in "?r" and r"_dirs" in self.__dict__:
1299 self._dirs.delpath(f)
1299 self._dirs.delpath(f)
1300 if oldstate == "?" and r"_alldirs" in self.__dict__:
1300 if oldstate == "?" and r"_alldirs" in self.__dict__:
1301 self._alldirs.addpath(f)
1301 self._alldirs.addpath(f)
1302 if r"filefoldmap" in self.__dict__:
1302 if r"filefoldmap" in self.__dict__:
1303 normed = util.normcase(f)
1303 normed = util.normcase(f)
1304 self.filefoldmap.pop(normed, None)
1304 self.filefoldmap.pop(normed, None)
1305 self._map[f] = dirstatetuple('r', 0, size, 0)
1305 self._map[f] = dirstatetuple('r', 0, size, 0)
1306 self.nonnormalset.add(f)
1306 self.nonnormalset.add(f)
1307
1307
1308 def dropfile(self, f, oldstate):
1308 def dropfile(self, f, oldstate):
1309 """
1309 """
1310 Remove a file from the dirstate. Returns True if the file was
1310 Remove a file from the dirstate. Returns True if the file was
1311 previously recorded.
1311 previously recorded.
1312 """
1312 """
1313 exists = self._map.pop(f, None) is not None
1313 exists = self._map.pop(f, None) is not None
1314 if exists:
1314 if exists:
1315 if oldstate != "r" and r"_dirs" in self.__dict__:
1315 if oldstate != "r" and r"_dirs" in self.__dict__:
1316 self._dirs.delpath(f)
1316 self._dirs.delpath(f)
1317 if r"_alldirs" in self.__dict__:
1317 if r"_alldirs" in self.__dict__:
1318 self._alldirs.delpath(f)
1318 self._alldirs.delpath(f)
1319 if r"filefoldmap" in self.__dict__:
1319 if r"filefoldmap" in self.__dict__:
1320 normed = util.normcase(f)
1320 normed = util.normcase(f)
1321 self.filefoldmap.pop(normed, None)
1321 self.filefoldmap.pop(normed, None)
1322 self.nonnormalset.discard(f)
1322 self.nonnormalset.discard(f)
1323 return exists
1323 return exists
1324
1324
1325 def clearambiguoustimes(self, files, now):
1325 def clearambiguoustimes(self, files, now):
1326 for f in files:
1326 for f in files:
1327 e = self.get(f)
1327 e = self.get(f)
1328 if e is not None and e[0] == 'n' and e[3] == now:
1328 if e is not None and e[0] == 'n' and e[3] == now:
1329 self._map[f] = dirstatetuple(e[0], e[1], e[2], -1)
1329 self._map[f] = dirstatetuple(e[0], e[1], e[2], -1)
1330 self.nonnormalset.add(f)
1330 self.nonnormalset.add(f)
1331
1331
1332 def nonnormalentries(self):
1332 def nonnormalentries(self):
1333 '''Compute the nonnormal dirstate entries from the dmap'''
1333 '''Compute the nonnormal dirstate entries from the dmap'''
1334 try:
1334 try:
1335 return parsers.nonnormalotherparententries(self._map)
1335 return parsers.nonnormalotherparententries(self._map)
1336 except AttributeError:
1336 except AttributeError:
1337 nonnorm = set()
1337 nonnorm = set()
1338 otherparent = set()
1338 otherparent = set()
1339 for fname, e in self._map.iteritems():
1339 for fname, e in self._map.iteritems():
1340 if e[0] != 'n' or e[3] == -1:
1340 if e[0] != 'n' or e[3] == -1:
1341 nonnorm.add(fname)
1341 nonnorm.add(fname)
1342 if e[0] == 'n' and e[2] == -2:
1342 if e[0] == 'n' and e[2] == -2:
1343 otherparent.add(fname)
1343 otherparent.add(fname)
1344 return nonnorm, otherparent
1344 return nonnorm, otherparent
1345
1345
1346 @propertycache
1346 @propertycache
1347 def filefoldmap(self):
1347 def filefoldmap(self):
1348 """Returns a dictionary mapping normalized case paths to their
1348 """Returns a dictionary mapping normalized case paths to their
1349 non-normalized versions.
1349 non-normalized versions.
1350 """
1350 """
1351 try:
1351 try:
1352 makefilefoldmap = parsers.make_file_foldmap
1352 makefilefoldmap = parsers.make_file_foldmap
1353 except AttributeError:
1353 except AttributeError:
1354 pass
1354 pass
1355 else:
1355 else:
1356 return makefilefoldmap(self._map, util.normcasespec,
1356 return makefilefoldmap(self._map, util.normcasespec,
1357 util.normcasefallback)
1357 util.normcasefallback)
1358
1358
1359 f = {}
1359 f = {}
1360 normcase = util.normcase
1360 normcase = util.normcase
1361 for name, s in self._map.iteritems():
1361 for name, s in self._map.iteritems():
1362 if s[0] != 'r':
1362 if s[0] != 'r':
1363 f[normcase(name)] = name
1363 f[normcase(name)] = name
1364 f['.'] = '.' # prevents useless util.fspath() invocation
1364 f['.'] = '.' # prevents useless util.fspath() invocation
1365 return f
1365 return f
1366
1366
1367 def hastrackeddir(self, d):
1367 def hastrackeddir(self, d):
1368 """
1368 """
1369 Returns True if the dirstate contains a tracked (not removed) file
1369 Returns True if the dirstate contains a tracked (not removed) file
1370 in this directory.
1370 in this directory.
1371 """
1371 """
1372 return d in self._dirs
1372 return d in self._dirs
1373
1373
1374 def hasdir(self, d):
1374 def hasdir(self, d):
1375 """
1375 """
1376 Returns True if the dirstate contains a file (tracked or removed)
1376 Returns True if the dirstate contains a file (tracked or removed)
1377 in this directory.
1377 in this directory.
1378 """
1378 """
1379 return d in self._alldirs
1379 return d in self._alldirs
1380
1380
1381 @propertycache
1381 @propertycache
1382 def _dirs(self):
1382 def _dirs(self):
1383 return util.dirs(self._map, 'r')
1383 return util.dirs(self._map, 'r')
1384
1384
1385 @propertycache
1385 @propertycache
1386 def _alldirs(self):
1386 def _alldirs(self):
1387 return util.dirs(self._map)
1387 return util.dirs(self._map)
1388
1388
1389 def _opendirstatefile(self):
1389 def _opendirstatefile(self):
1390 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1390 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1391 if self._pendingmode is not None and self._pendingmode != mode:
1391 if self._pendingmode is not None and self._pendingmode != mode:
1392 fp.close()
1392 fp.close()
1393 raise error.Abort(_('working directory state may be '
1393 raise error.Abort(_('working directory state may be '
1394 'changed parallelly'))
1394 'changed parallelly'))
1395 self._pendingmode = mode
1395 self._pendingmode = mode
1396 return fp
1396 return fp
1397
1397
1398 def parents(self):
1398 def parents(self):
1399 if not self._parents:
1399 if not self._parents:
1400 try:
1400 try:
1401 fp = self._opendirstatefile()
1401 fp = self._opendirstatefile()
1402 st = fp.read(40)
1402 st = fp.read(40)
1403 fp.close()
1403 fp.close()
1404 except IOError as err:
1404 except IOError as err:
1405 if err.errno != errno.ENOENT:
1405 if err.errno != errno.ENOENT:
1406 raise
1406 raise
1407 # File doesn't exist, so the current state is empty
1407 # File doesn't exist, so the current state is empty
1408 st = ''
1408 st = ''
1409
1409
1410 l = len(st)
1410 l = len(st)
1411 if l == 40:
1411 if l == 40:
1412 self._parents = (st[:20], st[20:40])
1412 self._parents = (st[:20], st[20:40])
1413 elif l == 0:
1413 elif l == 0:
1414 self._parents = (nullid, nullid)
1414 self._parents = (nullid, nullid)
1415 else:
1415 else:
1416 raise error.Abort(_('working directory state appears '
1416 raise error.Abort(_('working directory state appears '
1417 'damaged!'))
1417 'damaged!'))
1418
1418
1419 return self._parents
1419 return self._parents
1420
1420
1421 def setparents(self, p1, p2):
1421 def setparents(self, p1, p2):
1422 self._parents = (p1, p2)
1422 self._parents = (p1, p2)
1423 self._dirtyparents = True
1423 self._dirtyparents = True
1424
1424
1425 def read(self):
1425 def read(self):
1426 # ignore HG_PENDING because identity is used only for writing
1426 # ignore HG_PENDING because identity is used only for writing
1427 self.identity = util.filestat.frompath(
1427 self.identity = util.filestat.frompath(
1428 self._opener.join(self._filename))
1428 self._opener.join(self._filename))
1429
1429
1430 try:
1430 try:
1431 fp = self._opendirstatefile()
1431 fp = self._opendirstatefile()
1432 try:
1432 try:
1433 st = fp.read()
1433 st = fp.read()
1434 finally:
1434 finally:
1435 fp.close()
1435 fp.close()
1436 except IOError as err:
1436 except IOError as err:
1437 if err.errno != errno.ENOENT:
1437 if err.errno != errno.ENOENT:
1438 raise
1438 raise
1439 return
1439 return
1440 if not st:
1440 if not st:
1441 return
1441 return
1442
1442
1443 if util.safehasattr(parsers, 'dict_new_presized'):
1443 if util.safehasattr(parsers, 'dict_new_presized'):
1444 # Make an estimate of the number of files in the dirstate based on
1444 # Make an estimate of the number of files in the dirstate based on
1445 # its size. From a linear regression on a set of real-world repos,
1445 # its size. From a linear regression on a set of real-world repos,
1446 # all over 10,000 files, the size of a dirstate entry is 85
1446 # all over 10,000 files, the size of a dirstate entry is 85
1447 # bytes. The cost of resizing is significantly higher than the cost
1447 # bytes. The cost of resizing is significantly higher than the cost
1448 # of filling in a larger presized dict, so subtract 20% from the
1448 # of filling in a larger presized dict, so subtract 20% from the
1449 # size.
1449 # size.
1450 #
1450 #
1451 # This heuristic is imperfect in many ways, so in a future dirstate
1451 # This heuristic is imperfect in many ways, so in a future dirstate
1452 # format update it makes sense to just record the number of entries
1452 # format update it makes sense to just record the number of entries
1453 # on write.
1453 # on write.
1454 self._map = parsers.dict_new_presized(len(st) // 71)
1454 self._map = parsers.dict_new_presized(len(st) // 71)
1455
1455
1456 # Python's garbage collector triggers a GC each time a certain number
1456 # Python's garbage collector triggers a GC each time a certain number
1457 # of container objects (the number being defined by
1457 # of container objects (the number being defined by
1458 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1458 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1459 # for each file in the dirstate. The C version then immediately marks
1459 # for each file in the dirstate. The C version then immediately marks
1460 # them as not to be tracked by the collector. However, this has no
1460 # them as not to be tracked by the collector. However, this has no
1461 # effect on when GCs are triggered, only on what objects the GC looks
1461 # effect on when GCs are triggered, only on what objects the GC looks
1462 # into. This means that O(number of files) GCs are unavoidable.
1462 # into. This means that O(number of files) GCs are unavoidable.
1463 # Depending on when in the process's lifetime the dirstate is parsed,
1463 # Depending on when in the process's lifetime the dirstate is parsed,
1464 # this can get very expensive. As a workaround, disable GC while
1464 # this can get very expensive. As a workaround, disable GC while
1465 # parsing the dirstate.
1465 # parsing the dirstate.
1466 #
1466 #
1467 # (we cannot decorate the function directly since it is in a C module)
1467 # (we cannot decorate the function directly since it is in a C module)
1468 parse_dirstate = util.nogc(parsers.parse_dirstate)
1468 parse_dirstate = util.nogc(parsers.parse_dirstate)
1469 p = parse_dirstate(self._map, self.copymap, st)
1469 p = parse_dirstate(self._map, self.copymap, st)
1470 if not self._dirtyparents:
1470 if not self._dirtyparents:
1471 self.setparents(*p)
1471 self.setparents(*p)
1472
1472
1473 # Avoid excess attribute lookups by fast pathing certain checks
1473 # Avoid excess attribute lookups by fast pathing certain checks
1474 self.__contains__ = self._map.__contains__
1474 self.__contains__ = self._map.__contains__
1475 self.__getitem__ = self._map.__getitem__
1475 self.__getitem__ = self._map.__getitem__
1476 self.get = self._map.get
1476 self.get = self._map.get
1477
1477
1478 def write(self, st, now):
1478 def write(self, st, now):
1479 st.write(parsers.pack_dirstate(self._map, self.copymap,
1479 st.write(parsers.pack_dirstate(self._map, self.copymap,
1480 self.parents(), now))
1480 self.parents(), now))
1481 st.close()
1481 st.close()
1482 self._dirtyparents = False
1482 self._dirtyparents = False
1483 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1483 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1484
1484
1485 @propertycache
1485 @propertycache
1486 def nonnormalset(self):
1486 def nonnormalset(self):
1487 nonnorm, otherparents = self.nonnormalentries()
1487 nonnorm, otherparents = self.nonnormalentries()
1488 self.otherparentset = otherparents
1488 self.otherparentset = otherparents
1489 return nonnorm
1489 return nonnorm
1490
1490
1491 @propertycache
1491 @propertycache
1492 def otherparentset(self):
1492 def otherparentset(self):
1493 nonnorm, otherparents = self.nonnormalentries()
1493 nonnorm, otherparents = self.nonnormalentries()
1494 self.nonnormalset = nonnorm
1494 self.nonnormalset = nonnorm
1495 return otherparents
1495 return otherparents
1496
1496
1497 @propertycache
1497 @propertycache
1498 def identity(self):
1498 def identity(self):
1499 self._map
1499 self._map
1500 return self.identity
1500 return self.identity
1501
1501
1502 @propertycache
1502 @propertycache
1503 def dirfoldmap(self):
1503 def dirfoldmap(self):
1504 f = {}
1504 f = {}
1505 normcase = util.normcase
1505 normcase = util.normcase
1506 for name in self._dirs:
1506 for name in self._dirs:
1507 f[normcase(name)] = name
1507 f[normcase(name)] = name
1508 return f
1508 return f
General Comments 0
You need to be logged in to leave comments. Login now