##// END OF EJS Templates
dirstate: add comment on why we don't need to check if something is a dir/file...
Kyle Lippincott -
r38995:0d032756 default
parent child Browse files
Show More
@@ -1,1496 +1,1501 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import nullid
17 from .node import nullid
18 from . import (
18 from . import (
19 encoding,
19 encoding,
20 error,
20 error,
21 match as matchmod,
21 match as matchmod,
22 pathutil,
22 pathutil,
23 policy,
23 policy,
24 pycompat,
24 pycompat,
25 scmutil,
25 scmutil,
26 txnutil,
26 txnutil,
27 util,
27 util,
28 )
28 )
29
29
30 parsers = policy.importmod(r'parsers')
30 parsers = policy.importmod(r'parsers')
31
31
32 propertycache = util.propertycache
32 propertycache = util.propertycache
33 filecache = scmutil.filecache
33 filecache = scmutil.filecache
34 _rangemask = 0x7fffffff
34 _rangemask = 0x7fffffff
35
35
36 dirstatetuple = parsers.dirstatetuple
36 dirstatetuple = parsers.dirstatetuple
37
37
class repocache(filecache):
    """filecache for files in .hg/"""
    def join(self, obj, fname):
        # resolve fname against the repository's .hg/ opener
        return obj._opener.join(fname)
42
42
class rootcache(filecache):
    """filecache for files in the repository root"""
    def join(self, obj, fname):
        # resolve fname against the working directory root
        return obj._join(fname)
47
47
def _getfsnow(vfs):
    '''Get "now" timestamp on filesystem'''
    # Create a scratch file so we can observe the mtime the filesystem
    # actually assigns right now, then clean the file up again.
    fd, scratch = vfs.mkstemp()
    try:
        return os.fstat(fd)[stat.ST_MTIME]
    finally:
        os.close(fd)
        vfs.unlink(scratch)
56
56
57 class dirstate(object):
57 class dirstate(object):
58
58
    def __init__(self, opener, ui, root, validate, sparsematchfn):
        '''Create a new dirstate object.

        opener is an open()-like callable that can be used to open the
        dirstate file; root is the root of the directory tracked by
        the dirstate.
        '''
        self._opener = opener
        self._validate = validate
        self._root = root
        self._sparsematchfn = sparsematchfn
        # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
        # UNC path pointing to root share (issue4557)
        self._rootdir = pathutil.normasprefix(root)
        self._dirty = False          # unwritten in-memory changes pending?
        self._lastnormaltime = 0     # newest mtime marked normal (see normal())
        self._ui = ui
        self._filecache = {}
        self._parentwriters = 0      # nesting depth of parentchange()
        self._filename = 'dirstate'
        self._pendingfilename = '%s.pending' % self._filename
        self._plchangecallbacks = {}
        self._origpl = None          # parents before the in-progress change
        self._updatedfiles = set()   # files touched since the last write
        self._mapcls = dirstatemap
84
84
    @contextlib.contextmanager
    def parentchange(self):
        '''Context manager for handling dirstate parents.

        If an exception occurs in the scope of the context manager,
        the incoherent dirstate won't be written when wlock is
        released.
        '''
        self._parentwriters += 1
        yield
        # Typically we want the "undo" step of a context manager in a
        # finally block so it happens even when an exception
        # occurs. In this case, however, we only want to decrement
        # parentwriters if the code in the with statement exits
        # normally, so we don't have a try/finally here on purpose.
        self._parentwriters -= 1
101
101
    def pendingparentchange(self):
        '''Returns true if the dirstate is in the middle of a set of changes
        that modify the dirstate parent.
        '''
        return self._parentwriters > 0
107
107
    @propertycache
    def _map(self):
        """Return the dirstate contents (see documentation for dirstatemap)."""
        # assign before returning so the attribute is visible even while
        # the map object is being constructed
        self._map = self._mapcls(self._ui, self._opener, self._root)
        return self._map
113
113
    @property
    def _sparsematcher(self):
        """The matcher for the sparse checkout.

        The working directory may not include every file from a manifest. The
        matcher obtained by this property will match a path if it is to be
        included in the working directory.
        """
        # TODO there is potential to cache this property. For now, the matcher
        # is resolved on every access. (But the called function does use a
        # cache to keep the lookup fast.)
        return self._sparsematchfn()
126
126
    @repocache('branch')
    def _branch(self):
        # A missing branch file simply means the default branch; any other
        # read error is propagated.
        try:
            return self._opener.read("branch").strip() or "default"
        except IOError as inst:
            if inst.errno != errno.ENOENT:
                raise
            return "default"
135
135
    @property
    def _pl(self):
        # (p1, p2) nodeids of the working directory parents
        return self._map.parents()

    def hasdir(self, d):
        # delegate to the map's tracked-directory index
        return self._map.hastrackeddir(d)
142
142
    @rootcache('.hgignore')
    def _ignore(self):
        # Build a matcher from all configured ignore files; with no ignore
        # files the matcher matches nothing.
        files = self._ignorefiles()
        if not files:
            return matchmod.never(self._root, '')

        pats = ['include:%s' % f for f in files]
        return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
151
151
    @propertycache
    def _slash(self):
        # display paths with '/' even when the native separator differs
        # (ui.slash option, relevant on Windows)
        return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'

    @propertycache
    def _checklink(self):
        # does the working directory's filesystem support symlinks?
        return util.checklink(self._root)

    @propertycache
    def _checkexec(self):
        # does the working directory's filesystem support the exec bit?
        return util.checkexec(self._root)

    @propertycache
    def _checkcase(self):
        # True on case-insensitive filesystems (probed against '.hg')
        return not util.fscasesensitive(self._join('.hg'))
167
167
    def _join(self, f):
        # much faster than os.path.join()
        # it's safe because f is always a relative path
        return self._rootdir + f
172
172
    def flagfunc(self, buildfallback):
        # Return a callable f(path) -> 'l' (symlink), 'x' (executable) or ''.
        if self._checklink and self._checkexec:
            # Filesystem supports both symlinks and the exec bit: read both
            # flags straight from lstat, no fallback needed.
            def f(x):
                try:
                    st = os.lstat(self._join(x))
                    if util.statislink(st):
                        return 'l'
                    if util.statisexec(st):
                        return 'x'
                except OSError:
                    pass
                return ''
            return f

        # The filesystem cannot represent at least one of the flags, so a
        # fallback source is required for the missing one(s).
        fallback = buildfallback()
        if self._checklink:
            def f(x):
                if os.path.islink(self._join(x)):
                    return 'l'
                if 'x' in fallback(x):
                    return 'x'
                return ''
            return f
        if self._checkexec:
            def f(x):
                if 'l' in fallback(x):
                    return 'l'
                if util.isexec(self._join(x)):
                    return 'x'
                return ''
            return f
        else:
            # neither flag is supported: everything comes from the fallback
            return fallback
206
206
207 @propertycache
207 @propertycache
208 def _cwd(self):
208 def _cwd(self):
209 # internal config: ui.forcecwd
209 # internal config: ui.forcecwd
210 forcecwd = self._ui.config('ui', 'forcecwd')
210 forcecwd = self._ui.config('ui', 'forcecwd')
211 if forcecwd:
211 if forcecwd:
212 return forcecwd
212 return forcecwd
213 return pycompat.getcwd()
213 return pycompat.getcwd()
214
214
    def getcwd(self):
        '''Return the path from which a canonical path is calculated.

        This path should be used to resolve file patterns or to convert
        canonical paths back to file paths for display. It shouldn't be
        used to get real file paths. Use vfs functions instead.
        '''
        cwd = self._cwd
        if cwd == self._root:
            return ''
        # self._root ends with a path separator if self._root is '/' or 'C:\'
        rootsep = self._root
        if not util.endswithsep(rootsep):
            rootsep += pycompat.ossep
        if cwd.startswith(rootsep):
            # inside the repo: return the path relative to the root
            return cwd[len(rootsep):]
        else:
            # we're outside the repo. return an absolute path.
            return cwd
234
234
235 def pathto(self, f, cwd=None):
235 def pathto(self, f, cwd=None):
236 if cwd is None:
236 if cwd is None:
237 cwd = self.getcwd()
237 cwd = self.getcwd()
238 path = util.pathto(self._root, cwd, f)
238 path = util.pathto(self._root, cwd, f)
239 if self._slash:
239 if self._slash:
240 return util.pconvert(path)
240 return util.pconvert(path)
241 return path
241 return path
242
242
    def __getitem__(self, key):
        '''Return the current state of key (a filename) in the dirstate.

        States are:
        n  normal
        m  needs merging
        r  marked for removal
        a  marked for addition
        ?  not tracked
        '''
        # untracked files fall back to the '?' state via the default tuple
        return self._map.get(key, ("?",))[0]
254
254
    def __contains__(self, key):
        # is key (a filename) present in the dirstate map?
        return key in self._map

    def __iter__(self):
        # iterate filenames in sorted order for deterministic walks
        return iter(sorted(self._map))

    def items(self):
        # (filename, dirstate entry) pairs, unsorted
        return self._map.iteritems()

    iteritems = items
265
265
    def parents(self):
        # run both parent nodeids through the validator before exposing them
        return [self._validate(p) for p in self._pl]

    def p1(self):
        return self._validate(self._pl[0])

    def p2(self):
        return self._validate(self._pl[1])

    def branch(self):
        # stored value is in internal encoding (see setbranch); convert back
        # to local encoding for callers
        return encoding.tolocal(self._branch)
277
277
    def setparents(self, p1, p2=nullid):
        """Set dirstate parents to p1 and p2.

        When moving from two parents to one, 'm' merged entries are
        adjusted to normal and previous copy records discarded and
        returned by the call.

        See localrepo.setparents()
        """
        if self._parentwriters == 0:
            raise ValueError("cannot set dirstate parent without "
                             "calling dirstate.beginparentchange")

        self._dirty = True
        oldp2 = self._pl[1]
        if self._origpl is None:
            self._origpl = self._pl
        self._map.setparents(p1, p2)
        copies = {}
        if oldp2 != nullid and p2 == nullid:
            # leaving a merge: fold two-parent entries back to single-parent
            # states, collecting their copy records for the caller
            candidatefiles = self._map.nonnormalset.union(
                self._map.otherparentset)
            for f in candidatefiles:
                s = self._map.get(f)
                if s is None:
                    continue

                # Discard 'm' markers when moving away from a merge state
                if s[0] == 'm':
                    source = self._map.copymap.get(f)
                    if source:
                        copies[f] = source
                    self.normallookup(f)
                # Also fix up otherparent markers
                elif s[0] == 'n' and s[2] == -2:
                    source = self._map.copymap.get(f)
                    if source:
                        copies[f] = source
                    self.add(f)
        return copies
318
318
    def setbranch(self, branch):
        # store the branch name in internal encoding, written atomically
        self._branch = encoding.fromlocal(branch)
        f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
        try:
            f.write(self._branch + '\n')
            f.close()

            # make sure filecache has the correct stat info for _branch after
            # replacing the underlying file
            ce = self._filecache['_branch']
            if ce:
                ce.refresh()
        except: # re-raises
            f.discard()
            raise
334
334
    def invalidate(self):
        '''Causes the next access to reread the dirstate.

        This is different from localrepo.invalidatedirstate() because it always
        rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
        check whether the dirstate has changed before rereading it.'''

        # drop the cached properties so the next access recomputes them
        for a in (r"_map", r"_branch", r"_ignore"):
            if a in self.__dict__:
                delattr(self, a)
        self._lastnormaltime = 0
        self._dirty = False
        self._updatedfiles.clear()
        self._parentwriters = 0
        self._origpl = None
350
350
    def copy(self, source, dest):
        """Mark dest as a copy of source. Unmark dest if source is None."""
        if source == dest:
            return
        self._dirty = True
        if source is not None:
            self._map.copymap[dest] = source
            self._updatedfiles.add(source)
            self._updatedfiles.add(dest)
        elif self._map.copymap.pop(dest, None):
            # only dirty the file list if a record was actually removed
            self._updatedfiles.add(dest)
362
362
363 def copied(self, file):
363 def copied(self, file):
364 return self._map.copymap.get(file, None)
364 return self._map.copymap.get(file, None)
365
365
366 def copies(self):
366 def copies(self):
367 return self._map.copymap
367 return self._map.copymap
368
368
    def _addpath(self, f, state, mode, size, mtime):
        # Record f with the given state in the map, first checking that a
        # newly-tracked name does not collide with tracked directories/files.
        oldstate = self[f]
        if state == 'a' or oldstate == 'r':
            scmutil.checkfilename(f)
            if self._map.hastrackeddir(f):
                raise error.Abort(_('directory %r already in dirstate') %
                                  pycompat.bytestr(f))
            # shadows
            for d in util.finddirs(f):
                if self._map.hastrackeddir(d):
                    break
                entry = self._map.get(d)
                if entry is not None and entry[0] != 'r':
                    raise error.Abort(
                        _('file %r in dirstate clashes with %r') %
                        (pycompat.bytestr(d), pycompat.bytestr(f)))
        self._dirty = True
        self._updatedfiles.add(f)
        self._map.addfile(f, oldstate, state, mode, size, mtime)
388
388
389 def normal(self, f):
389 def normal(self, f):
390 '''Mark a file normal and clean.'''
390 '''Mark a file normal and clean.'''
391 s = os.lstat(self._join(f))
391 s = os.lstat(self._join(f))
392 mtime = s[stat.ST_MTIME]
392 mtime = s[stat.ST_MTIME]
393 self._addpath(f, 'n', s.st_mode,
393 self._addpath(f, 'n', s.st_mode,
394 s.st_size & _rangemask, mtime & _rangemask)
394 s.st_size & _rangemask, mtime & _rangemask)
395 self._map.copymap.pop(f, None)
395 self._map.copymap.pop(f, None)
396 if f in self._map.nonnormalset:
396 if f in self._map.nonnormalset:
397 self._map.nonnormalset.remove(f)
397 self._map.nonnormalset.remove(f)
398 if mtime > self._lastnormaltime:
398 if mtime > self._lastnormaltime:
399 # Remember the most recent modification timeslot for status(),
399 # Remember the most recent modification timeslot for status(),
400 # to make sure we won't miss future size-preserving file content
400 # to make sure we won't miss future size-preserving file content
401 # modifications that happen within the same timeslot.
401 # modifications that happen within the same timeslot.
402 self._lastnormaltime = mtime
402 self._lastnormaltime = mtime
403
403
    def normallookup(self, f):
        '''Mark a file normal, but possibly dirty.'''
        if self._pl[1] != nullid:
            # if there is a merge going on and the file was either
            # in state 'm' (-1) or coming from other parent (-2) before
            # being removed, restore that state.
            entry = self._map.get(f)
            if entry is not None:
                if entry[0] == 'r' and entry[2] in (-1, -2):
                    source = self._map.copymap.get(f)
                    if entry[2] == -1:
                        self.merge(f)
                    elif entry[2] == -2:
                        self.otherparent(f)
                    if source:
                        self.copy(source, f)
                    return
                if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
                    # already in a merge-related state: leave it alone
                    return
        # mode 0, size -1, mtime -1 force a content comparison on lookup
        self._addpath(f, 'n', 0, -1, -1)
        self._map.copymap.pop(f, None)
425
425
    def otherparent(self, f):
        '''Mark as coming from the other parent, always dirty.'''
        if self._pl[1] == nullid:
            raise error.Abort(_("setting %r to other parent "
                                "only allowed in merges") % f)
        if f in self and self[f] == 'n':
            # merge-like
            self._addpath(f, 'm', 0, -2, -1)
        else:
            # add-like
            self._addpath(f, 'n', 0, -2, -1)
        self._map.copymap.pop(f, None)
438
438
    def add(self, f):
        '''Mark a file added.'''
        self._addpath(f, 'a', 0, -1, -1)
        self._map.copymap.pop(f, None)
443
443
    def remove(self, f):
        '''Mark a file removed.'''
        self._dirty = True
        oldstate = self[f]
        size = 0
        if self._pl[1] != nullid:
            # during a merge, encode the pre-removal state in 'size' so
            # normallookup() can restore it later
            entry = self._map.get(f)
            if entry is not None:
                # backup the previous state
                if entry[0] == 'm': # merge
                    size = -1
                elif entry[0] == 'n' and entry[2] == -2: # other parent
                    size = -2
                    self._map.otherparentset.add(f)
        self._updatedfiles.add(f)
        self._map.removefile(f, oldstate, size)
        if size == 0:
            self._map.copymap.pop(f, None)
462
462
463 def merge(self, f):
463 def merge(self, f):
464 '''Mark a file merged.'''
464 '''Mark a file merged.'''
465 if self._pl[1] == nullid:
465 if self._pl[1] == nullid:
466 return self.normallookup(f)
466 return self.normallookup(f)
467 return self.otherparent(f)
467 return self.otherparent(f)
468
468
    def drop(self, f):
        '''Drop a file from the dirstate'''
        oldstate = self[f]
        # only mark dirty if the map actually removed an entry
        if self._map.dropfile(f, oldstate):
            self._dirty = True
            self._updatedfiles.add(f)
            self._map.copymap.pop(f, None)
476
476
    def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
        # Determine the case-folded spelling of path against the filesystem
        # and cache the result in storemap (only for paths that exist).
        if exists is None:
            exists = os.path.lexists(os.path.join(self._root, path))
        if not exists:
            # Maybe a path component exists
            if not ignoremissing and '/' in path:
                d, f = path.rsplit('/', 1)
                d = self._normalize(d, False, ignoremissing, None)
                folded = d + "/" + f
            else:
                # No path components, preserve original case
                folded = path
        else:
            # recursively normalize leading directory components
            # against dirstate
            if '/' in normed:
                d, f = normed.rsplit('/', 1)
                d = self._normalize(d, False, ignoremissing, True)
                r = self._root + "/" + d
                folded = d + "/" + util.fspath(f, r)
            else:
                folded = util.fspath(normed, self._root)
            storemap[normed] = folded

        return folded
502
502
    def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
        # case-normalize a file path, consulting only the file fold map
        normed = util.normcase(path)
        folded = self._map.filefoldmap.get(normed, None)
        if folded is None:
            if isknown:
                folded = path
            else:
                folded = self._discoverpath(path, normed, ignoremissing, exists,
                                            self._map.filefoldmap)
        return folded
513
513
    def _normalize(self, path, isknown, ignoremissing=False, exists=None):
        # case-normalize a path that may be a file or a directory
        normed = util.normcase(path)
        folded = self._map.filefoldmap.get(normed, None)
        if folded is None:
            folded = self._map.dirfoldmap.get(normed, None)
        if folded is None:
            if isknown:
                folded = path
            else:
                # store discovered result in dirfoldmap so that future
                # normalizefile calls don't start matching directories
                folded = self._discoverpath(path, normed, ignoremissing, exists,
                                            self._map.dirfoldmap)
        return folded
528
528
    def normalize(self, path, isknown=False, ignoremissing=False):
        '''
        normalize the case of a pathname when on a casefolding filesystem

        isknown specifies whether the filename came from walking the
        disk, to avoid extra filesystem access.

        If ignoremissing is True, missing path are returned
        unchanged. Otherwise, we try harder to normalize possibly
        existing path components.

        The normalized case is determined based on the following precedence:

        - version of name already stored in the dirstate
        - version of name stored on disk
        - version provided via command arguments
        '''

        if self._checkcase:
            return self._normalize(path, isknown, ignoremissing)
        # case-sensitive filesystem: nothing to normalize
        return path
550
550
    def clear(self):
        # wipe all entries; mark dirty so the empty state gets written out
        self._map.clear()
        self._lastnormaltime = 0
        self._updatedfiles.clear()
        self._dirty = True
556
556
    def rebuild(self, parent, allfiles, changedfiles=None):
        """Reset the dirstate so that ``allfiles`` are tracked under ``parent``.

        parent -- node id that becomes the first working-directory parent
            (the second parent is set to nullid).
        allfiles -- container of all filenames that should be tracked.
        changedfiles -- optional subset of files whose entries actually need
            refreshing; when None the entire dirstate is rebuilt.
        """
        if changedfiles is None:
            # Rebuild entire dirstate
            changedfiles = allfiles
        # clear() zeroes _lastnormaltime as a side effect; preserve the
        # pre-rebuild value so racy-change detection keeps working.
        lastnormaltime = self._lastnormaltime
        self.clear()
        self._lastnormaltime = lastnormaltime

        if self._origpl is None:
            # remember the original parents so parent-change callbacks can
            # be notified at write time (see _writedirstate)
            self._origpl = self._pl
        self._map.setparents(parent, nullid)
        # delegate per-file state transitions to the sibling methods
        # normallookup()/drop() (defined elsewhere in this class)
        for f in changedfiles:
            if f in allfiles:
                self.normallookup(f)
            else:
                self.drop(f)

        self._dirty = True
575
575
576 def identity(self):
576 def identity(self):
577 '''Return identity of dirstate itself to detect changing in storage
577 '''Return identity of dirstate itself to detect changing in storage
578
578
579 If identity of previous dirstate is equal to this, writing
579 If identity of previous dirstate is equal to this, writing
580 changes based on the former dirstate out can keep consistency.
580 changes based on the former dirstate out can keep consistency.
581 '''
581 '''
582 return self._map.identity
582 return self._map.identity
583
583
    def write(self, tr):
        """Flush in-memory dirstate changes out to disk.

        tr -- an active transaction, or a false value.  With a transaction
        the actual write is delegated to a transaction file generator;
        without one the dirstate file is written immediately.

        No-op when nothing has been modified since the last write.
        """
        if not self._dirty:
            return

        filename = self._filename
        if tr:
            # 'dirstate.write()' is not only for writing in-memory
            # changes out, but also for dropping ambiguous timestamp.
            # delayed writing re-raise "ambiguous timestamp issue".
            # See also the wiki page below for detail:
            # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan

            # emulate dropping timestamp in 'parsers.pack_dirstate'
            now = _getfsnow(self._opener)
            self._map.clearambiguoustimes(self._updatedfiles, now)

            # emulate that all 'dirstate.normal' results are written out
            self._lastnormaltime = 0
            self._updatedfiles.clear()

            # delay writing in-memory changes out
            tr.addfilegenerator('dirstate', (self._filename,),
                                self._writedirstate, location='plain')
            return

        # no transaction: write synchronously via an atomic temp file;
        # checkambig guards against timestamp-ambiguity races on the file
        st = self._opener(filename, "w", atomictemp=True, checkambig=True)
        self._writedirstate(st)
611
611
612 def addparentchangecallback(self, category, callback):
612 def addparentchangecallback(self, category, callback):
613 """add a callback to be called when the wd parents are changed
613 """add a callback to be called when the wd parents are changed
614
614
615 Callback will be called with the following arguments:
615 Callback will be called with the following arguments:
616 dirstate, (oldp1, oldp2), (newp1, newp2)
616 dirstate, (oldp1, oldp2), (newp1, newp2)
617
617
618 Category is a unique identifier to allow overwriting an old callback
618 Category is a unique identifier to allow overwriting an old callback
619 with a newer callback.
619 with a newer callback.
620 """
620 """
621 self._plchangecallbacks[category] = callback
621 self._plchangecallbacks[category] = callback
622
622
    def _writedirstate(self, st):
        """Serialize the dirstate map into the open file object ``st``.

        Fires any registered parent-change callbacks first, then writes the
        map (optionally delaying to dodge timestamp ambiguity) and resets
        the dirty/racy bookkeeping.
        """
        # notify callbacks about parents change
        if self._origpl is not None and self._origpl != self._pl:
            # sorted() gives callbacks a deterministic invocation order
            for c, callback in sorted(self._plchangecallbacks.iteritems()):
                callback(self, self._origpl, self._pl)
            self._origpl = None
        # use the modification time of the newly created temporary file as the
        # filesystem's notion of 'now'
        now = util.fstat(st)[stat.ST_MTIME] & _rangemask

        # enough 'delaywrite' prevents 'pack_dirstate' from dropping
        # timestamp of each entries in dirstate, because of 'now > mtime'
        delaywrite = self._ui.configint('debug', 'dirstate.delaywrite')
        if delaywrite > 0:
            # do we have any files to delay for?
            # e is a dirstatetuple: (state, mode, size, mtime)
            for f, e in self._map.iteritems():
                if e[0] == 'n' and e[3] == now:
                    import time # to avoid useless import
                    # rather than sleep n seconds, sleep until the next
                    # multiple of n seconds
                    clock = time.time()
                    start = int(clock) - (int(clock) % delaywrite)
                    end = start + delaywrite
                    time.sleep(end - clock)
                    now = end # trust our estimate that the end is near now
                    break

        self._map.write(st, now)
        self._lastnormaltime = 0
        self._dirty = False
653
653
654 def _dirignore(self, f):
654 def _dirignore(self, f):
655 if f == '.':
655 if f == '.':
656 return False
656 return False
657 if self._ignore(f):
657 if self._ignore(f):
658 return True
658 return True
659 for p in util.finddirs(f):
659 for p in util.finddirs(f):
660 if self._ignore(p):
660 if self._ignore(p):
661 return True
661 return True
662 return False
662 return False
663
663
664 def _ignorefiles(self):
664 def _ignorefiles(self):
665 files = []
665 files = []
666 if os.path.exists(self._join('.hgignore')):
666 if os.path.exists(self._join('.hgignore')):
667 files.append(self._join('.hgignore'))
667 files.append(self._join('.hgignore'))
668 for name, path in self._ui.configitems("ui"):
668 for name, path in self._ui.configitems("ui"):
669 if name == 'ignore' or name.startswith('ignore.'):
669 if name == 'ignore' or name.startswith('ignore.'):
670 # we need to use os.path.join here rather than self._join
670 # we need to use os.path.join here rather than self._join
671 # because path is arbitrary and user-specified
671 # because path is arbitrary and user-specified
672 files.append(os.path.join(self._rootdir, util.expandpath(path)))
672 files.append(os.path.join(self._rootdir, util.expandpath(path)))
673 return files
673 return files
674
674
    def _ignorefileandline(self, f):
        """Return (patternfile, lineno, originalline) for the rule ignoring f.

        Scans every ignore file (breadth-first, following 'subinclude'
        references transitively) and returns source information for the
        first pattern that matches ``f``.  Returns (None, -1, "") when no
        ignore rule matches.
        """
        files = collections.deque(self._ignorefiles())
        visited = set()
        while files:
            i = files.popleft()
            patterns = matchmod.readpatternfile(i, self._ui.warn,
                                                sourceinfo=True)
            for pattern, lineno, line in patterns:
                kind, p = matchmod._patsplit(pattern, 'glob')
                if kind == "subinclude":
                    # queue the referenced pattern file unless it has
                    # already been processed (guards against cycles)
                    if p not in visited:
                        files.append(p)
                    continue
                # build a matcher for this single pattern so we can tell
                # whether it is the one responsible for ignoring f
                m = matchmod.match(self._root, '', [], [pattern],
                                   warn=self._ui.warn)
                if m(f):
                    return (i, lineno, line)
            visited.add(i)
        return (None, -1, "")
694
694
    def _walkexplicit(self, match, subrepos):
        '''Get stat data about the files explicitly specified by match.

        Return a triple (results, dirsfound, dirsnotfound).
        - results is a mapping from filename to stat result. It also contains
          listings mapping subrepos and .hg to None.
        - dirsfound is a list of files found to be directories.
        - dirsnotfound is a list of files that the dirstate thinks are
          directories and that were not found.'''

        def badtype(mode):
            # Human-readable description for a file mode we cannot track.
            kind = _('unknown')
            if stat.S_ISCHR(mode):
                kind = _('character device')
            elif stat.S_ISBLK(mode):
                kind = _('block device')
            elif stat.S_ISFIFO(mode):
                kind = _('fifo')
            elif stat.S_ISSOCK(mode):
                kind = _('socket')
            elif stat.S_ISDIR(mode):
                kind = _('directory')
            return _('unsupported file type (type is %s)') % kind

        matchedir = match.explicitdir
        badfn = match.bad
        dmap = self._map
        # bind frequently-used lookups to locals for speed in the loop below
        lstat = os.lstat
        getkind = stat.S_IFMT
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join
        dirsfound = []
        foundadd = dirsfound.append
        dirsnotfound = []
        notfoundadd = dirsnotfound.append

        if not match.isexact() and self._checkcase:
            normalize = self._normalize
        else:
            normalize = None

        # Drop explicit files that live inside a subrepo: the subrepo owns
        # them.  Both lists are sorted so a single merge-style pass works.
        files = sorted(match.files())
        subrepos.sort()
        i, j = 0, 0
        while i < len(files) and j < len(subrepos):
            subpath = subrepos[j] + "/"
            if files[i] < subpath:
                i += 1
                continue
            while i < len(files) and files[i].startswith(subpath):
                del files[i]
            j += 1

        # No explicit files (or '.' requested) means "walk the whole repo".
        if not files or '.' in files:
            files = ['.']
        # Seed results with sentinels so subrepos and .hg are never walked.
        results = dict.fromkeys(subrepos)
        results['.hg'] = None

        for ff in files:
            # constructing the foldmap is expensive, so don't do it for the
            # common case where files is ['.']
            if normalize and ff != '.':
                nf = normalize(ff, False, True)
            else:
                nf = ff
            if nf in results:
                continue

            try:
                st = lstat(join(nf))
                kind = getkind(st.st_mode)
                if kind == dirkind:
                    if nf in dmap:
                        # file replaced by dir on disk but still in dirstate
                        results[nf] = None
                    if matchedir:
                        matchedir(nf)
                    foundadd((nf, ff))
                elif kind == regkind or kind == lnkkind:
                    results[nf] = st
                else:
                    badfn(ff, badtype(kind))
                    if nf in dmap:
                        results[nf] = None
            except OSError as inst: # nf not found on disk - it is dirstate only
                if nf in dmap: # does it exactly match a missing file?
                    results[nf] = None
                else: # does it match a missing directory?
                    if self._map.hasdir(nf):
                        if matchedir:
                            matchedir(nf)
                        notfoundadd(nf)
                    else:
                        badfn(ff, encoding.strtolocal(inst.strerror))

        # match.files() may contain explicitly-specified paths that shouldn't
        # be taken; drop them from the list of files found. dirsfound/notfound
        # aren't filtered here because they will be tested later.
        if match.anypats():
            for f in list(results):
                if f == '.hg' or f in subrepos:
                    # keep sentinel to disable further out-of-repo walks
                    continue
                if not match(f):
                    del results[f]

        # Case insensitive filesystems cannot rely on lstat() failing to detect
        # a case-only rename. Prune the stat object for any file that does not
        # match the case in the filesystem, if there are multiple files that
        # normalize to the same path.
        if match.isexact() and self._checkcase:
            normed = {}

            # group result paths by their case-folded form
            for f, st in results.iteritems():
                if st is None:
                    continue

                nc = util.normcase(f)
                paths = normed.get(nc)

                if paths is None:
                    paths = set()
                    normed[nc] = paths

                paths.add(f)

            # for ambiguous groups, keep only the on-disk spelling
            for norm, paths in normed.iteritems():
                if len(paths) > 1:
                    for path in paths:
                        folded = self._discoverpath(path, norm, True, None,
                                                    self._map.dirfoldmap)
                        if path != folded:
                            results[path] = None

        return results, dirsfound, dirsnotfound
832
832
    def walk(self, match, subrepos, unknown, ignored, full=True):
        '''
        Walk recursively through the directory tree, finding all files
        matched by match.

        If full is False, maybe skip some known-clean files.

        unknown/ignored select whether unknown resp. ignored files are
        listed; subrepos is a list of subrepo paths to exclude.

        Return a dict mapping filename to stat-like object (either
        mercurial.osutil.stat instance or return value of os.stat()).

        '''
        # full is a flag that extensions that hook into walk can use -- this
        # implementation doesn't use it at all. This satisfies the contract
        # because we only guarantee a "maybe".

        # Pick the per-file and per-directory ignore predicates according to
        # what the caller asked to see.
        if ignored:
            ignore = util.never
            dirignore = util.never
        elif unknown:
            ignore = self._ignore
            dirignore = self._dirignore
        else:
            # if not unknown and not ignored, drop dir recursion and step 2
            ignore = util.always
            dirignore = util.always

        # bind hot lookups to locals for the traversal loop
        matchfn = match.matchfn
        matchalways = match.always()
        matchtdir = match.traversedir
        dmap = self._map
        listdir = util.listdir
        lstat = os.lstat
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join

        exact = skipstep3 = False
        if match.isexact(): # match.exact
            exact = True
            dirignore = util.always # skip step 2
        elif match.prefix(): # match.match, no patterns
            skipstep3 = True

        if not exact and self._checkcase:
            normalize = self._normalize
            normalizefile = self._normalizefile
            skipstep3 = False
        else:
            normalize = self._normalize
            normalizefile = None

        # step 1: find all explicit files
        results, work, dirsnotfound = self._walkexplicit(match, subrepos)

        skipstep3 = skipstep3 and not (work or dirsnotfound)
        work = [d for d in work if not dirignore(d[0])]

        # step 2: visit subdirectories
        def traverse(work, alreadynormed):
            wadd = work.append
            while work:
                nd = work.pop()
                visitentries = match.visitchildrenset(nd)
                if not visitentries:
                    continue
                if visitentries == 'this' or visitentries == 'all':
                    visitentries = None
                skip = None
                if nd == '.':
                    nd = ''
                else:
                    skip = '.hg'
                try:
                    entries = listdir(join(nd), stat=True, skip=skip)
                except OSError as inst:
                    # unreadable/vanished directory: report and move on
                    if inst.errno in (errno.EACCES, errno.ENOENT):
                        match.bad(self.pathto(nd),
                                  encoding.strtolocal(inst.strerror))
                        continue
                    raise
                for f, kind, st in entries:
                    # If we needed to inspect any files, visitentries would have
                    # been 'this' or 'all', and we would have set it to None
                    # above. If we have visitentries populated here, we don't
                    # care about any files in this directory, so no need to
                    # check the type of `f`.
                    if visitentries and f not in visitentries:
                        continue
                    if normalizefile:
                        # even though f might be a directory, we're only
                        # interested in comparing it to files currently in the
                        # dmap -- therefore normalizefile is enough
                        nf = normalizefile(nd and (nd + "/" + f) or f, True,
                                           True)
                    else:
                        nf = nd and (nd + "/" + f) or f
                    if nf not in results:
                        if kind == dirkind:
                            if not ignore(nf):
                                if matchtdir:
                                    matchtdir(nf)
                                wadd(nf)
                            if nf in dmap and (matchalways or matchfn(nf)):
                                results[nf] = None
                        elif kind == regkind or kind == lnkkind:
                            if nf in dmap:
                                if matchalways or matchfn(nf):
                                    results[nf] = st
                            elif ((matchalways or matchfn(nf))
                                  and not ignore(nf)):
                                # unknown file -- normalize if necessary
                                if not alreadynormed:
                                    nf = normalize(nf, False, True)
                                results[nf] = st
                        elif nf in dmap and (matchalways or matchfn(nf)):
                            # tracked name whose on-disk type is unsupported
                            results[nf] = None

        for nd, d in work:
            # alreadynormed means that processwork doesn't have to do any
            # expensive directory normalization
            alreadynormed = not normalize or nd == d
            traverse([d], alreadynormed)

        # drop the sentinels seeded by _walkexplicit
        for s in subrepos:
            del results[s]
        del results['.hg']

        # step 3: visit remaining files from dmap
        if not skipstep3 and not exact:
            # If a dmap file is not in results yet, it was either
            # a) not matching matchfn b) ignored, c) missing, or d) under a
            # symlink directory.
            if not results and matchalways:
                visit = [f for f in dmap]
            else:
                visit = [f for f in dmap if f not in results and matchfn(f)]
            visit.sort()

            if unknown:
                # unknown == True means we walked all dirs under the roots
                # that wasn't ignored, and everything that matched was stat'ed
                # and is already in results.
                # The rest must thus be ignored or under a symlink.
                audit_path = pathutil.pathauditor(self._root, cached=True)

                for nf in iter(visit):
                    # If a stat for the same file was already added with a
                    # different case, don't add one for this, since that would
                    # make it appear as if the file exists under both names
                    # on disk.
                    if (normalizefile and
                        normalizefile(nf, True, True) in results):
                        results[nf] = None
                    # Report ignored items in the dmap as long as they are not
                    # under a symlink directory.
                    elif audit_path.check(nf):
                        try:
                            results[nf] = lstat(join(nf))
                            # file was just ignored, no links, and exists
                        except OSError:
                            # file doesn't exist
                            results[nf] = None
                    else:
                        # It's either missing or under a symlink directory
                        # which we in this case report as missing
                        results[nf] = None
            else:
                # We may not have walked the full directory tree above,
                # so stat and check everything we missed.
                iv = iter(visit)
                for st in util.statfiles([join(i) for i in visit]):
                    results[next(iv)] = st
        return results
1002
1007
1003 def status(self, match, subrepos, ignored, clean, unknown):
1008 def status(self, match, subrepos, ignored, clean, unknown):
1004 '''Determine the status of the working copy relative to the
1009 '''Determine the status of the working copy relative to the
1005 dirstate and return a pair of (unsure, status), where status is of type
1010 dirstate and return a pair of (unsure, status), where status is of type
1006 scmutil.status and:
1011 scmutil.status and:
1007
1012
1008 unsure:
1013 unsure:
1009 files that might have been modified since the dirstate was
1014 files that might have been modified since the dirstate was
1010 written, but need to be read to be sure (size is the same
1015 written, but need to be read to be sure (size is the same
1011 but mtime differs)
1016 but mtime differs)
1012 status.modified:
1017 status.modified:
1013 files that have definitely been modified since the dirstate
1018 files that have definitely been modified since the dirstate
1014 was written (different size or mode)
1019 was written (different size or mode)
1015 status.clean:
1020 status.clean:
1016 files that have definitely not been modified since the
1021 files that have definitely not been modified since the
1017 dirstate was written
1022 dirstate was written
1018 '''
1023 '''
1019 listignored, listclean, listunknown = ignored, clean, unknown
1024 listignored, listclean, listunknown = ignored, clean, unknown
1020 lookup, modified, added, unknown, ignored = [], [], [], [], []
1025 lookup, modified, added, unknown, ignored = [], [], [], [], []
1021 removed, deleted, clean = [], [], []
1026 removed, deleted, clean = [], [], []
1022
1027
1023 dmap = self._map
1028 dmap = self._map
1024 dmap.preload()
1029 dmap.preload()
1025 dcontains = dmap.__contains__
1030 dcontains = dmap.__contains__
1026 dget = dmap.__getitem__
1031 dget = dmap.__getitem__
1027 ladd = lookup.append # aka "unsure"
1032 ladd = lookup.append # aka "unsure"
1028 madd = modified.append
1033 madd = modified.append
1029 aadd = added.append
1034 aadd = added.append
1030 uadd = unknown.append
1035 uadd = unknown.append
1031 iadd = ignored.append
1036 iadd = ignored.append
1032 radd = removed.append
1037 radd = removed.append
1033 dadd = deleted.append
1038 dadd = deleted.append
1034 cadd = clean.append
1039 cadd = clean.append
1035 mexact = match.exact
1040 mexact = match.exact
1036 dirignore = self._dirignore
1041 dirignore = self._dirignore
1037 checkexec = self._checkexec
1042 checkexec = self._checkexec
1038 copymap = self._map.copymap
1043 copymap = self._map.copymap
1039 lastnormaltime = self._lastnormaltime
1044 lastnormaltime = self._lastnormaltime
1040
1045
1041 # We need to do full walks when either
1046 # We need to do full walks when either
1042 # - we're listing all clean files, or
1047 # - we're listing all clean files, or
1043 # - match.traversedir does something, because match.traversedir should
1048 # - match.traversedir does something, because match.traversedir should
1044 # be called for every dir in the working dir
1049 # be called for every dir in the working dir
1045 full = listclean or match.traversedir is not None
1050 full = listclean or match.traversedir is not None
1046 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1051 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1047 full=full).iteritems():
1052 full=full).iteritems():
1048 if not dcontains(fn):
1053 if not dcontains(fn):
1049 if (listignored or mexact(fn)) and dirignore(fn):
1054 if (listignored or mexact(fn)) and dirignore(fn):
1050 if listignored:
1055 if listignored:
1051 iadd(fn)
1056 iadd(fn)
1052 else:
1057 else:
1053 uadd(fn)
1058 uadd(fn)
1054 continue
1059 continue
1055
1060
1056 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1061 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1057 # written like that for performance reasons. dmap[fn] is not a
1062 # written like that for performance reasons. dmap[fn] is not a
1058 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1063 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1059 # opcode has fast paths when the value to be unpacked is a tuple or
1064 # opcode has fast paths when the value to be unpacked is a tuple or
1060 # a list, but falls back to creating a full-fledged iterator in
1065 # a list, but falls back to creating a full-fledged iterator in
1061 # general. That is much slower than simply accessing and storing the
1066 # general. That is much slower than simply accessing and storing the
1062 # tuple members one by one.
1067 # tuple members one by one.
1063 t = dget(fn)
1068 t = dget(fn)
1064 state = t[0]
1069 state = t[0]
1065 mode = t[1]
1070 mode = t[1]
1066 size = t[2]
1071 size = t[2]
1067 time = t[3]
1072 time = t[3]
1068
1073
1069 if not st and state in "nma":
1074 if not st and state in "nma":
1070 dadd(fn)
1075 dadd(fn)
1071 elif state == 'n':
1076 elif state == 'n':
1072 if (size >= 0 and
1077 if (size >= 0 and
1073 ((size != st.st_size and size != st.st_size & _rangemask)
1078 ((size != st.st_size and size != st.st_size & _rangemask)
1074 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1079 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1075 or size == -2 # other parent
1080 or size == -2 # other parent
1076 or fn in copymap):
1081 or fn in copymap):
1077 madd(fn)
1082 madd(fn)
1078 elif (time != st[stat.ST_MTIME]
1083 elif (time != st[stat.ST_MTIME]
1079 and time != st[stat.ST_MTIME] & _rangemask):
1084 and time != st[stat.ST_MTIME] & _rangemask):
1080 ladd(fn)
1085 ladd(fn)
1081 elif st[stat.ST_MTIME] == lastnormaltime:
1086 elif st[stat.ST_MTIME] == lastnormaltime:
1082 # fn may have just been marked as normal and it may have
1087 # fn may have just been marked as normal and it may have
1083 # changed in the same second without changing its size.
1088 # changed in the same second without changing its size.
1084 # This can happen if we quickly do multiple commits.
1089 # This can happen if we quickly do multiple commits.
1085 # Force lookup, so we don't miss such a racy file change.
1090 # Force lookup, so we don't miss such a racy file change.
1086 ladd(fn)
1091 ladd(fn)
1087 elif listclean:
1092 elif listclean:
1088 cadd(fn)
1093 cadd(fn)
1089 elif state == 'm':
1094 elif state == 'm':
1090 madd(fn)
1095 madd(fn)
1091 elif state == 'a':
1096 elif state == 'a':
1092 aadd(fn)
1097 aadd(fn)
1093 elif state == 'r':
1098 elif state == 'r':
1094 radd(fn)
1099 radd(fn)
1095
1100
1096 return (lookup, scmutil.status(modified, added, removed, deleted,
1101 return (lookup, scmutil.status(modified, added, removed, deleted,
1097 unknown, ignored, clean))
1102 unknown, ignored, clean))
1098
1103
1099 def matches(self, match):
1104 def matches(self, match):
1100 '''
1105 '''
1101 return files in the dirstate (in whatever state) filtered by match
1106 return files in the dirstate (in whatever state) filtered by match
1102 '''
1107 '''
1103 dmap = self._map
1108 dmap = self._map
1104 if match.always():
1109 if match.always():
1105 return dmap.keys()
1110 return dmap.keys()
1106 files = match.files()
1111 files = match.files()
1107 if match.isexact():
1112 if match.isexact():
1108 # fast path -- filter the other way around, since typically files is
1113 # fast path -- filter the other way around, since typically files is
1109 # much smaller than dmap
1114 # much smaller than dmap
1110 return [f for f in files if f in dmap]
1115 return [f for f in files if f in dmap]
1111 if match.prefix() and all(fn in dmap for fn in files):
1116 if match.prefix() and all(fn in dmap for fn in files):
1112 # fast path -- all the values are known to be files, so just return
1117 # fast path -- all the values are known to be files, so just return
1113 # that
1118 # that
1114 return list(files)
1119 return list(files)
1115 return [f for f in dmap if match(f)]
1120 return [f for f in dmap if match(f)]
1116
1121
1117 def _actualfilename(self, tr):
1122 def _actualfilename(self, tr):
1118 if tr:
1123 if tr:
1119 return self._pendingfilename
1124 return self._pendingfilename
1120 else:
1125 else:
1121 return self._filename
1126 return self._filename
1122
1127
    def savebackup(self, tr, backupname):
        '''Save current dirstate into backup file

        The backup can later be restored with `restorebackup` or removed
        with `clearbackup`.  If a transaction `tr` is given, the dirstate
        file is additionally registered with it so pending changes are
        written out and temporary files cleaned up appropriately.
        '''
        filename = self._actualfilename(tr)
        assert backupname != filename

        # use '_writedirstate' instead of 'write' to write changes certainly,
        # because the latter omits writing out if transaction is running.
        # output file will be used to create backup of dirstate at this point.
        if self._dirty or not self._opener.exists(filename):
            self._writedirstate(self._opener(filename, "w", atomictemp=True,
                                             checkambig=True))

        if tr:
            # ensure that subsequent tr.writepending returns True for
            # changes written out above, even if dirstate is never
            # changed after this
            tr.addfilegenerator('dirstate', (self._filename,),
                                self._writedirstate, location='plain')

            # ensure that pending file written above is unlinked at
            # failure, even if tr.writepending isn't invoked until the
            # end of this transaction
            tr.registertmp(filename, location='plain')

        # remove any stale backup before creating the new one
        self._opener.tryunlink(backupname)
        # hardlink backup is okay because _writedirstate is always called
        # with an "atomictemp=True" file.
        util.copyfile(self._opener.join(filename),
                      self._opener.join(backupname), hardlink=True)
1152
1157
1153 def restorebackup(self, tr, backupname):
1158 def restorebackup(self, tr, backupname):
1154 '''Restore dirstate by backup file'''
1159 '''Restore dirstate by backup file'''
1155 # this "invalidate()" prevents "wlock.release()" from writing
1160 # this "invalidate()" prevents "wlock.release()" from writing
1156 # changes of dirstate out after restoring from backup file
1161 # changes of dirstate out after restoring from backup file
1157 self.invalidate()
1162 self.invalidate()
1158 filename = self._actualfilename(tr)
1163 filename = self._actualfilename(tr)
1159 o = self._opener
1164 o = self._opener
1160 if util.samefile(o.join(backupname), o.join(filename)):
1165 if util.samefile(o.join(backupname), o.join(filename)):
1161 o.unlink(backupname)
1166 o.unlink(backupname)
1162 else:
1167 else:
1163 o.rename(backupname, filename, checkambig=True)
1168 o.rename(backupname, filename, checkambig=True)
1164
1169
    def clearbackup(self, tr, backupname):
        '''Clear backup file'''
        # `tr` is accepted for interface symmetry with savebackup /
        # restorebackup but is not needed just to remove the backup file.
        self._opener.unlink(backupname)
1168
1173
class dirstatemap(object):
    """Map encapsulating the dirstate's contents.

    The dirstate contains the following state:

    - `identity` is the identity of the dirstate file, which can be used to
      detect when changes have occurred to the dirstate file.

    - `parents` is a pair containing the parents of the working copy. The
      parents are updated by calling `setparents`.

    - the state map maps filenames to tuples of (state, mode, size, mtime),
      where state is a single character representing 'normal', 'added',
      'removed', or 'merged'. It is read by treating the dirstate as a
      dict. File state is updated by calling the `addfile`, `removefile` and
      `dropfile` methods.

    - `copymap` maps destination filenames to their source filename.

    The dirstate also provides the following views onto the state:

    - `nonnormalset` is a set of the filenames that have state other
      than 'normal', or are normal but have an mtime of -1 ('normallookup').

    - `otherparentset` is a set of the filenames that are marked as coming
      from the second parent when the dirstate is currently being merged.

    - `filefoldmap` is a dict mapping normalized filenames to the denormalized
      form that they appear as in the dirstate.

    - `dirfoldmap` is a dict mapping normalized directory names to the
      denormalized form that they appear as in the dirstate.
    """

    def __init__(self, ui, opener, root):
        self._ui = ui
        self._opener = opener
        self._root = root
        self._filename = 'dirstate'

        # parents are loaded lazily by parents(); _dirtyparents tracks
        # whether setparents() changed them since the last read
        self._parents = None
        self._dirtyparents = False

        # for consistent view between _pl() and _read() invocations
        self._pendingmode = None

    @propertycache
    def _map(self):
        # Assign the attribute before calling read() so read() can fill it
        # in (or replace it wholesale with a presized dict) without
        # re-triggering this propertycache.
        self._map = {}
        self.read()
        return self._map

    @propertycache
    def copymap(self):
        # Touching self._map triggers read(), which populates self.copymap
        # as a side effect.
        self.copymap = {}
        self._map
        return self.copymap

    def clear(self):
        # Reset to an empty dirstate with null parents, and drop every
        # derived/cached view so it is recomputed on next access.
        self._map.clear()
        self.copymap.clear()
        self.setparents(nullid, nullid)
        util.clearcachedproperty(self, "_dirs")
        util.clearcachedproperty(self, "_alldirs")
        util.clearcachedproperty(self, "filefoldmap")
        util.clearcachedproperty(self, "dirfoldmap")
        util.clearcachedproperty(self, "nonnormalset")
        util.clearcachedproperty(self, "otherparentset")

    def items(self):
        return self._map.iteritems()

    # forward for python2,3 compat
    iteritems = items

    def __len__(self):
        return len(self._map)

    def __iter__(self):
        return iter(self._map)

    def get(self, key, default=None):
        return self._map.get(key, default)

    def __contains__(self, key):
        return key in self._map

    def __getitem__(self, key):
        return self._map[key]

    def keys(self):
        return self._map.keys()

    def preload(self):
        """Loads the underlying data, if it's not already loaded"""
        self._map

    def addfile(self, f, oldstate, state, mode, size, mtime):
        """Add a tracked file to the dirstate."""
        # Only update the directory caches when they have already been
        # materialized; otherwise they will be built from scratch on first
        # access anyway.
        if oldstate in "?r" and r"_dirs" in self.__dict__:
            self._dirs.addpath(f)
        if oldstate == "?" and r"_alldirs" in self.__dict__:
            self._alldirs.addpath(f)
        self._map[f] = dirstatetuple(state, mode, size, mtime)
        if state != 'n' or mtime == -1:
            self.nonnormalset.add(f)
        if size == -2:
            # size of -2 marks a file as coming from the other parent
            self.otherparentset.add(f)

    def removefile(self, f, oldstate, size):
        """
        Mark a file as removed in the dirstate.

        The `size` parameter is used to store sentinel values that indicate
        the file's previous state. In the future, we should refactor this
        to be more explicit about what that state is.
        """
        if oldstate not in "?r" and r"_dirs" in self.__dict__:
            self._dirs.delpath(f)
        # a previously-untracked file gains an 'r' entry in the map below,
        # so it now counts toward _alldirs
        if oldstate == "?" and r"_alldirs" in self.__dict__:
            self._alldirs.addpath(f)
        if r"filefoldmap" in self.__dict__:
            normed = util.normcase(f)
            self.filefoldmap.pop(normed, None)
        self._map[f] = dirstatetuple('r', 0, size, 0)
        self.nonnormalset.add(f)

    def dropfile(self, f, oldstate):
        """
        Remove a file from the dirstate.  Returns True if the file was
        previously recorded.
        """
        exists = self._map.pop(f, None) is not None
        if exists:
            if oldstate != "r" and r"_dirs" in self.__dict__:
                self._dirs.delpath(f)
            if r"_alldirs" in self.__dict__:
                self._alldirs.delpath(f)
            if r"filefoldmap" in self.__dict__:
                normed = util.normcase(f)
                self.filefoldmap.pop(normed, None)
            self.nonnormalset.discard(f)
        return exists

    def clearambiguoustimes(self, files, now):
        # Entries whose mtime equals `now` are indistinguishable from racy
        # same-second modifications; force them into the lookup set by
        # resetting their mtime to -1.
        for f in files:
            e = self.get(f)
            if e is not None and e[0] == 'n' and e[3] == now:
                self._map[f] = dirstatetuple(e[0], e[1], e[2], -1)
                self.nonnormalset.add(f)

    def nonnormalentries(self):
        '''Compute the nonnormal dirstate entries from the dmap'''
        try:
            # C fast path when the compiled parsers module provides it
            return parsers.nonnormalotherparententries(self._map)
        except AttributeError:
            # pure-Python fallback mirroring the C implementation
            nonnorm = set()
            otherparent = set()
            for fname, e in self._map.iteritems():
                if e[0] != 'n' or e[3] == -1:
                    nonnorm.add(fname)
                if e[0] == 'n' and e[2] == -2:
                    otherparent.add(fname)
            return nonnorm, otherparent

    @propertycache
    def filefoldmap(self):
        """Returns a dictionary mapping normalized case paths to their
        non-normalized versions.
        """
        try:
            makefilefoldmap = parsers.make_file_foldmap
        except AttributeError:
            pass
        else:
            # C fast path
            return makefilefoldmap(self._map, util.normcasespec,
                                   util.normcasefallback)

        f = {}
        normcase = util.normcase
        for name, s in self._map.iteritems():
            if s[0] != 'r':
                f[normcase(name)] = name
        f['.'] = '.' # prevents useless util.fspath() invocation
        return f

    def hastrackeddir(self, d):
        """
        Returns True if the dirstate contains a tracked (not removed) file
        in this directory.
        """
        return d in self._dirs

    def hasdir(self, d):
        """
        Returns True if the dirstate contains a file (tracked or removed)
        in this directory.
        """
        return d in self._alldirs

    @propertycache
    def _dirs(self):
        # directories containing at least one file not in state 'r'
        return util.dirs(self._map, 'r')

    @propertycache
    def _alldirs(self):
        # directories containing any dirstate entry, including removed ones
        return util.dirs(self._map)

    def _opendirstatefile(self):
        fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
        # refuse to mix pending and non-pending reads within one lifetime,
        # so _pl() and _read() always see the same file
        if self._pendingmode is not None and self._pendingmode != mode:
            fp.close()
            raise error.Abort(_('working directory state may be '
                                'changed parallelly'))
        self._pendingmode = mode
        return fp

    def parents(self):
        # Lazily read just the 40-byte parent header of the dirstate file.
        if not self._parents:
            try:
                fp = self._opendirstatefile()
                st = fp.read(40)
                fp.close()
            except IOError as err:
                if err.errno != errno.ENOENT:
                    raise
                # File doesn't exist, so the current state is empty
                st = ''

            l = len(st)
            if l == 40:
                self._parents = st[:20], st[20:40]
            elif l == 0:
                self._parents = [nullid, nullid]
            else:
                # a short/partial header means the file is corrupt
                raise error.Abort(_('working directory state appears '
                                    'damaged!'))

        return self._parents

    def setparents(self, p1, p2):
        self._parents = (p1, p2)
        self._dirtyparents = True

    def read(self):
        # ignore HG_PENDING because identity is used only for writing
        self.identity = util.filestat.frompath(
            self._opener.join(self._filename))

        try:
            fp = self._opendirstatefile()
            try:
                st = fp.read()
            finally:
                fp.close()
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            return
        if not st:
            return

        if util.safehasattr(parsers, 'dict_new_presized'):
            # Make an estimate of the number of files in the dirstate based on
            # its size. From a linear regression on a set of real-world repos,
            # all over 10,000 files, the size of a dirstate entry is 85
            # bytes. The cost of resizing is significantly higher than the cost
            # of filling in a larger presized dict, so subtract 20% from the
            # size.
            #
            # This heuristic is imperfect in many ways, so in a future dirstate
            # format update it makes sense to just record the number of entries
            # on write.
            self._map = parsers.dict_new_presized(len(st) // 71)

        # Python's garbage collector triggers a GC each time a certain number
        # of container objects (the number being defined by
        # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
        # for each file in the dirstate. The C version then immediately marks
        # them as not to be tracked by the collector. However, this has no
        # effect on when GCs are triggered, only on what objects the GC looks
        # into. This means that O(number of files) GCs are unavoidable.
        # Depending on when in the process's lifetime the dirstate is parsed,
        # this can get very expensive. As a workaround, disable GC while
        # parsing the dirstate.
        #
        # (we cannot decorate the function directly since it is in a C module)
        parse_dirstate = util.nogc(parsers.parse_dirstate)
        p = parse_dirstate(self._map, self.copymap, st)
        if not self._dirtyparents:
            self.setparents(*p)

        # Avoid excess attribute lookups by fast pathing certain checks
        self.__contains__ = self._map.__contains__
        self.__getitem__ = self._map.__getitem__
        self.get = self._map.get

    def write(self, st, now):
        # serialize the map, copymap and parents into the (file-like) `st`
        st.write(parsers.pack_dirstate(self._map, self.copymap,
                                       self.parents(), now))
        st.close()
        self._dirtyparents = False
        self.nonnormalset, self.otherparentset = self.nonnormalentries()

    @propertycache
    def nonnormalset(self):
        # computing nonnormal entries yields otherparentset too; cache both
        nonnorm, otherparents = self.nonnormalentries()
        self.otherparentset = otherparents
        return nonnorm

    @propertycache
    def otherparentset(self):
        # computing otherparent entries yields nonnormalset too; cache both
        nonnorm, otherparents = self.nonnormalentries()
        self.nonnormalset = nonnorm
        return otherparents

    @propertycache
    def identity(self):
        # read() (triggered via self._map) assigns self.identity
        self._map
        return self.identity

    @propertycache
    def dirfoldmap(self):
        # normalized-case -> on-disk-case mapping for tracked directories
        f = {}
        normcase = util.normcase
        for name in self._dirs:
            f[normcase(name)] = name
        return f
General Comments 0
You need to be logged in to leave comments. Login now