dirstate: add explicit methods for modifying dirstate...
Mark Thomas
r35078:853b7c41 default
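This revision routes dirstate mutations through explicit dirstatemap methods rather than assigning dirstatetuple entries into the map directly. A minimal sketch of the resulting call pattern, using only the method names visible in the hunks below; the enclosing dirstate object and the file metadata (f, mode, size, mtime) are assumed to be in scope:

    # previously: mutate the map in place
    self._map[f] = dirstatetuple('n', mode, size, mtime)

    # now: go through the explicit dirstatemap API
    self._map.addfile(f, 'n', mode, size, mtime)   # add or update an entry
    self._map.removefile(f, size)                  # record an entry as removed
    if self._map.dropfile(f):                      # forget an entry; truthy if it was present
        self._dirty = True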
@@ -1,1464 +1,1477 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import nullid
17 from .node import nullid
18 from . import (
18 from . import (
19 encoding,
19 encoding,
20 error,
20 error,
21 match as matchmod,
21 match as matchmod,
22 pathutil,
22 pathutil,
23 policy,
23 policy,
24 pycompat,
24 pycompat,
25 scmutil,
25 scmutil,
26 txnutil,
26 txnutil,
27 util,
27 util,
28 )
28 )
29
29
30 parsers = policy.importmod(r'parsers')
30 parsers = policy.importmod(r'parsers')
31
31
32 propertycache = util.propertycache
32 propertycache = util.propertycache
33 filecache = scmutil.filecache
33 filecache = scmutil.filecache
34 _rangemask = 0x7fffffff
34 _rangemask = 0x7fffffff
35
35
36 dirstatetuple = parsers.dirstatetuple
36 dirstatetuple = parsers.dirstatetuple
37
37
38 class repocache(filecache):
38 class repocache(filecache):
39 """filecache for files in .hg/"""
39 """filecache for files in .hg/"""
40 def join(self, obj, fname):
40 def join(self, obj, fname):
41 return obj._opener.join(fname)
41 return obj._opener.join(fname)
42
42
43 class rootcache(filecache):
43 class rootcache(filecache):
44 """filecache for files in the repository root"""
44 """filecache for files in the repository root"""
45 def join(self, obj, fname):
45 def join(self, obj, fname):
46 return obj._join(fname)
46 return obj._join(fname)
47
47
48 def _getfsnow(vfs):
48 def _getfsnow(vfs):
49 '''Get "now" timestamp on filesystem'''
49 '''Get "now" timestamp on filesystem'''
50 tmpfd, tmpname = vfs.mkstemp()
50 tmpfd, tmpname = vfs.mkstemp()
51 try:
51 try:
52 return os.fstat(tmpfd).st_mtime
52 return os.fstat(tmpfd).st_mtime
53 finally:
53 finally:
54 os.close(tmpfd)
54 os.close(tmpfd)
55 vfs.unlink(tmpname)
55 vfs.unlink(tmpname)
56
56
57 class dirstate(object):
57 class dirstate(object):
58
58
59 def __init__(self, opener, ui, root, validate, sparsematchfn):
59 def __init__(self, opener, ui, root, validate, sparsematchfn):
60 '''Create a new dirstate object.
60 '''Create a new dirstate object.
61
61
62 opener is an open()-like callable that can be used to open the
62 opener is an open()-like callable that can be used to open the
63 dirstate file; root is the root of the directory tracked by
63 dirstate file; root is the root of the directory tracked by
64 the dirstate.
64 the dirstate.
65 '''
65 '''
66 self._opener = opener
66 self._opener = opener
67 self._validate = validate
67 self._validate = validate
68 self._root = root
68 self._root = root
69 self._sparsematchfn = sparsematchfn
69 self._sparsematchfn = sparsematchfn
70 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
70 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
71 # a UNC path pointing to a root share (issue4557)
71 # a UNC path pointing to a root share (issue4557)
72 self._rootdir = pathutil.normasprefix(root)
72 self._rootdir = pathutil.normasprefix(root)
73 self._dirty = False
73 self._dirty = False
74 self._lastnormaltime = 0
74 self._lastnormaltime = 0
75 self._ui = ui
75 self._ui = ui
76 self._filecache = {}
76 self._filecache = {}
77 self._parentwriters = 0
77 self._parentwriters = 0
78 self._filename = 'dirstate'
78 self._filename = 'dirstate'
79 self._pendingfilename = '%s.pending' % self._filename
79 self._pendingfilename = '%s.pending' % self._filename
80 self._plchangecallbacks = {}
80 self._plchangecallbacks = {}
81 self._origpl = None
81 self._origpl = None
82 self._updatedfiles = set()
82 self._updatedfiles = set()
83
83
84 @contextlib.contextmanager
84 @contextlib.contextmanager
85 def parentchange(self):
85 def parentchange(self):
86 '''Context manager for handling dirstate parents.
86 '''Context manager for handling dirstate parents.
87
87
88 If an exception occurs in the scope of the context manager,
88 If an exception occurs in the scope of the context manager,
89 the incoherent dirstate won't be written when wlock is
89 the incoherent dirstate won't be written when wlock is
90 released.
90 released.
91 '''
91 '''
92 self._parentwriters += 1
92 self._parentwriters += 1
93 yield
93 yield
94 # Typically we want the "undo" step of a context manager in a
94 # Typically we want the "undo" step of a context manager in a
95 # finally block so it happens even when an exception
95 # finally block so it happens even when an exception
96 # occurs. In this case, however, we only want to decrement
96 # occurs. In this case, however, we only want to decrement
97 # parentwriters if the code in the with statement exits
97 # parentwriters if the code in the with statement exits
98 # normally, so we don't have a try/finally here on purpose.
98 # normally, so we don't have a try/finally here on purpose.
99 self._parentwriters -= 1
99 self._parentwriters -= 1
100
100
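# Usage sketch (illustrative, not part of the file above; assumes a localrepo
# `repo` whose wlock is already held):
#
#     with repo.dirstate.parentchange():
#         repo.dirstate.setparents(newnode)
#
# setparents() refuses to run outside such a block, and if the body raises,
# the half-updated dirstate is not written out when the wlock is released.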
101 def beginparentchange(self):
101 def beginparentchange(self):
102 '''Marks the beginning of a set of changes that involve changing
102 '''Marks the beginning of a set of changes that involve changing
103 the dirstate parents. If there is an exception during this time,
103 the dirstate parents. If there is an exception during this time,
104 the dirstate will not be written when the wlock is released. This
104 the dirstate will not be written when the wlock is released. This
105 prevents writing an incoherent dirstate where the parent doesn't
105 prevents writing an incoherent dirstate where the parent doesn't
106 match the contents.
106 match the contents.
107 '''
107 '''
108 self._ui.deprecwarn('beginparentchange is obsoleted by the '
108 self._ui.deprecwarn('beginparentchange is obsoleted by the '
109 'parentchange context manager.', '4.3')
109 'parentchange context manager.', '4.3')
110 self._parentwriters += 1
110 self._parentwriters += 1
111
111
112 def endparentchange(self):
112 def endparentchange(self):
113 '''Marks the end of a set of changes that involve changing the
113 '''Marks the end of a set of changes that involve changing the
114 dirstate parents. Once all parent changes have been marked done,
114 dirstate parents. Once all parent changes have been marked done,
115 the wlock will be free to write the dirstate on release.
115 the wlock will be free to write the dirstate on release.
116 '''
116 '''
117 self._ui.deprecwarn('endparentchange is obsoleted by the '
117 self._ui.deprecwarn('endparentchange is obsoleted by the '
118 'parentchange context manager.', '4.3')
118 'parentchange context manager.', '4.3')
119 if self._parentwriters > 0:
119 if self._parentwriters > 0:
120 self._parentwriters -= 1
120 self._parentwriters -= 1
121
121
122 def pendingparentchange(self):
122 def pendingparentchange(self):
123 '''Returns true if the dirstate is in the middle of a set of changes
123 '''Returns true if the dirstate is in the middle of a set of changes
124 that modify the dirstate parent.
124 that modify the dirstate parent.
125 '''
125 '''
126 return self._parentwriters > 0
126 return self._parentwriters > 0
127
127
128 @propertycache
128 @propertycache
129 def _map(self):
129 def _map(self):
130 """Return the dirstate contents (see documentation for dirstatemap)."""
130 """Return the dirstate contents (see documentation for dirstatemap)."""
131 self._map = dirstatemap(self._ui, self._opener, self._root)
131 self._map = dirstatemap(self._ui, self._opener, self._root)
132 return self._map
132 return self._map
133
133
134 @property
134 @property
135 def _sparsematcher(self):
135 def _sparsematcher(self):
136 """The matcher for the sparse checkout.
136 """The matcher for the sparse checkout.
137
137
138 The working directory may not include every file from a manifest. The
138 The working directory may not include every file from a manifest. The
139 matcher obtained by this property will match a path if it is to be
139 matcher obtained by this property will match a path if it is to be
140 included in the working directory.
140 included in the working directory.
141 """
141 """
142 # TODO there is potential to cache this property. For now, the matcher
142 # TODO there is potential to cache this property. For now, the matcher
143 # is resolved on every access. (But the called function does use a
143 # is resolved on every access. (But the called function does use a
144 # cache to keep the lookup fast.)
144 # cache to keep the lookup fast.)
145 return self._sparsematchfn()
145 return self._sparsematchfn()
146
146
147 @repocache('branch')
147 @repocache('branch')
148 def _branch(self):
148 def _branch(self):
149 try:
149 try:
150 return self._opener.read("branch").strip() or "default"
150 return self._opener.read("branch").strip() or "default"
151 except IOError as inst:
151 except IOError as inst:
152 if inst.errno != errno.ENOENT:
152 if inst.errno != errno.ENOENT:
153 raise
153 raise
154 return "default"
154 return "default"
155
155
156 @property
156 @property
157 def _pl(self):
157 def _pl(self):
158 return self._map.parents()
158 return self._map.parents()
159
159
160 def dirs(self):
160 def dirs(self):
161 return self._map.dirs
161 return self._map.dirs
162
162
163 @rootcache('.hgignore')
163 @rootcache('.hgignore')
164 def _ignore(self):
164 def _ignore(self):
165 files = self._ignorefiles()
165 files = self._ignorefiles()
166 if not files:
166 if not files:
167 return matchmod.never(self._root, '')
167 return matchmod.never(self._root, '')
168
168
169 pats = ['include:%s' % f for f in files]
169 pats = ['include:%s' % f for f in files]
170 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
170 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
171
171
172 @propertycache
172 @propertycache
173 def _slash(self):
173 def _slash(self):
174 return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'
174 return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'
175
175
176 @propertycache
176 @propertycache
177 def _checklink(self):
177 def _checklink(self):
178 return util.checklink(self._root)
178 return util.checklink(self._root)
179
179
180 @propertycache
180 @propertycache
181 def _checkexec(self):
181 def _checkexec(self):
182 return util.checkexec(self._root)
182 return util.checkexec(self._root)
183
183
184 @propertycache
184 @propertycache
185 def _checkcase(self):
185 def _checkcase(self):
186 return not util.fscasesensitive(self._join('.hg'))
186 return not util.fscasesensitive(self._join('.hg'))
187
187
188 def _join(self, f):
188 def _join(self, f):
189 # much faster than os.path.join()
189 # much faster than os.path.join()
190 # it's safe because f is always a relative path
190 # it's safe because f is always a relative path
191 return self._rootdir + f
191 return self._rootdir + f
192
192
193 def flagfunc(self, buildfallback):
193 def flagfunc(self, buildfallback):
194 if self._checklink and self._checkexec:
194 if self._checklink and self._checkexec:
195 def f(x):
195 def f(x):
196 try:
196 try:
197 st = os.lstat(self._join(x))
197 st = os.lstat(self._join(x))
198 if util.statislink(st):
198 if util.statislink(st):
199 return 'l'
199 return 'l'
200 if util.statisexec(st):
200 if util.statisexec(st):
201 return 'x'
201 return 'x'
202 except OSError:
202 except OSError:
203 pass
203 pass
204 return ''
204 return ''
205 return f
205 return f
206
206
207 fallback = buildfallback()
207 fallback = buildfallback()
208 if self._checklink:
208 if self._checklink:
209 def f(x):
209 def f(x):
210 if os.path.islink(self._join(x)):
210 if os.path.islink(self._join(x)):
211 return 'l'
211 return 'l'
212 if 'x' in fallback(x):
212 if 'x' in fallback(x):
213 return 'x'
213 return 'x'
214 return ''
214 return ''
215 return f
215 return f
216 if self._checkexec:
216 if self._checkexec:
217 def f(x):
217 def f(x):
218 if 'l' in fallback(x):
218 if 'l' in fallback(x):
219 return 'l'
219 return 'l'
220 if util.isexec(self._join(x)):
220 if util.isexec(self._join(x)):
221 return 'x'
221 return 'x'
222 return ''
222 return ''
223 return f
223 return f
224 else:
224 else:
225 return fallback
225 return fallback
226
226
227 @propertycache
227 @propertycache
228 def _cwd(self):
228 def _cwd(self):
229 # internal config: ui.forcecwd
229 # internal config: ui.forcecwd
230 forcecwd = self._ui.config('ui', 'forcecwd')
230 forcecwd = self._ui.config('ui', 'forcecwd')
231 if forcecwd:
231 if forcecwd:
232 return forcecwd
232 return forcecwd
233 return pycompat.getcwd()
233 return pycompat.getcwd()
234
234
235 def getcwd(self):
235 def getcwd(self):
236 '''Return the path from which a canonical path is calculated.
236 '''Return the path from which a canonical path is calculated.
237
237
238 This path should be used to resolve file patterns or to convert
238 This path should be used to resolve file patterns or to convert
239 canonical paths back to file paths for display. It shouldn't be
239 canonical paths back to file paths for display. It shouldn't be
240 used to get real file paths. Use vfs functions instead.
240 used to get real file paths. Use vfs functions instead.
241 '''
241 '''
242 cwd = self._cwd
242 cwd = self._cwd
243 if cwd == self._root:
243 if cwd == self._root:
244 return ''
244 return ''
245 # self._root ends with a path separator if self._root is '/' or 'C:\'
245 # self._root ends with a path separator if self._root is '/' or 'C:\'
246 rootsep = self._root
246 rootsep = self._root
247 if not util.endswithsep(rootsep):
247 if not util.endswithsep(rootsep):
248 rootsep += pycompat.ossep
248 rootsep += pycompat.ossep
249 if cwd.startswith(rootsep):
249 if cwd.startswith(rootsep):
250 return cwd[len(rootsep):]
250 return cwd[len(rootsep):]
251 else:
251 else:
252 # we're outside the repo. return an absolute path.
252 # we're outside the repo. return an absolute path.
253 return cwd
253 return cwd
254
254
255 def pathto(self, f, cwd=None):
255 def pathto(self, f, cwd=None):
256 if cwd is None:
256 if cwd is None:
257 cwd = self.getcwd()
257 cwd = self.getcwd()
258 path = util.pathto(self._root, cwd, f)
258 path = util.pathto(self._root, cwd, f)
259 if self._slash:
259 if self._slash:
260 return util.pconvert(path)
260 return util.pconvert(path)
261 return path
261 return path
262
262
263 def __getitem__(self, key):
263 def __getitem__(self, key):
264 '''Return the current state of key (a filename) in the dirstate.
264 '''Return the current state of key (a filename) in the dirstate.
265
265
266 States are:
266 States are:
267 n normal
267 n normal
268 m needs merging
268 m needs merging
269 r marked for removal
269 r marked for removal
270 a marked for addition
270 a marked for addition
271 ? not tracked
271 ? not tracked
272 '''
272 '''
273 return self._map.get(key, ("?",))[0]
273 return self._map.get(key, ("?",))[0]
274
274
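# Illustration (not part of the file above; `repo` is an assumed localrepo):
#
#     state = repo.dirstate['foo.py']   # one of 'n', 'm', 'r', 'a', or '?' per the
#                                       # state table in the docstring above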
275 def __contains__(self, key):
275 def __contains__(self, key):
276 return key in self._map
276 return key in self._map
277
277
278 def __iter__(self):
278 def __iter__(self):
279 return iter(sorted(self._map))
279 return iter(sorted(self._map))
280
280
281 def items(self):
281 def items(self):
282 return self._map.iteritems()
282 return self._map.iteritems()
283
283
284 iteritems = items
284 iteritems = items
285
285
286 def parents(self):
286 def parents(self):
287 return [self._validate(p) for p in self._pl]
287 return [self._validate(p) for p in self._pl]
288
288
289 def p1(self):
289 def p1(self):
290 return self._validate(self._pl[0])
290 return self._validate(self._pl[0])
291
291
292 def p2(self):
292 def p2(self):
293 return self._validate(self._pl[1])
293 return self._validate(self._pl[1])
294
294
295 def branch(self):
295 def branch(self):
296 return encoding.tolocal(self._branch)
296 return encoding.tolocal(self._branch)
297
297
298 def setparents(self, p1, p2=nullid):
298 def setparents(self, p1, p2=nullid):
299 """Set dirstate parents to p1 and p2.
299 """Set dirstate parents to p1 and p2.
300
300
301 When moving from two parents to one, 'm' merged entries are
301 When moving from two parents to one, 'm' merged entries are
302 adjusted to normal and previous copy records are discarded and
302 adjusted to normal and previous copy records are discarded and
303 returned by the call.
303 returned by the call.
304
304
305 See localrepo.setparents()
305 See localrepo.setparents()
306 """
306 """
307 if self._parentwriters == 0:
307 if self._parentwriters == 0:
308 raise ValueError("cannot set dirstate parent without "
308 raise ValueError("cannot set dirstate parent without "
309 "calling dirstate.beginparentchange")
309 "calling dirstate.beginparentchange")
310
310
311 self._dirty = True
311 self._dirty = True
312 oldp2 = self._pl[1]
312 oldp2 = self._pl[1]
313 if self._origpl is None:
313 if self._origpl is None:
314 self._origpl = self._pl
314 self._origpl = self._pl
315 self._map.setparents(p1, p2)
315 self._map.setparents(p1, p2)
316 copies = {}
316 copies = {}
317 if oldp2 != nullid and p2 == nullid:
317 if oldp2 != nullid and p2 == nullid:
318 candidatefiles = self._map.nonnormalset.union(
318 candidatefiles = self._map.nonnormalset.union(
319 self._map.otherparentset)
319 self._map.otherparentset)
320 for f in candidatefiles:
320 for f in candidatefiles:
321 s = self._map.get(f)
321 s = self._map.get(f)
322 if s is None:
322 if s is None:
323 continue
323 continue
324
324
325 # Discard 'm' markers when moving away from a merge state
325 # Discard 'm' markers when moving away from a merge state
326 if s[0] == 'm':
326 if s[0] == 'm':
327 source = self._map.copymap.get(f)
327 source = self._map.copymap.get(f)
328 if source:
328 if source:
329 copies[f] = source
329 copies[f] = source
330 self.normallookup(f)
330 self.normallookup(f)
331 # Also fix up otherparent markers
331 # Also fix up otherparent markers
332 elif s[0] == 'n' and s[2] == -2:
332 elif s[0] == 'n' and s[2] == -2:
333 source = self._map.copymap.get(f)
333 source = self._map.copymap.get(f)
334 if source:
334 if source:
335 copies[f] = source
335 copies[f] = source
336 self.add(f)
336 self.add(f)
337 return copies
337 return copies
338
338
339 def setbranch(self, branch):
339 def setbranch(self, branch):
340 self._branch = encoding.fromlocal(branch)
340 self._branch = encoding.fromlocal(branch)
341 f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
341 f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
342 try:
342 try:
343 f.write(self._branch + '\n')
343 f.write(self._branch + '\n')
344 f.close()
344 f.close()
345
345
346 # make sure filecache has the correct stat info for _branch after
346 # make sure filecache has the correct stat info for _branch after
347 # replacing the underlying file
347 # replacing the underlying file
348 ce = self._filecache['_branch']
348 ce = self._filecache['_branch']
349 if ce:
349 if ce:
350 ce.refresh()
350 ce.refresh()
351 except: # re-raises
351 except: # re-raises
352 f.discard()
352 f.discard()
353 raise
353 raise
354
354
355 def invalidate(self):
355 def invalidate(self):
356 '''Causes the next access to reread the dirstate.
356 '''Causes the next access to reread the dirstate.
357
357
358 This is different from localrepo.invalidatedirstate() because it always
358 This is different from localrepo.invalidatedirstate() because it always
359 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
359 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
360 check whether the dirstate has changed before rereading it.'''
360 check whether the dirstate has changed before rereading it.'''
361
361
362 for a in ("_map", "_branch", "_ignore"):
362 for a in ("_map", "_branch", "_ignore"):
363 if a in self.__dict__:
363 if a in self.__dict__:
364 delattr(self, a)
364 delattr(self, a)
365 self._lastnormaltime = 0
365 self._lastnormaltime = 0
366 self._dirty = False
366 self._dirty = False
367 self._updatedfiles.clear()
367 self._updatedfiles.clear()
368 self._parentwriters = 0
368 self._parentwriters = 0
369 self._origpl = None
369 self._origpl = None
370
370
371 def copy(self, source, dest):
371 def copy(self, source, dest):
372 """Mark dest as a copy of source. Unmark dest if source is None."""
372 """Mark dest as a copy of source. Unmark dest if source is None."""
373 if source == dest:
373 if source == dest:
374 return
374 return
375 self._dirty = True
375 self._dirty = True
376 if source is not None:
376 if source is not None:
377 self._map.copymap[dest] = source
377 self._map.copymap[dest] = source
378 self._updatedfiles.add(source)
378 self._updatedfiles.add(source)
379 self._updatedfiles.add(dest)
379 self._updatedfiles.add(dest)
380 elif self._map.copymap.pop(dest, None):
380 elif self._map.copymap.pop(dest, None):
381 self._updatedfiles.add(dest)
381 self._updatedfiles.add(dest)
382
382
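# Illustration (not part of the file above; `repo` is an assumed localrepo):
#
#     repo.dirstate.copy('a.txt', 'b.txt')   # mark b.txt as copied from a.txt
#     repo.dirstate.copy(None, 'b.txt')      # drop the copy record for b.txt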
383 def copied(self, file):
383 def copied(self, file):
384 return self._map.copymap.get(file, None)
384 return self._map.copymap.get(file, None)
385
385
386 def copies(self):
386 def copies(self):
387 return self._map.copymap
387 return self._map.copymap
388
388
389 def _droppath(self, f):
389 def _droppath(self, f):
390 if self[f] not in "?r" and "dirs" in self._map.__dict__:
390 if self[f] not in "?r" and "dirs" in self._map.__dict__:
391 self._map.dirs.delpath(f)
391 self._map.dirs.delpath(f)
392
392
393 if "filefoldmap" in self._map.__dict__:
393 if "filefoldmap" in self._map.__dict__:
394 normed = util.normcase(f)
394 normed = util.normcase(f)
395 if normed in self._map.filefoldmap:
395 if normed in self._map.filefoldmap:
396 del self._map.filefoldmap[normed]
396 del self._map.filefoldmap[normed]
397
397
398 self._updatedfiles.add(f)
398 self._updatedfiles.add(f)
399
399
400 def _addpath(self, f, state, mode, size, mtime):
400 def _addpath(self, f, state, mode, size, mtime):
401 oldstate = self[f]
401 oldstate = self[f]
402 if state == 'a' or oldstate == 'r':
402 if state == 'a' or oldstate == 'r':
403 scmutil.checkfilename(f)
403 scmutil.checkfilename(f)
404 if f in self._map.dirs:
404 if f in self._map.dirs:
405 raise error.Abort(_('directory %r already in dirstate') % f)
405 raise error.Abort(_('directory %r already in dirstate') % f)
406 # shadows
406 # shadows
407 for d in util.finddirs(f):
407 for d in util.finddirs(f):
408 if d in self._map.dirs:
408 if d in self._map.dirs:
409 break
409 break
410 entry = self._map.get(d)
410 entry = self._map.get(d)
411 if entry is not None and entry[0] != 'r':
411 if entry is not None and entry[0] != 'r':
412 raise error.Abort(
412 raise error.Abort(
413 _('file %r in dirstate clashes with %r') % (d, f))
413 _('file %r in dirstate clashes with %r') % (d, f))
414 if oldstate in "?r" and "dirs" in self._map.__dict__:
414 if oldstate in "?r" and "dirs" in self._map.__dict__:
415 self._map.dirs.addpath(f)
415 self._map.dirs.addpath(f)
416 self._dirty = True
416 self._dirty = True
417 self._updatedfiles.add(f)
417 self._updatedfiles.add(f)
418 self._map[f] = dirstatetuple(state, mode, size, mtime)
419 if state != 'n' or mtime == -1:
418 if state != 'n' or mtime == -1:
420 self._map.nonnormalset.add(f)
419 self._map.nonnormalset.add(f)
421 if size == -2:
420 if size == -2:
422 self._map.otherparentset.add(f)
421 self._map.otherparentset.add(f)
422 self._map.addfile(f, state, mode, size, mtime)
423
423
424 def normal(self, f):
424 def normal(self, f):
425 '''Mark a file normal and clean.'''
425 '''Mark a file normal and clean.'''
426 s = os.lstat(self._join(f))
426 s = os.lstat(self._join(f))
427 mtime = s.st_mtime
427 mtime = s.st_mtime
428 self._addpath(f, 'n', s.st_mode,
428 self._addpath(f, 'n', s.st_mode,
429 s.st_size & _rangemask, mtime & _rangemask)
429 s.st_size & _rangemask, mtime & _rangemask)
430 self._map.copymap.pop(f, None)
430 self._map.copymap.pop(f, None)
431 if f in self._map.nonnormalset:
431 if f in self._map.nonnormalset:
432 self._map.nonnormalset.remove(f)
432 self._map.nonnormalset.remove(f)
433 if mtime > self._lastnormaltime:
433 if mtime > self._lastnormaltime:
434 # Remember the most recent modification timeslot for status(),
434 # Remember the most recent modification timeslot for status(),
435 # to make sure we won't miss future size-preserving file content
435 # to make sure we won't miss future size-preserving file content
436 # modifications that happen within the same timeslot.
436 # modifications that happen within the same timeslot.
437 self._lastnormaltime = mtime
437 self._lastnormaltime = mtime
438
438
439 def normallookup(self, f):
439 def normallookup(self, f):
440 '''Mark a file normal, but possibly dirty.'''
440 '''Mark a file normal, but possibly dirty.'''
441 if self._pl[1] != nullid:
441 if self._pl[1] != nullid:
442 # if there is a merge going on and the file was either
442 # if there is a merge going on and the file was either
443 # in state 'm' (-1) or coming from other parent (-2) before
443 # in state 'm' (-1) or coming from other parent (-2) before
444 # being removed, restore that state.
444 # being removed, restore that state.
445 entry = self._map.get(f)
445 entry = self._map.get(f)
446 if entry is not None:
446 if entry is not None:
447 if entry[0] == 'r' and entry[2] in (-1, -2):
447 if entry[0] == 'r' and entry[2] in (-1, -2):
448 source = self._map.copymap.get(f)
448 source = self._map.copymap.get(f)
449 if entry[2] == -1:
449 if entry[2] == -1:
450 self.merge(f)
450 self.merge(f)
451 elif entry[2] == -2:
451 elif entry[2] == -2:
452 self.otherparent(f)
452 self.otherparent(f)
453 if source:
453 if source:
454 self.copy(source, f)
454 self.copy(source, f)
455 return
455 return
456 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
456 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
457 return
457 return
458 self._addpath(f, 'n', 0, -1, -1)
458 self._addpath(f, 'n', 0, -1, -1)
459 self._map.copymap.pop(f, None)
459 self._map.copymap.pop(f, None)
460
460
461 def otherparent(self, f):
461 def otherparent(self, f):
462 '''Mark as coming from the other parent, always dirty.'''
462 '''Mark as coming from the other parent, always dirty.'''
463 if self._pl[1] == nullid:
463 if self._pl[1] == nullid:
464 raise error.Abort(_("setting %r to other parent "
464 raise error.Abort(_("setting %r to other parent "
465 "only allowed in merges") % f)
465 "only allowed in merges") % f)
466 if f in self and self[f] == 'n':
466 if f in self and self[f] == 'n':
467 # merge-like
467 # merge-like
468 self._addpath(f, 'm', 0, -2, -1)
468 self._addpath(f, 'm', 0, -2, -1)
469 else:
469 else:
470 # add-like
470 # add-like
471 self._addpath(f, 'n', 0, -2, -1)
471 self._addpath(f, 'n', 0, -2, -1)
472 self._map.copymap.pop(f, None)
472 self._map.copymap.pop(f, None)
473
473
474 def add(self, f):
474 def add(self, f):
475 '''Mark a file added.'''
475 '''Mark a file added.'''
476 self._addpath(f, 'a', 0, -1, -1)
476 self._addpath(f, 'a', 0, -1, -1)
477 self._map.copymap.pop(f, None)
477 self._map.copymap.pop(f, None)
478
478
479 def remove(self, f):
479 def remove(self, f):
480 '''Mark a file removed.'''
480 '''Mark a file removed.'''
481 self._dirty = True
481 self._dirty = True
482 self._droppath(f)
482 self._droppath(f)
483 size = 0
483 size = 0
484 if self._pl[1] != nullid:
484 if self._pl[1] != nullid:
485 entry = self._map.get(f)
485 entry = self._map.get(f)
486 if entry is not None:
486 if entry is not None:
487 # backup the previous state
487 # backup the previous state
488 if entry[0] == 'm': # merge
488 if entry[0] == 'm': # merge
489 size = -1
489 size = -1
490 elif entry[0] == 'n' and entry[2] == -2: # other parent
490 elif entry[0] == 'n' and entry[2] == -2: # other parent
491 size = -2
491 size = -2
492 self._map.otherparentset.add(f)
492 self._map.otherparentset.add(f)
493 self._map[f] = dirstatetuple('r', 0, size, 0)
494 self._map.nonnormalset.add(f)
493 self._map.nonnormalset.add(f)
494 self._map.removefile(f, size)
495 if size == 0:
495 if size == 0:
496 self._map.copymap.pop(f, None)
496 self._map.copymap.pop(f, None)
497
497
498 def merge(self, f):
498 def merge(self, f):
499 '''Mark a file merged.'''
499 '''Mark a file merged.'''
500 if self._pl[1] == nullid:
500 if self._pl[1] == nullid:
501 return self.normallookup(f)
501 return self.normallookup(f)
502 return self.otherparent(f)
502 return self.otherparent(f)
503
503
504 def drop(self, f):
504 def drop(self, f):
505 '''Drop a file from the dirstate'''
505 '''Drop a file from the dirstate'''
506 if f in self._map:
506 if self._map.dropfile(f):
507 self._dirty = True
507 self._dirty = True
508 self._droppath(f)
508 self._droppath(f)
509 del self._map[f]
510 if f in self._map.nonnormalset:
509 if f in self._map.nonnormalset:
511 self._map.nonnormalset.remove(f)
510 self._map.nonnormalset.remove(f)
512 self._map.copymap.pop(f, None)
511 self._map.copymap.pop(f, None)
513
512
514 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
513 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
515 if exists is None:
514 if exists is None:
516 exists = os.path.lexists(os.path.join(self._root, path))
515 exists = os.path.lexists(os.path.join(self._root, path))
517 if not exists:
516 if not exists:
518 # Maybe a path component exists
517 # Maybe a path component exists
519 if not ignoremissing and '/' in path:
518 if not ignoremissing and '/' in path:
520 d, f = path.rsplit('/', 1)
519 d, f = path.rsplit('/', 1)
521 d = self._normalize(d, False, ignoremissing, None)
520 d = self._normalize(d, False, ignoremissing, None)
522 folded = d + "/" + f
521 folded = d + "/" + f
523 else:
522 else:
524 # No path components, preserve original case
523 # No path components, preserve original case
525 folded = path
524 folded = path
526 else:
525 else:
527 # recursively normalize leading directory components
526 # recursively normalize leading directory components
528 # against dirstate
527 # against dirstate
529 if '/' in normed:
528 if '/' in normed:
530 d, f = normed.rsplit('/', 1)
529 d, f = normed.rsplit('/', 1)
531 d = self._normalize(d, False, ignoremissing, True)
530 d = self._normalize(d, False, ignoremissing, True)
532 r = self._root + "/" + d
531 r = self._root + "/" + d
533 folded = d + "/" + util.fspath(f, r)
532 folded = d + "/" + util.fspath(f, r)
534 else:
533 else:
535 folded = util.fspath(normed, self._root)
534 folded = util.fspath(normed, self._root)
536 storemap[normed] = folded
535 storemap[normed] = folded
537
536
538 return folded
537 return folded
539
538
540 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
539 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
541 normed = util.normcase(path)
540 normed = util.normcase(path)
542 folded = self._map.filefoldmap.get(normed, None)
541 folded = self._map.filefoldmap.get(normed, None)
543 if folded is None:
542 if folded is None:
544 if isknown:
543 if isknown:
545 folded = path
544 folded = path
546 else:
545 else:
547 folded = self._discoverpath(path, normed, ignoremissing, exists,
546 folded = self._discoverpath(path, normed, ignoremissing, exists,
548 self._map.filefoldmap)
547 self._map.filefoldmap)
549 return folded
548 return folded
550
549
551 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
550 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
552 normed = util.normcase(path)
551 normed = util.normcase(path)
553 folded = self._map.filefoldmap.get(normed, None)
552 folded = self._map.filefoldmap.get(normed, None)
554 if folded is None:
553 if folded is None:
555 folded = self._map.dirfoldmap.get(normed, None)
554 folded = self._map.dirfoldmap.get(normed, None)
556 if folded is None:
555 if folded is None:
557 if isknown:
556 if isknown:
558 folded = path
557 folded = path
559 else:
558 else:
560 # store discovered result in dirfoldmap so that future
559 # store discovered result in dirfoldmap so that future
561 # normalizefile calls don't start matching directories
560 # normalizefile calls don't start matching directories
562 folded = self._discoverpath(path, normed, ignoremissing, exists,
561 folded = self._discoverpath(path, normed, ignoremissing, exists,
563 self._map.dirfoldmap)
562 self._map.dirfoldmap)
564 return folded
563 return folded
565
564
566 def normalize(self, path, isknown=False, ignoremissing=False):
565 def normalize(self, path, isknown=False, ignoremissing=False):
567 '''
566 '''
568 normalize the case of a pathname when on a casefolding filesystem
567 normalize the case of a pathname when on a casefolding filesystem
569
568
570 isknown specifies whether the filename came from walking the
569 isknown specifies whether the filename came from walking the
571 disk, to avoid extra filesystem access.
570 disk, to avoid extra filesystem access.
572
571
573 If ignoremissing is True, missing paths are returned
572 If ignoremissing is True, missing paths are returned
574 unchanged. Otherwise, we try harder to normalize possibly
573 unchanged. Otherwise, we try harder to normalize possibly
575 existing path components.
574 existing path components.
576
575
577 The normalized case is determined based on the following precedence:
576 The normalized case is determined based on the following precedence:
578
577
579 - version of name already stored in the dirstate
578 - version of name already stored in the dirstate
580 - version of name stored on disk
579 - version of name stored on disk
581 - version provided via command arguments
580 - version provided via command arguments
582 '''
581 '''
583
582
584 if self._checkcase:
583 if self._checkcase:
585 return self._normalize(path, isknown, ignoremissing)
584 return self._normalize(path, isknown, ignoremissing)
586 return path
585 return path
587
586
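# Illustration (not part of the file above): on a case-folding filesystem,
# normalize() maps a user-supplied spelling back to the canonical one, preferring
# the case already stored in the dirstate, then the case on disk, then the
# argument itself; the file names are hypothetical:
#
#     repo.dirstate.normalize('README.TXT')   # -> 'ReadMe.txt' if tracked that way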
588 def clear(self):
587 def clear(self):
589 self._map.clear()
588 self._map.clear()
590 self._lastnormaltime = 0
589 self._lastnormaltime = 0
591 self._updatedfiles.clear()
590 self._updatedfiles.clear()
592 self._dirty = True
591 self._dirty = True
593
592
594 def rebuild(self, parent, allfiles, changedfiles=None):
593 def rebuild(self, parent, allfiles, changedfiles=None):
595 if changedfiles is None:
594 if changedfiles is None:
596 # Rebuild entire dirstate
595 # Rebuild entire dirstate
597 changedfiles = allfiles
596 changedfiles = allfiles
598 lastnormaltime = self._lastnormaltime
597 lastnormaltime = self._lastnormaltime
599 self.clear()
598 self.clear()
600 self._lastnormaltime = lastnormaltime
599 self._lastnormaltime = lastnormaltime
601
600
602 if self._origpl is None:
601 if self._origpl is None:
603 self._origpl = self._pl
602 self._origpl = self._pl
604 self._map.setparents(parent, nullid)
603 self._map.setparents(parent, nullid)
605 for f in changedfiles:
604 for f in changedfiles:
606 if f in allfiles:
605 if f in allfiles:
607 self.normallookup(f)
606 self.normallookup(f)
608 else:
607 else:
609 self.drop(f)
608 self.drop(f)
610
609
611 self._dirty = True
610 self._dirty = True
612
611
613 def identity(self):
612 def identity(self):
614 '''Return the identity of the dirstate itself to detect changes in storage
613 '''Return the identity of the dirstate itself to detect changes in storage
615
614
616 If the identity of the previous dirstate equals this one, changes
615 If the identity of the previous dirstate equals this one, changes
617 computed against the former dirstate can still be written out consistently.
616 computed against the former dirstate can still be written out consistently.
618 '''
617 '''
619 return self._map.identity
618 return self._map.identity
620
619
621 def write(self, tr):
620 def write(self, tr):
622 if not self._dirty:
621 if not self._dirty:
623 return
622 return
624
623
625 filename = self._filename
624 filename = self._filename
626 if tr:
625 if tr:
627 # 'dirstate.write()' is not only for writing in-memory
626 # 'dirstate.write()' is not only for writing in-memory
628 # changes out, but also for dropping ambiguous timestamps.
627 # changes out, but also for dropping ambiguous timestamps.
629 # delayed writing would re-raise the "ambiguous timestamp issue".
628 # delayed writing would re-raise the "ambiguous timestamp issue".
630 # See also the wiki page below for detail:
629 # See also the wiki page below for detail:
631 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
630 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
632
631
633 # emulate dropping timestamp in 'parsers.pack_dirstate'
632 # emulate dropping timestamp in 'parsers.pack_dirstate'
634 now = _getfsnow(self._opener)
633 now = _getfsnow(self._opener)
635 dmap = self._map
634 dmap = self._map
636 for f in self._updatedfiles:
635 for f in self._updatedfiles:
637 e = dmap.get(f)
636 e = dmap.get(f)
638 if e is not None and e[0] == 'n' and e[3] == now:
637 if e is not None and e[0] == 'n' and e[3] == now:
639 dmap[f] = dirstatetuple(e[0], e[1], e[2], -1)
638 dmap.addfile(f, e[0], e[1], e[2], -1)
640 self._map.nonnormalset.add(f)
639 self._map.nonnormalset.add(f)
641
640
642 # emulate that all 'dirstate.normal' results are written out
641 # emulate that all 'dirstate.normal' results are written out
643 self._lastnormaltime = 0
642 self._lastnormaltime = 0
644 self._updatedfiles.clear()
643 self._updatedfiles.clear()
645
644
646 # delay writing in-memory changes out
645 # delay writing in-memory changes out
647 tr.addfilegenerator('dirstate', (self._filename,),
646 tr.addfilegenerator('dirstate', (self._filename,),
648 self._writedirstate, location='plain')
647 self._writedirstate, location='plain')
649 return
648 return
650
649
651 st = self._opener(filename, "w", atomictemp=True, checkambig=True)
650 st = self._opener(filename, "w", atomictemp=True, checkambig=True)
652 self._writedirstate(st)
651 self._writedirstate(st)
653
652
654 def addparentchangecallback(self, category, callback):
653 def addparentchangecallback(self, category, callback):
655 """add a callback to be called when the wd parents are changed
654 """add a callback to be called when the wd parents are changed
656
655
657 Callback will be called with the following arguments:
656 Callback will be called with the following arguments:
658 dirstate, (oldp1, oldp2), (newp1, newp2)
657 dirstate, (oldp1, oldp2), (newp1, newp2)
659
658
660 Category is a unique identifier to allow overwriting an old callback
659 Category is a unique identifier to allow overwriting an old callback
661 with a newer callback.
660 with a newer callback.
662 """
661 """
663 self._plchangecallbacks[category] = callback
662 self._plchangecallbacks[category] = callback
664
663
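# Illustration (not part of the file above): the callback receives the dirstate
# plus the old and new parent pairs; `repo` is an assumed localrepo:
#
#     def logparents(dirstate, oldparents, newparents):
#         dirstate._ui.debug('parents %r -> %r\n' % (oldparents, newparents))
#
#     repo.dirstate.addparentchangecallback('example-logger', logparents)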
665 def _writedirstate(self, st):
664 def _writedirstate(self, st):
666 # notify callbacks about parents change
665 # notify callbacks about parents change
667 if self._origpl is not None and self._origpl != self._pl:
666 if self._origpl is not None and self._origpl != self._pl:
668 for c, callback in sorted(self._plchangecallbacks.iteritems()):
667 for c, callback in sorted(self._plchangecallbacks.iteritems()):
669 callback(self, self._origpl, self._pl)
668 callback(self, self._origpl, self._pl)
670 self._origpl = None
669 self._origpl = None
671 # use the modification time of the newly created temporary file as the
670 # use the modification time of the newly created temporary file as the
672 # filesystem's notion of 'now'
671 # filesystem's notion of 'now'
673 now = util.fstat(st).st_mtime & _rangemask
672 now = util.fstat(st).st_mtime & _rangemask
674
673
675 # a large enough 'delaywrite' prevents 'pack_dirstate' from dropping the
674 # a large enough 'delaywrite' prevents 'pack_dirstate' from dropping the
676 # timestamp of each entry in dirstate, because of 'now > mtime'
675 # timestamp of each entry in dirstate, because of 'now > mtime'
677 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite')
676 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite')
678 if delaywrite > 0:
677 if delaywrite > 0:
679 # do we have any files to delay for?
678 # do we have any files to delay for?
680 for f, e in self._map.iteritems():
679 for f, e in self._map.iteritems():
681 if e[0] == 'n' and e[3] == now:
680 if e[0] == 'n' and e[3] == now:
682 import time # to avoid useless import
681 import time # to avoid useless import
683 # rather than sleep n seconds, sleep until the next
682 # rather than sleep n seconds, sleep until the next
684 # multiple of n seconds
683 # multiple of n seconds
685 clock = time.time()
684 clock = time.time()
686 start = int(clock) - (int(clock) % delaywrite)
685 start = int(clock) - (int(clock) % delaywrite)
687 end = start + delaywrite
686 end = start + delaywrite
688 time.sleep(end - clock)
687 time.sleep(end - clock)
689 now = end # trust our estimate that the end is near now
688 now = end # trust our estimate that the end is near now
690 break
689 break
691
690
692 self._map.write(st, now)
691 self._map.write(st, now)
693 self._lastnormaltime = 0
692 self._lastnormaltime = 0
694 self._dirty = False
693 self._dirty = False
695
694
696 def _dirignore(self, f):
695 def _dirignore(self, f):
697 if f == '.':
696 if f == '.':
698 return False
697 return False
699 if self._ignore(f):
698 if self._ignore(f):
700 return True
699 return True
701 for p in util.finddirs(f):
700 for p in util.finddirs(f):
702 if self._ignore(p):
701 if self._ignore(p):
703 return True
702 return True
704 return False
703 return False
705
704
706 def _ignorefiles(self):
705 def _ignorefiles(self):
707 files = []
706 files = []
708 if os.path.exists(self._join('.hgignore')):
707 if os.path.exists(self._join('.hgignore')):
709 files.append(self._join('.hgignore'))
708 files.append(self._join('.hgignore'))
710 for name, path in self._ui.configitems("ui"):
709 for name, path in self._ui.configitems("ui"):
711 if name == 'ignore' or name.startswith('ignore.'):
710 if name == 'ignore' or name.startswith('ignore.'):
712 # we need to use os.path.join here rather than self._join
711 # we need to use os.path.join here rather than self._join
713 # because path is arbitrary and user-specified
712 # because path is arbitrary and user-specified
714 files.append(os.path.join(self._rootdir, util.expandpath(path)))
713 files.append(os.path.join(self._rootdir, util.expandpath(path)))
715 return files
714 return files
716
715
717 def _ignorefileandline(self, f):
716 def _ignorefileandline(self, f):
718 files = collections.deque(self._ignorefiles())
717 files = collections.deque(self._ignorefiles())
719 visited = set()
718 visited = set()
720 while files:
719 while files:
721 i = files.popleft()
720 i = files.popleft()
722 patterns = matchmod.readpatternfile(i, self._ui.warn,
721 patterns = matchmod.readpatternfile(i, self._ui.warn,
723 sourceinfo=True)
722 sourceinfo=True)
724 for pattern, lineno, line in patterns:
723 for pattern, lineno, line in patterns:
725 kind, p = matchmod._patsplit(pattern, 'glob')
724 kind, p = matchmod._patsplit(pattern, 'glob')
726 if kind == "subinclude":
725 if kind == "subinclude":
727 if p not in visited:
726 if p not in visited:
728 files.append(p)
727 files.append(p)
729 continue
728 continue
730 m = matchmod.match(self._root, '', [], [pattern],
729 m = matchmod.match(self._root, '', [], [pattern],
731 warn=self._ui.warn)
730 warn=self._ui.warn)
732 if m(f):
731 if m(f):
733 return (i, lineno, line)
732 return (i, lineno, line)
734 visited.add(i)
733 visited.add(i)
735 return (None, -1, "")
734 return (None, -1, "")
736
735
737 def _walkexplicit(self, match, subrepos):
736 def _walkexplicit(self, match, subrepos):
738 '''Get stat data about the files explicitly specified by match.
737 '''Get stat data about the files explicitly specified by match.
739
738
740 Return a triple (results, dirsfound, dirsnotfound).
739 Return a triple (results, dirsfound, dirsnotfound).
741 - results is a mapping from filename to stat result. It also contains
740 - results is a mapping from filename to stat result. It also contains
742 listings mapping subrepos and .hg to None.
741 listings mapping subrepos and .hg to None.
743 - dirsfound is a list of files found to be directories.
742 - dirsfound is a list of files found to be directories.
744 - dirsnotfound is a list of files that the dirstate thinks are
743 - dirsnotfound is a list of files that the dirstate thinks are
745 directories and that were not found.'''
744 directories and that were not found.'''
746
745
747 def badtype(mode):
746 def badtype(mode):
748 kind = _('unknown')
747 kind = _('unknown')
749 if stat.S_ISCHR(mode):
748 if stat.S_ISCHR(mode):
750 kind = _('character device')
749 kind = _('character device')
751 elif stat.S_ISBLK(mode):
750 elif stat.S_ISBLK(mode):
752 kind = _('block device')
751 kind = _('block device')
753 elif stat.S_ISFIFO(mode):
752 elif stat.S_ISFIFO(mode):
754 kind = _('fifo')
753 kind = _('fifo')
755 elif stat.S_ISSOCK(mode):
754 elif stat.S_ISSOCK(mode):
756 kind = _('socket')
755 kind = _('socket')
757 elif stat.S_ISDIR(mode):
756 elif stat.S_ISDIR(mode):
758 kind = _('directory')
757 kind = _('directory')
759 return _('unsupported file type (type is %s)') % kind
758 return _('unsupported file type (type is %s)') % kind
760
759
761 matchedir = match.explicitdir
760 matchedir = match.explicitdir
762 badfn = match.bad
761 badfn = match.bad
763 dmap = self._map
762 dmap = self._map
764 lstat = os.lstat
763 lstat = os.lstat
765 getkind = stat.S_IFMT
764 getkind = stat.S_IFMT
766 dirkind = stat.S_IFDIR
765 dirkind = stat.S_IFDIR
767 regkind = stat.S_IFREG
766 regkind = stat.S_IFREG
768 lnkkind = stat.S_IFLNK
767 lnkkind = stat.S_IFLNK
769 join = self._join
768 join = self._join
770 dirsfound = []
769 dirsfound = []
771 foundadd = dirsfound.append
770 foundadd = dirsfound.append
772 dirsnotfound = []
771 dirsnotfound = []
773 notfoundadd = dirsnotfound.append
772 notfoundadd = dirsnotfound.append
774
773
775 if not match.isexact() and self._checkcase:
774 if not match.isexact() and self._checkcase:
776 normalize = self._normalize
775 normalize = self._normalize
777 else:
776 else:
778 normalize = None
777 normalize = None
779
778
780 files = sorted(match.files())
779 files = sorted(match.files())
781 subrepos.sort()
780 subrepos.sort()
782 i, j = 0, 0
781 i, j = 0, 0
783 while i < len(files) and j < len(subrepos):
782 while i < len(files) and j < len(subrepos):
784 subpath = subrepos[j] + "/"
783 subpath = subrepos[j] + "/"
785 if files[i] < subpath:
784 if files[i] < subpath:
786 i += 1
785 i += 1
787 continue
786 continue
788 while i < len(files) and files[i].startswith(subpath):
787 while i < len(files) and files[i].startswith(subpath):
789 del files[i]
788 del files[i]
790 j += 1
789 j += 1
791
790
792 if not files or '.' in files:
791 if not files or '.' in files:
793 files = ['.']
792 files = ['.']
794 results = dict.fromkeys(subrepos)
793 results = dict.fromkeys(subrepos)
795 results['.hg'] = None
794 results['.hg'] = None
796
795
797 alldirs = None
796 alldirs = None
798 for ff in files:
797 for ff in files:
799 # constructing the foldmap is expensive, so don't do it for the
798 # constructing the foldmap is expensive, so don't do it for the
800 # common case where files is ['.']
799 # common case where files is ['.']
801 if normalize and ff != '.':
800 if normalize and ff != '.':
802 nf = normalize(ff, False, True)
801 nf = normalize(ff, False, True)
803 else:
802 else:
804 nf = ff
803 nf = ff
805 if nf in results:
804 if nf in results:
806 continue
805 continue
807
806
808 try:
807 try:
809 st = lstat(join(nf))
808 st = lstat(join(nf))
810 kind = getkind(st.st_mode)
809 kind = getkind(st.st_mode)
811 if kind == dirkind:
810 if kind == dirkind:
812 if nf in dmap:
811 if nf in dmap:
813 # file replaced by dir on disk but still in dirstate
812 # file replaced by dir on disk but still in dirstate
814 results[nf] = None
813 results[nf] = None
815 if matchedir:
814 if matchedir:
816 matchedir(nf)
815 matchedir(nf)
817 foundadd((nf, ff))
816 foundadd((nf, ff))
818 elif kind == regkind or kind == lnkkind:
817 elif kind == regkind or kind == lnkkind:
819 results[nf] = st
818 results[nf] = st
820 else:
819 else:
821 badfn(ff, badtype(kind))
820 badfn(ff, badtype(kind))
822 if nf in dmap:
821 if nf in dmap:
823 results[nf] = None
822 results[nf] = None
824 except OSError as inst: # nf not found on disk - it is dirstate only
823 except OSError as inst: # nf not found on disk - it is dirstate only
825 if nf in dmap: # does it exactly match a missing file?
824 if nf in dmap: # does it exactly match a missing file?
826 results[nf] = None
825 results[nf] = None
827 else: # does it match a missing directory?
826 else: # does it match a missing directory?
828 if alldirs is None:
827 if alldirs is None:
829 alldirs = util.dirs(dmap._map)
828 alldirs = util.dirs(dmap._map)
830 if nf in alldirs:
829 if nf in alldirs:
831 if matchedir:
830 if matchedir:
832 matchedir(nf)
831 matchedir(nf)
833 notfoundadd(nf)
832 notfoundadd(nf)
834 else:
833 else:
835 badfn(ff, encoding.strtolocal(inst.strerror))
834 badfn(ff, encoding.strtolocal(inst.strerror))
836
835
837 # Case insensitive filesystems cannot rely on lstat() failing to detect
836 # Case insensitive filesystems cannot rely on lstat() failing to detect
838 # a case-only rename. Prune the stat object for any file that does not
837 # a case-only rename. Prune the stat object for any file that does not
839 # match the case in the filesystem, if there are multiple files that
838 # match the case in the filesystem, if there are multiple files that
840 # normalize to the same path.
839 # normalize to the same path.
841 if match.isexact() and self._checkcase:
840 if match.isexact() and self._checkcase:
842 normed = {}
841 normed = {}
843
842
844 for f, st in results.iteritems():
843 for f, st in results.iteritems():
845 if st is None:
844 if st is None:
846 continue
845 continue
847
846
848 nc = util.normcase(f)
847 nc = util.normcase(f)
849 paths = normed.get(nc)
848 paths = normed.get(nc)
850
849
851 if paths is None:
850 if paths is None:
852 paths = set()
851 paths = set()
853 normed[nc] = paths
852 normed[nc] = paths
854
853
855 paths.add(f)
854 paths.add(f)
856
855
857 for norm, paths in normed.iteritems():
856 for norm, paths in normed.iteritems():
858 if len(paths) > 1:
857 if len(paths) > 1:
859 for path in paths:
858 for path in paths:
860 folded = self._discoverpath(path, norm, True, None,
859 folded = self._discoverpath(path, norm, True, None,
861 self._map.dirfoldmap)
860 self._map.dirfoldmap)
862 if path != folded:
861 if path != folded:
863 results[path] = None
862 results[path] = None
864
863
865 return results, dirsfound, dirsnotfound
864 return results, dirsfound, dirsnotfound
866
865
867 def walk(self, match, subrepos, unknown, ignored, full=True):
866 def walk(self, match, subrepos, unknown, ignored, full=True):
868 '''
867 '''
869 Walk recursively through the directory tree, finding all files
868 Walk recursively through the directory tree, finding all files
870 matched by match.
869 matched by match.
871
870
872 If full is False, maybe skip some known-clean files.
871 If full is False, maybe skip some known-clean files.
873
872
874 Return a dict mapping filename to stat-like object (either
873 Return a dict mapping filename to stat-like object (either
875 mercurial.osutil.stat instance or return value of os.stat()).
874 mercurial.osutil.stat instance or return value of os.stat()).
876
875
877 '''
876 '''
878 # full is a flag that extensions that hook into walk can use -- this
877 # full is a flag that extensions that hook into walk can use -- this
879 # implementation doesn't use it at all. This satisfies the contract
878 # implementation doesn't use it at all. This satisfies the contract
880 # because we only guarantee a "maybe".
879 # because we only guarantee a "maybe".
881
880
882 if ignored:
881 if ignored:
883 ignore = util.never
882 ignore = util.never
884 dirignore = util.never
883 dirignore = util.never
885 elif unknown:
884 elif unknown:
886 ignore = self._ignore
885 ignore = self._ignore
887 dirignore = self._dirignore
886 dirignore = self._dirignore
888 else:
887 else:
889 # if not unknown and not ignored, drop dir recursion and step 2
888 # if not unknown and not ignored, drop dir recursion and step 2
890 ignore = util.always
889 ignore = util.always
891 dirignore = util.always
890 dirignore = util.always
892
891
893 matchfn = match.matchfn
892 matchfn = match.matchfn
894 matchalways = match.always()
893 matchalways = match.always()
895 matchtdir = match.traversedir
894 matchtdir = match.traversedir
896 dmap = self._map
895 dmap = self._map
897 listdir = util.listdir
896 listdir = util.listdir
898 lstat = os.lstat
897 lstat = os.lstat
899 dirkind = stat.S_IFDIR
898 dirkind = stat.S_IFDIR
900 regkind = stat.S_IFREG
899 regkind = stat.S_IFREG
901 lnkkind = stat.S_IFLNK
900 lnkkind = stat.S_IFLNK
902 join = self._join
901 join = self._join
903
902
904 exact = skipstep3 = False
903 exact = skipstep3 = False
905 if match.isexact(): # match.exact
904 if match.isexact(): # match.exact
906 exact = True
905 exact = True
907 dirignore = util.always # skip step 2
906 dirignore = util.always # skip step 2
908 elif match.prefix(): # match.match, no patterns
907 elif match.prefix(): # match.match, no patterns
909 skipstep3 = True
908 skipstep3 = True
910
909
911 if not exact and self._checkcase:
910 if not exact and self._checkcase:
912 normalize = self._normalize
911 normalize = self._normalize
913 normalizefile = self._normalizefile
912 normalizefile = self._normalizefile
914 skipstep3 = False
913 skipstep3 = False
915 else:
914 else:
916 normalize = self._normalize
915 normalize = self._normalize
917 normalizefile = None
916 normalizefile = None
918
917
919 # step 1: find all explicit files
918 # step 1: find all explicit files
920 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
919 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
921
920
922 skipstep3 = skipstep3 and not (work or dirsnotfound)
921 skipstep3 = skipstep3 and not (work or dirsnotfound)
923 work = [d for d in work if not dirignore(d[0])]
922 work = [d for d in work if not dirignore(d[0])]
924
923
925 # step 2: visit subdirectories
924 # step 2: visit subdirectories
926 def traverse(work, alreadynormed):
925 def traverse(work, alreadynormed):
927 wadd = work.append
926 wadd = work.append
928 while work:
927 while work:
929 nd = work.pop()
928 nd = work.pop()
930 if not match.visitdir(nd):
929 if not match.visitdir(nd):
931 continue
930 continue
932 skip = None
931 skip = None
933 if nd == '.':
932 if nd == '.':
934 nd = ''
933 nd = ''
935 else:
934 else:
936 skip = '.hg'
935 skip = '.hg'
937 try:
936 try:
938 entries = listdir(join(nd), stat=True, skip=skip)
937 entries = listdir(join(nd), stat=True, skip=skip)
939 except OSError as inst:
938 except OSError as inst:
940 if inst.errno in (errno.EACCES, errno.ENOENT):
939 if inst.errno in (errno.EACCES, errno.ENOENT):
941 match.bad(self.pathto(nd),
940 match.bad(self.pathto(nd),
942 encoding.strtolocal(inst.strerror))
941 encoding.strtolocal(inst.strerror))
943 continue
942 continue
944 raise
943 raise
945 for f, kind, st in entries:
944 for f, kind, st in entries:
946 if normalizefile:
945 if normalizefile:
947 # even though f might be a directory, we're only
946 # even though f might be a directory, we're only
948 # interested in comparing it to files currently in the
947 # interested in comparing it to files currently in the
949 # dmap -- therefore normalizefile is enough
948 # dmap -- therefore normalizefile is enough
950 nf = normalizefile(nd and (nd + "/" + f) or f, True,
949 nf = normalizefile(nd and (nd + "/" + f) or f, True,
951 True)
950 True)
952 else:
951 else:
953 nf = nd and (nd + "/" + f) or f
952 nf = nd and (nd + "/" + f) or f
954 if nf not in results:
953 if nf not in results:
955 if kind == dirkind:
954 if kind == dirkind:
956 if not ignore(nf):
955 if not ignore(nf):
957 if matchtdir:
956 if matchtdir:
958 matchtdir(nf)
957 matchtdir(nf)
959 wadd(nf)
958 wadd(nf)
960 if nf in dmap and (matchalways or matchfn(nf)):
959 if nf in dmap and (matchalways or matchfn(nf)):
961 results[nf] = None
960 results[nf] = None
962 elif kind == regkind or kind == lnkkind:
961 elif kind == regkind or kind == lnkkind:
963 if nf in dmap:
962 if nf in dmap:
964 if matchalways or matchfn(nf):
963 if matchalways or matchfn(nf):
965 results[nf] = st
964 results[nf] = st
966 elif ((matchalways or matchfn(nf))
965 elif ((matchalways or matchfn(nf))
967 and not ignore(nf)):
966 and not ignore(nf)):
968 # unknown file -- normalize if necessary
967 # unknown file -- normalize if necessary
969 if not alreadynormed:
968 if not alreadynormed:
970 nf = normalize(nf, False, True)
969 nf = normalize(nf, False, True)
971 results[nf] = st
970 results[nf] = st
972 elif nf in dmap and (matchalways or matchfn(nf)):
971 elif nf in dmap and (matchalways or matchfn(nf)):
973 results[nf] = None
972 results[nf] = None
974
973
975 for nd, d in work:
974 for nd, d in work:
976 # alreadynormed means that traverse doesn't have to do any
975 # alreadynormed means that traverse doesn't have to do any
977 # expensive directory normalization
976 # expensive directory normalization
978 alreadynormed = not normalize or nd == d
977 alreadynormed = not normalize or nd == d
979 traverse([d], alreadynormed)
978 traverse([d], alreadynormed)
980
979
981 for s in subrepos:
980 for s in subrepos:
982 del results[s]
981 del results[s]
983 del results['.hg']
982 del results['.hg']
984
983
985 # step 3: visit remaining files from dmap
984 # step 3: visit remaining files from dmap
986 if not skipstep3 and not exact:
985 if not skipstep3 and not exact:
987 # If a dmap file is not in results yet, it was either
986 # If a dmap file is not in results yet, it was either
988 # a) not matching matchfn, b) ignored, c) missing, or d) under a
987 # a) not matching matchfn, b) ignored, c) missing, or d) under a
989 # symlink directory.
988 # symlink directory.
990 if not results and matchalways:
989 if not results and matchalways:
991 visit = [f for f in dmap]
990 visit = [f for f in dmap]
992 else:
991 else:
993 visit = [f for f in dmap if f not in results and matchfn(f)]
992 visit = [f for f in dmap if f not in results and matchfn(f)]
994 visit.sort()
993 visit.sort()
995
994
996 if unknown:
995 if unknown:
997 # unknown == True means we walked all dirs under the roots
996 # unknown == True means we walked all dirs under the roots
998 # that weren't ignored, and everything that matched was stat'ed
997 # that weren't ignored, and everything that matched was stat'ed
999 # and is already in results.
998 # and is already in results.
1000 # The rest must thus be ignored or under a symlink.
999 # The rest must thus be ignored or under a symlink.
1001 audit_path = pathutil.pathauditor(self._root, cached=True)
1000 audit_path = pathutil.pathauditor(self._root, cached=True)
1002
1001
1003 for nf in iter(visit):
1002 for nf in iter(visit):
1004 # If a stat for the same file was already added with a
1003 # If a stat for the same file was already added with a
1005 # different case, don't add one for this, since that would
1004 # different case, don't add one for this, since that would
1006 # make it appear as if the file exists under both names
1005 # make it appear as if the file exists under both names
1007 # on disk.
1006 # on disk.
1008 if (normalizefile and
1007 if (normalizefile and
1009 normalizefile(nf, True, True) in results):
1008 normalizefile(nf, True, True) in results):
1010 results[nf] = None
1009 results[nf] = None
1011 # Report ignored items in the dmap as long as they are not
1010 # Report ignored items in the dmap as long as they are not
1012 # under a symlink directory.
1011 # under a symlink directory.
1013 elif audit_path.check(nf):
1012 elif audit_path.check(nf):
1014 try:
1013 try:
1015 results[nf] = lstat(join(nf))
1014 results[nf] = lstat(join(nf))
1016 # file was just ignored, no links, and exists
1015 # file was just ignored, no links, and exists
1017 except OSError:
1016 except OSError:
1018 # file doesn't exist
1017 # file doesn't exist
1019 results[nf] = None
1018 results[nf] = None
1020 else:
1019 else:
1021 # It's either missing or under a symlink directory
1020 # It's either missing or under a symlink directory
1022 # which we report as missing in this case
1021 # which we report as missing in this case
1023 results[nf] = None
1022 results[nf] = None
1024 else:
1023 else:
1025 # We may not have walked the full directory tree above,
1024 # We may not have walked the full directory tree above,
1026 # so stat and check everything we missed.
1025 # so stat and check everything we missed.
1027 iv = iter(visit)
1026 iv = iter(visit)
1028 for st in util.statfiles([join(i) for i in visit]):
1027 for st in util.statfiles([join(i) for i in visit]):
1029 results[next(iv)] = st
1028 results[next(iv)] = st
1030 return results
1029 return results
1031
1030
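The walk above proceeds in three steps: explicit files named by the match, a worklist traversal of subdirectories, and a catch-up pass over tracked files that were never reached on disk. A minimal, self-contained sketch of the same worklist idea, independent of Mercurial's matcher and normalization machinery (all names below are illustrative, not part of this changeset):

import os
import stat

def walk_sketch(root, tracked, skipdirs=frozenset(['.hg'])):
    # step 2 analogue: directory traversal driven by a worklist
    results = {}
    work = ['']
    while work:
        nd = work.pop()
        base = os.path.join(root, nd) if nd else root
        try:
            names = os.listdir(base)
        except OSError:
            continue  # unreadable or vanished directory: report nothing for it
        for name in names:
            nf = (nd + '/' + name) if nd else name
            st = os.lstat(os.path.join(root, nf))
            if stat.S_ISDIR(st.st_mode):
                if name not in skipdirs:
                    work.append(nf)
            elif nf in tracked:
                results[nf] = st
    # step 3 analogue: tracked files we never saw (missing or unreachable)
    for nf in tracked:
        results.setdefault(nf, None)
    return results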
1032 def status(self, match, subrepos, ignored, clean, unknown):
1031 def status(self, match, subrepos, ignored, clean, unknown):
1033 '''Determine the status of the working copy relative to the
1032 '''Determine the status of the working copy relative to the
1034 dirstate and return a pair of (unsure, status), where status is of type
1033 dirstate and return a pair of (unsure, status), where status is of type
1035 scmutil.status and:
1034 scmutil.status and:
1036
1035
1037 unsure:
1036 unsure:
1038 files that might have been modified since the dirstate was
1037 files that might have been modified since the dirstate was
1039 written, but need to be read to be sure (size is the same
1038 written, but need to be read to be sure (size is the same
1040 but mtime differs)
1039 but mtime differs)
1041 status.modified:
1040 status.modified:
1042 files that have definitely been modified since the dirstate
1041 files that have definitely been modified since the dirstate
1043 was written (different size or mode)
1042 was written (different size or mode)
1044 status.clean:
1043 status.clean:
1045 files that have definitely not been modified since the
1044 files that have definitely not been modified since the
1046 dirstate was written
1045 dirstate was written
1047 '''
1046 '''
1048 listignored, listclean, listunknown = ignored, clean, unknown
1047 listignored, listclean, listunknown = ignored, clean, unknown
1049 lookup, modified, added, unknown, ignored = [], [], [], [], []
1048 lookup, modified, added, unknown, ignored = [], [], [], [], []
1050 removed, deleted, clean = [], [], []
1049 removed, deleted, clean = [], [], []
1051
1050
1052 dmap = self._map
1051 dmap = self._map
1053 dmap.preload()
1052 dmap.preload()
1054 dcontains = dmap.__contains__
1053 dcontains = dmap.__contains__
1055 dget = dmap.__getitem__
1054 dget = dmap.__getitem__
1056 ladd = lookup.append # aka "unsure"
1055 ladd = lookup.append # aka "unsure"
1057 madd = modified.append
1056 madd = modified.append
1058 aadd = added.append
1057 aadd = added.append
1059 uadd = unknown.append
1058 uadd = unknown.append
1060 iadd = ignored.append
1059 iadd = ignored.append
1061 radd = removed.append
1060 radd = removed.append
1062 dadd = deleted.append
1061 dadd = deleted.append
1063 cadd = clean.append
1062 cadd = clean.append
1064 mexact = match.exact
1063 mexact = match.exact
1065 dirignore = self._dirignore
1064 dirignore = self._dirignore
1066 checkexec = self._checkexec
1065 checkexec = self._checkexec
1067 copymap = self._map.copymap
1066 copymap = self._map.copymap
1068 lastnormaltime = self._lastnormaltime
1067 lastnormaltime = self._lastnormaltime
1069
1068
1070 # We need to do full walks when either
1069 # We need to do full walks when either
1071 # - we're listing all clean files, or
1070 # - we're listing all clean files, or
1072 # - match.traversedir does something, because match.traversedir should
1071 # - match.traversedir does something, because match.traversedir should
1073 # be called for every dir in the working dir
1072 # be called for every dir in the working dir
1074 full = listclean or match.traversedir is not None
1073 full = listclean or match.traversedir is not None
1075 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1074 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1076 full=full).iteritems():
1075 full=full).iteritems():
1077 if not dcontains(fn):
1076 if not dcontains(fn):
1078 if (listignored or mexact(fn)) and dirignore(fn):
1077 if (listignored or mexact(fn)) and dirignore(fn):
1079 if listignored:
1078 if listignored:
1080 iadd(fn)
1079 iadd(fn)
1081 else:
1080 else:
1082 uadd(fn)
1081 uadd(fn)
1083 continue
1082 continue
1084
1083
1085 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1084 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1086 # written like that for performance reasons. dmap[fn] is not a
1085 # written like that for performance reasons. dmap[fn] is not a
1087 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1086 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1088 # opcode has fast paths when the value to be unpacked is a tuple or
1087 # opcode has fast paths when the value to be unpacked is a tuple or
1089 # a list, but falls back to creating a full-fledged iterator in
1088 # a list, but falls back to creating a full-fledged iterator in
1090 # general. That is much slower than simply accessing and storing the
1089 # general. That is much slower than simply accessing and storing the
1091 # tuple members one by one.
1090 # tuple members one by one.
1092 t = dget(fn)
1091 t = dget(fn)
1093 state = t[0]
1092 state = t[0]
1094 mode = t[1]
1093 mode = t[1]
1095 size = t[2]
1094 size = t[2]
1096 time = t[3]
1095 time = t[3]
1097
1096
1098 if not st and state in "nma":
1097 if not st and state in "nma":
1099 dadd(fn)
1098 dadd(fn)
1100 elif state == 'n':
1099 elif state == 'n':
1101 if (size >= 0 and
1100 if (size >= 0 and
1102 ((size != st.st_size and size != st.st_size & _rangemask)
1101 ((size != st.st_size and size != st.st_size & _rangemask)
1103 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1102 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1104 or size == -2 # other parent
1103 or size == -2 # other parent
1105 or fn in copymap):
1104 or fn in copymap):
1106 madd(fn)
1105 madd(fn)
1107 elif time != st.st_mtime and time != st.st_mtime & _rangemask:
1106 elif time != st.st_mtime and time != st.st_mtime & _rangemask:
1108 ladd(fn)
1107 ladd(fn)
1109 elif st.st_mtime == lastnormaltime:
1108 elif st.st_mtime == lastnormaltime:
1110 # fn may have just been marked as normal and it may have
1109 # fn may have just been marked as normal and it may have
1111 # changed in the same second without changing its size.
1110 # changed in the same second without changing its size.
1112 # This can happen if we quickly do multiple commits.
1111 # This can happen if we quickly do multiple commits.
1113 # Force lookup, so we don't miss such a racy file change.
1112 # Force lookup, so we don't miss such a racy file change.
1114 ladd(fn)
1113 ladd(fn)
1115 elif listclean:
1114 elif listclean:
1116 cadd(fn)
1115 cadd(fn)
1117 elif state == 'm':
1116 elif state == 'm':
1118 madd(fn)
1117 madd(fn)
1119 elif state == 'a':
1118 elif state == 'a':
1120 aadd(fn)
1119 aadd(fn)
1121 elif state == 'r':
1120 elif state == 'r':
1122 radd(fn)
1121 radd(fn)
1123
1122
1124 return (lookup, scmutil.status(modified, added, removed, deleted,
1123 return (lookup, scmutil.status(modified, added, removed, deleted,
1125 unknown, ignored, clean))
1124 unknown, ignored, clean))
1126
1125
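status() deliberately keeps the "unsure" bucket separate from scmutil.status: those files have matching size and mode but an ambiguous mtime, so only comparing contents can settle them. A hedged sketch of how a caller might resolve that bucket; read_working and read_recorded are invented callables, not Mercurial APIs:

def resolve_unsure(unsure, read_working, read_recorded):
    # Both callables take a filename and return its bytes; they are supplied
    # by the caller (illustrative only -- real callers compare against the
    # parent manifest through the repository APIs).
    modified, clean = [], []
    for fn in unsure:
        if read_working(fn) == read_recorded(fn):
            clean.append(fn)
        else:
            modified.append(fn)
    return modified, clean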
1127 def matches(self, match):
1126 def matches(self, match):
1128 '''
1127 '''
1129 return files in the dirstate (in whatever state) filtered by match
1128 return files in the dirstate (in whatever state) filtered by match
1130 '''
1129 '''
1131 dmap = self._map
1130 dmap = self._map
1132 if match.always():
1131 if match.always():
1133 return dmap.keys()
1132 return dmap.keys()
1134 files = match.files()
1133 files = match.files()
1135 if match.isexact():
1134 if match.isexact():
1136 # fast path -- filter the other way around, since typically files is
1135 # fast path -- filter the other way around, since typically files is
1137 # much smaller than dmap
1136 # much smaller than dmap
1138 return [f for f in files if f in dmap]
1137 return [f for f in files if f in dmap]
1139 if match.prefix() and all(fn in dmap for fn in files):
1138 if match.prefix() and all(fn in dmap for fn in files):
1140 # fast path -- all the values are known to be files, so just return
1139 # fast path -- all the values are known to be files, so just return
1141 # that
1140 # that
1142 return list(files)
1141 return list(files)
1143 return [f for f in dmap if match(f)]
1142 return [f for f in dmap if match(f)]
1144
1143
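The two fast paths above exploit the usual size asymmetry: the pattern list is tiny compared with the tracked-file map, so it is cheaper to probe the map with each pattern than to run the matcher over every tracked file. In miniature (names invented):

def exact_matches(dmap, files):
    # O(len(files)) hash lookups instead of O(len(dmap)) matcher calls
    return [f for f in files if f in dmap]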
1145 def _actualfilename(self, tr):
1144 def _actualfilename(self, tr):
1146 if tr:
1145 if tr:
1147 return self._pendingfilename
1146 return self._pendingfilename
1148 else:
1147 else:
1149 return self._filename
1148 return self._filename
1150
1149
1151 def savebackup(self, tr, backupname):
1150 def savebackup(self, tr, backupname):
1152 '''Save current dirstate into backup file'''
1151 '''Save current dirstate into backup file'''
1153 filename = self._actualfilename(tr)
1152 filename = self._actualfilename(tr)
1154 assert backupname != filename
1153 assert backupname != filename
1155
1154
1156 # use '_writedirstate' instead of 'write' to make sure changes are written out,
1155 # use '_writedirstate' instead of 'write' to make sure changes are written out,
1157 # because the latter skips writing while a transaction is running.
1156 # because the latter skips writing while a transaction is running.
1158 # The output file will be used to create a backup of the dirstate at this point.
1157 # The output file will be used to create a backup of the dirstate at this point.
1159 if self._dirty or not self._opener.exists(filename):
1158 if self._dirty or not self._opener.exists(filename):
1160 self._writedirstate(self._opener(filename, "w", atomictemp=True,
1159 self._writedirstate(self._opener(filename, "w", atomictemp=True,
1161 checkambig=True))
1160 checkambig=True))
1162
1161
1163 if tr:
1162 if tr:
1164 # ensure that subsequent tr.writepending returns True for
1163 # ensure that subsequent tr.writepending returns True for
1165 # changes written out above, even if dirstate is never
1164 # changes written out above, even if dirstate is never
1166 # changed after this
1165 # changed after this
1167 tr.addfilegenerator('dirstate', (self._filename,),
1166 tr.addfilegenerator('dirstate', (self._filename,),
1168 self._writedirstate, location='plain')
1167 self._writedirstate, location='plain')
1169
1168
1170 # ensure that pending file written above is unlinked at
1169 # ensure that pending file written above is unlinked at
1171 # failure, even if tr.writepending isn't invoked until the
1170 # failure, even if tr.writepending isn't invoked until the
1172 # end of this transaction
1171 # end of this transaction
1173 tr.registertmp(filename, location='plain')
1172 tr.registertmp(filename, location='plain')
1174
1173
1175 self._opener.tryunlink(backupname)
1174 self._opener.tryunlink(backupname)
1176 # hardlink backup is okay because _writedirstate is always called
1175 # hardlink backup is okay because _writedirstate is always called
1177 # with an "atomictemp=True" file.
1176 # with an "atomictemp=True" file.
1178 util.copyfile(self._opener.join(filename),
1177 util.copyfile(self._opener.join(filename),
1179 self._opener.join(backupname), hardlink=True)
1178 self._opener.join(backupname), hardlink=True)
1180
1179
1181 def restorebackup(self, tr, backupname):
1180 def restorebackup(self, tr, backupname):
1182 '''Restore dirstate by backup file'''
1181 '''Restore dirstate by backup file'''
1183 # this "invalidate()" prevents "wlock.release()" from writing
1182 # this "invalidate()" prevents "wlock.release()" from writing
1184 # changes of dirstate out after restoring from backup file
1183 # changes of dirstate out after restoring from backup file
1185 self.invalidate()
1184 self.invalidate()
1186 filename = self._actualfilename(tr)
1185 filename = self._actualfilename(tr)
1187 o = self._opener
1186 o = self._opener
1188 if util.samefile(o.join(backupname), o.join(filename)):
1187 if util.samefile(o.join(backupname), o.join(filename)):
1189 o.unlink(backupname)
1188 o.unlink(backupname)
1190 else:
1189 else:
1191 o.rename(backupname, filename, checkambig=True)
1190 o.rename(backupname, filename, checkambig=True)
1192
1191
1193 def clearbackup(self, tr, backupname):
1192 def clearbackup(self, tr, backupname):
1194 '''Clear backup file'''
1193 '''Clear backup file'''
1195 self._opener.unlink(backupname)
1194 self._opener.unlink(backupname)
1196
1195
1197 class dirstatemap(object):
1196 class dirstatemap(object):
1198 """Map encapsulating the dirstate's contents.
1197 """Map encapsulating the dirstate's contents.
1199
1198
1200 The dirstate contains the following state:
1199 The dirstate contains the following state:
1201
1200
1202 - `identity` is the identity of the dirstate file, which can be used to
1201 - `identity` is the identity of the dirstate file, which can be used to
1203 detect when changes have occurred to the dirstate file.
1202 detect when changes have occurred to the dirstate file.
1204
1203
1205 - `parents` is a pair containing the parents of the working copy. The
1204 - `parents` is a pair containing the parents of the working copy. The
1206 parents are updated by calling `setparents`.
1205 parents are updated by calling `setparents`.
1207
1206
1208 - the state map maps filenames to tuples of (state, mode, size, mtime),
1207 - the state map maps filenames to tuples of (state, mode, size, mtime),
1209 where state is a single character representing 'normal', 'added',
1208 where state is a single character representing 'normal', 'added',
1210 'removed', or 'merged'. It is accessed by treating the dirstate as a
1209 'removed', or 'merged'. It is read by treating the dirstate as a
1211 dict.
1210 dict. File state is updated by calling the `addfile`, `removefile` and
1211 `dropfile` methods.
1212
1212
1213 - `copymap` maps destination filenames to their source filename.
1213 - `copymap` maps destination filenames to their source filename.
1214
1214
1215 The dirstate also provides the following views onto the state:
1215 The dirstate also provides the following views onto the state:
1216
1216
1217 - `nonnormalset` is a set of the filenames that have state other
1217 - `nonnormalset` is a set of the filenames that have state other
1218 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1218 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1219
1219
1220 - `otherparentset` is a set of the filenames that are marked as coming
1220 - `otherparentset` is a set of the filenames that are marked as coming
1221 from the second parent when the dirstate is currently being merged.
1221 from the second parent when the dirstate is currently being merged.
1222
1222
1223 - `dirs` is a set-like object containing all the directories that contain
1223 - `dirs` is a set-like object containing all the directories that contain
1224 files in the dirstate, excluding any files that are marked as removed.
1224 files in the dirstate, excluding any files that are marked as removed.
1225
1225
1226 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1226 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1227 form that they appear as in the dirstate.
1227 form that they appear as in the dirstate.
1228
1228
1229 - `dirfoldmap` is a dict mapping normalized directory names to the
1229 - `dirfoldmap` is a dict mapping normalized directory names to the
1230 denormalized form that they appear as in the dirstate.
1230 denormalized form that they appear as in the dirstate.
1231
1231
1232 Once instantiated, the nonnormalset, otherparentset, dirs, filefoldmap and
1232 Once instantiated, the nonnormalset, otherparentset, dirs, filefoldmap and
1233 dirfoldmap views must be maintained by the caller.
1233 dirfoldmap views must be maintained by the caller.
1234 """
1234 """
1235
1235
1236 def __init__(self, ui, opener, root):
1236 def __init__(self, ui, opener, root):
1237 self._ui = ui
1237 self._ui = ui
1238 self._opener = opener
1238 self._opener = opener
1239 self._root = root
1239 self._root = root
1240 self._filename = 'dirstate'
1240 self._filename = 'dirstate'
1241
1241
1242 self._parents = None
1242 self._parents = None
1243 self._dirtyparents = False
1243 self._dirtyparents = False
1244
1244
1245 # for consistent view between _pl() and _read() invocations
1245 # for consistent view between _pl() and _read() invocations
1246 self._pendingmode = None
1246 self._pendingmode = None
1247
1247
1248 @propertycache
1248 @propertycache
1249 def _map(self):
1249 def _map(self):
1250 self._map = {}
1250 self._map = {}
1251 self.read()
1251 self.read()
1252 return self._map
1252 return self._map
1253
1253
1254 @propertycache
1254 @propertycache
1255 def copymap(self):
1255 def copymap(self):
1256 self.copymap = {}
1256 self.copymap = {}
1257 self._map
1257 self._map
1258 return self.copymap
1258 return self.copymap
1259
1259
1260 def clear(self):
1260 def clear(self):
1261 self._map.clear()
1261 self._map.clear()
1262 self.copymap.clear()
1262 self.copymap.clear()
1263 self.setparents(nullid, nullid)
1263 self.setparents(nullid, nullid)
1264 util.clearcachedproperty(self, "dirs")
1264 util.clearcachedproperty(self, "dirs")
1265 util.clearcachedproperty(self, "filefoldmap")
1265 util.clearcachedproperty(self, "filefoldmap")
1266 util.clearcachedproperty(self, "dirfoldmap")
1266 util.clearcachedproperty(self, "dirfoldmap")
1267 util.clearcachedproperty(self, "nonnormalset")
1267 util.clearcachedproperty(self, "nonnormalset")
1268 util.clearcachedproperty(self, "otherparentset")
1268 util.clearcachedproperty(self, "otherparentset")
1269
1269
1270 def iteritems(self):
1270 def iteritems(self):
1271 return self._map.iteritems()
1271 return self._map.iteritems()
1272
1272
1273 def __len__(self):
1273 def __len__(self):
1274 return len(self._map)
1274 return len(self._map)
1275
1275
1276 def __iter__(self):
1276 def __iter__(self):
1277 return iter(self._map)
1277 return iter(self._map)
1278
1278
1279 def get(self, key, default=None):
1279 def get(self, key, default=None):
1280 return self._map.get(key, default)
1280 return self._map.get(key, default)
1281
1281
1282 def __contains__(self, key):
1282 def __contains__(self, key):
1283 return key in self._map
1283 return key in self._map
1284
1284
1285 def __setitem__(self, key, value):
1286 self._map[key] = value
1287
1288 def __getitem__(self, key):
1285 def __getitem__(self, key):
1289 return self._map[key]
1286 return self._map[key]
1290
1287
1291 def __delitem__(self, key):
1292 del self._map[key]
1293
1294 def keys(self):
1288 def keys(self):
1295 return self._map.keys()
1289 return self._map.keys()
1296
1290
1297 def preload(self):
1291 def preload(self):
1298 """Loads the underlying data, if it's not already loaded"""
1292 """Loads the underlying data, if it's not already loaded"""
1299 self._map
1293 self._map
1300
1294
1295 def addfile(self, f, state, mode, size, mtime):
1296 """Add a tracked file to the dirstate."""
1297 self._map[f] = dirstatetuple(state, mode, size, mtime)
1298
1299 def removefile(self, f, size):
1300 """
1301 Mark a file as removed in the dirstate.
1302
1303 The `size` parameter is used to store sentinel values that indicate
1304 the file's previous state. In the future, we should refactor this
1305 to be more explicit about what that state is.
1306 """
1307 self._map[f] = dirstatetuple('r', 0, size, 0)
1308
1309 def dropfile(self, f):
1310 """
1311 Remove a file from the dirstate. Returns True if the file was
1312 previously recorded.
1313 """
1314 return self._map.pop(f, None) is not None
1315
1301 def nonnormalentries(self):
1316 def nonnormalentries(self):
1302 '''Compute the nonnormal dirstate entries from the dmap'''
1317 '''Compute the nonnormal dirstate entries from the dmap'''
1303 try:
1318 try:
1304 return parsers.nonnormalotherparententries(self._map)
1319 return parsers.nonnormalotherparententries(self._map)
1305 except AttributeError:
1320 except AttributeError:
1306 nonnorm = set()
1321 nonnorm = set()
1307 otherparent = set()
1322 otherparent = set()
1308 for fname, e in self._map.iteritems():
1323 for fname, e in self._map.iteritems():
1309 if e[0] != 'n' or e[3] == -1:
1324 if e[0] != 'n' or e[3] == -1:
1310 nonnorm.add(fname)
1325 nonnorm.add(fname)
1311 if e[0] == 'n' and e[2] == -2:
1326 if e[0] == 'n' and e[2] == -2:
1312 otherparent.add(fname)
1327 otherparent.add(fname)
1313 return nonnorm, otherparent
1328 return nonnorm, otherparent
1314
1329
1315 @propertycache
1330 @propertycache
1316 def filefoldmap(self):
1331 def filefoldmap(self):
1317 """Returns a dictionary mapping normalized case paths to their
1332 """Returns a dictionary mapping normalized case paths to their
1318 non-normalized versions.
1333 non-normalized versions.
1319 """
1334 """
1320 try:
1335 try:
1321 makefilefoldmap = parsers.make_file_foldmap
1336 makefilefoldmap = parsers.make_file_foldmap
1322 except AttributeError:
1337 except AttributeError:
1323 pass
1338 pass
1324 else:
1339 else:
1325 return makefilefoldmap(self._map, util.normcasespec,
1340 return makefilefoldmap(self._map, util.normcasespec,
1326 util.normcasefallback)
1341 util.normcasefallback)
1327
1342
1328 f = {}
1343 f = {}
1329 normcase = util.normcase
1344 normcase = util.normcase
1330 for name, s in self._map.iteritems():
1345 for name, s in self._map.iteritems():
1331 if s[0] != 'r':
1346 if s[0] != 'r':
1332 f[normcase(name)] = name
1347 f[normcase(name)] = name
1333 f['.'] = '.' # prevents useless util.fspath() invocation
1348 f['.'] = '.' # prevents useless util.fspath() invocation
1334 return f
1349 return f
1335
1350
1336 @propertycache
1351 @propertycache
1337 def dirs(self):
1352 def dirs(self):
1338 """Returns a set-like object containing all the directories in the
1353 """Returns a set-like object containing all the directories in the
1339 current dirstate.
1354 current dirstate.
1340 """
1355 """
1341 return util.dirs(self._map, 'r')
1356 return util.dirs(self._map, 'r')
1342
1357
1343 def _opendirstatefile(self):
1358 def _opendirstatefile(self):
1344 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1359 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1345 if self._pendingmode is not None and self._pendingmode != mode:
1360 if self._pendingmode is not None and self._pendingmode != mode:
1346 fp.close()
1361 fp.close()
1347 raise error.Abort(_('working directory state may be '
1362 raise error.Abort(_('working directory state may be '
1348 'changed parallelly'))
1363 'changed parallelly'))
1349 self._pendingmode = mode
1364 self._pendingmode = mode
1350 return fp
1365 return fp
1351
1366
1352 def parents(self):
1367 def parents(self):
1353 if not self._parents:
1368 if not self._parents:
1354 try:
1369 try:
1355 fp = self._opendirstatefile()
1370 fp = self._opendirstatefile()
1356 st = fp.read(40)
1371 st = fp.read(40)
1357 fp.close()
1372 fp.close()
1358 except IOError as err:
1373 except IOError as err:
1359 if err.errno != errno.ENOENT:
1374 if err.errno != errno.ENOENT:
1360 raise
1375 raise
1361 # File doesn't exist, so the current state is empty
1376 # File doesn't exist, so the current state is empty
1362 st = ''
1377 st = ''
1363
1378
1364 l = len(st)
1379 l = len(st)
1365 if l == 40:
1380 if l == 40:
1366 self._parents = st[:20], st[20:40]
1381 self._parents = st[:20], st[20:40]
1367 elif l == 0:
1382 elif l == 0:
1368 self._parents = [nullid, nullid]
1383 self._parents = [nullid, nullid]
1369 else:
1384 else:
1370 raise error.Abort(_('working directory state appears '
1385 raise error.Abort(_('working directory state appears '
1371 'damaged!'))
1386 'damaged!'))
1372
1387
1373 return self._parents
1388 return self._parents
1374
1389
1375 def setparents(self, p1, p2):
1390 def setparents(self, p1, p2):
1376 self._parents = (p1, p2)
1391 self._parents = (p1, p2)
1377 self._dirtyparents = True
1392 self._dirtyparents = True
1378
1393
1379 def read(self):
1394 def read(self):
1380 # ignore HG_PENDING because identity is used only for writing
1395 # ignore HG_PENDING because identity is used only for writing
1381 self.identity = util.filestat.frompath(
1396 self.identity = util.filestat.frompath(
1382 self._opener.join(self._filename))
1397 self._opener.join(self._filename))
1383
1398
1384 try:
1399 try:
1385 fp = self._opendirstatefile()
1400 fp = self._opendirstatefile()
1386 try:
1401 try:
1387 st = fp.read()
1402 st = fp.read()
1388 finally:
1403 finally:
1389 fp.close()
1404 fp.close()
1390 except IOError as err:
1405 except IOError as err:
1391 if err.errno != errno.ENOENT:
1406 if err.errno != errno.ENOENT:
1392 raise
1407 raise
1393 return
1408 return
1394 if not st:
1409 if not st:
1395 return
1410 return
1396
1411
1397 if util.safehasattr(parsers, 'dict_new_presized'):
1412 if util.safehasattr(parsers, 'dict_new_presized'):
1398 # Make an estimate of the number of files in the dirstate based on
1413 # Make an estimate of the number of files in the dirstate based on
1399 # its size. From a linear regression on a set of real-world repos,
1414 # its size. From a linear regression on a set of real-world repos,
1400 # all over 10,000 files, the size of a dirstate entry is 85
1415 # all over 10,000 files, the size of a dirstate entry is 85
1401 # bytes. The cost of resizing is significantly higher than the cost
1416 # bytes. The cost of resizing is significantly higher than the cost
1402 # of filling in a larger presized dict, so subtract 20% from the
1417 # of filling in a larger presized dict, so subtract 20% from the
1403 # size.
1418 # size.
1404 #
1419 #
1405 # This heuristic is imperfect in many ways, so in a future dirstate
1420 # This heuristic is imperfect in many ways, so in a future dirstate
1406 # format update it makes sense to just record the number of entries
1421 # format update it makes sense to just record the number of entries
1407 # on write.
1422 # on write.
1408 self._map = parsers.dict_new_presized(len(st) / 71)
1423 self._map = parsers.dict_new_presized(len(st) / 71)
1409
1424
1410 # Python's garbage collector triggers a GC each time a certain number
1425 # Python's garbage collector triggers a GC each time a certain number
1411 # of container objects (the number being defined by
1426 # of container objects (the number being defined by
1412 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1427 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1413 # for each file in the dirstate. The C version then immediately marks
1428 # for each file in the dirstate. The C version then immediately marks
1414 # them as not to be tracked by the collector. However, this has no
1429 # them as not to be tracked by the collector. However, this has no
1415 # effect on when GCs are triggered, only on what objects the GC looks
1430 # effect on when GCs are triggered, only on what objects the GC looks
1416 # into. This means that O(number of files) GCs are unavoidable.
1431 # into. This means that O(number of files) GCs are unavoidable.
1417 # Depending on when in the process's lifetime the dirstate is parsed,
1432 # Depending on when in the process's lifetime the dirstate is parsed,
1418 # this can get very expensive. As a workaround, disable GC while
1433 # this can get very expensive. As a workaround, disable GC while
1419 # parsing the dirstate.
1434 # parsing the dirstate.
1420 #
1435 #
1421 # (we cannot decorate the function directly since it is in a C module)
1436 # (we cannot decorate the function directly since it is in a C module)
1422 parse_dirstate = util.nogc(parsers.parse_dirstate)
1437 parse_dirstate = util.nogc(parsers.parse_dirstate)
1423 p = parse_dirstate(self._map, self.copymap, st)
1438 p = parse_dirstate(self._map, self.copymap, st)
1424 if not self._dirtyparents:
1439 if not self._dirtyparents:
1425 self.setparents(*p)
1440 self.setparents(*p)
1426
1441
1427 # Avoid excess attribute lookups by fast pathing certain checks
1442 # Avoid excess attribute lookups by fast pathing certain checks
1428 self.__contains__ = self._map.__contains__
1443 self.__contains__ = self._map.__contains__
1429 self.__getitem__ = self._map.__getitem__
1444 self.__getitem__ = self._map.__getitem__
1430 self.__setitem__ = self._map.__setitem__
1431 self.__delitem__ = self._map.__delitem__
1432 self.get = self._map.get
1445 self.get = self._map.get
1433
1446
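read() wraps parse_dirstate in util.nogc for the reason given in the comment above. A generic, self-contained sketch of that disable-GC-while-running pattern; util.nogc's real implementation may differ:

import functools
import gc

def nogc_sketch(func):
    # Pause the cyclic collector for the duration of one call, so building
    # many small container objects does not trigger repeated GC passes.
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        was_enabled = gc.isenabled()
        gc.disable()
        try:
            return func(*args, **kwargs)
        finally:
            if was_enabled:
                gc.enable()
    return wrapper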
1434 def write(self, st, now):
1447 def write(self, st, now):
1435 st.write(parsers.pack_dirstate(self._map, self.copymap,
1448 st.write(parsers.pack_dirstate(self._map, self.copymap,
1436 self.parents(), now))
1449 self.parents(), now))
1437 st.close()
1450 st.close()
1438 self._dirtyparents = False
1451 self._dirtyparents = False
1439 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1452 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1440
1453
1441 @propertycache
1454 @propertycache
1442 def nonnormalset(self):
1455 def nonnormalset(self):
1443 nonnorm, otherparents = self.nonnormalentries()
1456 nonnorm, otherparents = self.nonnormalentries()
1444 self.otherparentset = otherparents
1457 self.otherparentset = otherparents
1445 return nonnorm
1458 return nonnorm
1446
1459
1447 @propertycache
1460 @propertycache
1448 def otherparentset(self):
1461 def otherparentset(self):
1449 nonnorm, otherparents = self.nonnormalentries()
1462 nonnorm, otherparents = self.nonnormalentries()
1450 self.nonnormalset = nonnorm
1463 self.nonnormalset = nonnorm
1451 return otherparents
1464 return otherparents
1452
1465
1453 @propertycache
1466 @propertycache
1454 def identity(self):
1467 def identity(self):
1455 self._map
1468 self._map
1456 return self.identity
1469 return self.identity
1457
1470
1458 @propertycache
1471 @propertycache
1459 def dirfoldmap(self):
1472 def dirfoldmap(self):
1460 f = {}
1473 f = {}
1461 normcase = util.normcase
1474 normcase = util.normcase
1462 for name in self.dirs:
1475 for name in self.dirs:
1463 f[normcase(name)] = name
1476 f[normcase(name)] = name
1464 return f
1477 return f