##// END OF EJS Templates
dirstate: move nonnormal and otherparent sets to dirstatemap...
Durham Goode -
r34675:60927b19 default
parent child Browse files
Show More
@@ -1,57 +1,58 b''
1 # dirstatenonnormalcheck.py - extension to check the consistency of the
1 # dirstatenonnormalcheck.py - extension to check the consistency of the
2 # dirstate's non-normal map
2 # dirstate's non-normal map
3 #
3 #
4 # For most operations on dirstate, this extension checks that the nonnormalset
4 # For most operations on dirstate, this extension checks that the nonnormalset
5 # contains the right entries.
5 # contains the right entries.
6 # It compares the nonnormal file to a nonnormalset built from the map of all
6 # It compares the nonnormal file to a nonnormalset built from the map of all
7 # the files in the dirstate to check that they contain the same files.
7 # the files in the dirstate to check that they contain the same files.
8
8
9 from __future__ import absolute_import
9 from __future__ import absolute_import
10
10
11 from mercurial import (
11 from mercurial import (
12 dirstate,
12 dirstate,
13 extensions,
13 extensions,
14 )
14 )
15
15
16 def nonnormalentries(dmap):
16 def nonnormalentries(dmap):
17 """Compute nonnormal entries from dirstate's dmap"""
17 """Compute nonnormal entries from dirstate's dmap"""
18 res = set()
18 res = set()
19 for f, e in dmap.iteritems():
19 for f, e in dmap.iteritems():
20 if e[0] != 'n' or e[3] == -1:
20 if e[0] != 'n' or e[3] == -1:
21 res.add(f)
21 res.add(f)
22 return res
22 return res
23
23
24 def checkconsistency(ui, orig, dmap, _nonnormalset, label):
24 def checkconsistency(ui, orig, dmap, _nonnormalset, label):
25 """Compute nonnormalset from dmap, check that it matches _nonnormalset"""
25 """Compute nonnormalset from dmap, check that it matches _nonnormalset"""
26 nonnormalcomputedmap = nonnormalentries(dmap)
26 nonnormalcomputedmap = nonnormalentries(dmap)
27 if _nonnormalset != nonnormalcomputedmap:
27 if _nonnormalset != nonnormalcomputedmap:
28 ui.develwarn("%s call to %s\n" % (label, orig), config='dirstate')
28 ui.develwarn("%s call to %s\n" % (label, orig), config='dirstate')
29 ui.develwarn("inconsistency in nonnormalset\n", config='dirstate')
29 ui.develwarn("inconsistency in nonnormalset\n", config='dirstate')
30 ui.develwarn("[nonnormalset] %s\n" % _nonnormalset, config='dirstate')
30 ui.develwarn("[nonnormalset] %s\n" % _nonnormalset, config='dirstate')
31 ui.develwarn("[map] %s\n" % nonnormalcomputedmap, config='dirstate')
31 ui.develwarn("[map] %s\n" % nonnormalcomputedmap, config='dirstate')
32
32
33 def _checkdirstate(orig, self, arg):
33 def _checkdirstate(orig, self, arg):
34 """Check nonnormal set consistency before and after the call to orig"""
34 """Check nonnormal set consistency before and after the call to orig"""
35 checkconsistency(self._ui, orig, self._map, self._nonnormalset, "before")
35 checkconsistency(self._ui, orig, self._map, self._map.nonnormalset,
36 "before")
36 r = orig(self, arg)
37 r = orig(self, arg)
37 checkconsistency(self._ui, orig, self._map, self._nonnormalset, "after")
38 checkconsistency(self._ui, orig, self._map, self._map.nonnormalset, "after")
38 return r
39 return r
39
40
40 def extsetup(ui):
41 def extsetup(ui):
41 """Wrap functions modifying dirstate to check nonnormalset consistency"""
42 """Wrap functions modifying dirstate to check nonnormalset consistency"""
42 dirstatecl = dirstate.dirstate
43 dirstatecl = dirstate.dirstate
43 devel = ui.configbool('devel', 'all-warnings')
44 devel = ui.configbool('devel', 'all-warnings')
44 paranoid = ui.configbool('experimental', 'nonnormalparanoidcheck')
45 paranoid = ui.configbool('experimental', 'nonnormalparanoidcheck')
45 if devel:
46 if devel:
46 extensions.wrapfunction(dirstatecl, '_writedirstate', _checkdirstate)
47 extensions.wrapfunction(dirstatecl, '_writedirstate', _checkdirstate)
47 if paranoid:
48 if paranoid:
48 # We don't do all these checks when paranoid is disabled as it would
49 # We don't do all these checks when paranoid is disabled as it would
49 # make the extension run very slowly on large repos
50 # make the extension run very slowly on large repos
50 extensions.wrapfunction(dirstatecl, 'normallookup', _checkdirstate)
51 extensions.wrapfunction(dirstatecl, 'normallookup', _checkdirstate)
51 extensions.wrapfunction(dirstatecl, 'otherparent', _checkdirstate)
52 extensions.wrapfunction(dirstatecl, 'otherparent', _checkdirstate)
52 extensions.wrapfunction(dirstatecl, 'normal', _checkdirstate)
53 extensions.wrapfunction(dirstatecl, 'normal', _checkdirstate)
53 extensions.wrapfunction(dirstatecl, 'write', _checkdirstate)
54 extensions.wrapfunction(dirstatecl, 'write', _checkdirstate)
54 extensions.wrapfunction(dirstatecl, 'add', _checkdirstate)
55 extensions.wrapfunction(dirstatecl, 'add', _checkdirstate)
55 extensions.wrapfunction(dirstatecl, 'remove', _checkdirstate)
56 extensions.wrapfunction(dirstatecl, 'remove', _checkdirstate)
56 extensions.wrapfunction(dirstatecl, 'merge', _checkdirstate)
57 extensions.wrapfunction(dirstatecl, 'merge', _checkdirstate)
57 extensions.wrapfunction(dirstatecl, 'drop', _checkdirstate)
58 extensions.wrapfunction(dirstatecl, 'drop', _checkdirstate)
@@ -1,1407 +1,1406 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import nullid
17 from .node import nullid
18 from . import (
18 from . import (
19 encoding,
19 encoding,
20 error,
20 error,
21 match as matchmod,
21 match as matchmod,
22 pathutil,
22 pathutil,
23 policy,
23 policy,
24 pycompat,
24 pycompat,
25 scmutil,
25 scmutil,
26 txnutil,
26 txnutil,
27 util,
27 util,
28 )
28 )
29
29
30 parsers = policy.importmod(r'parsers')
30 parsers = policy.importmod(r'parsers')
31
31
32 propertycache = util.propertycache
32 propertycache = util.propertycache
33 filecache = scmutil.filecache
33 filecache = scmutil.filecache
34 _rangemask = 0x7fffffff
34 _rangemask = 0x7fffffff
35
35
36 dirstatetuple = parsers.dirstatetuple
36 dirstatetuple = parsers.dirstatetuple
37
37
38 class repocache(filecache):
38 class repocache(filecache):
39 """filecache for files in .hg/"""
39 """filecache for files in .hg/"""
40 def join(self, obj, fname):
40 def join(self, obj, fname):
41 return obj._opener.join(fname)
41 return obj._opener.join(fname)
42
42
43 class rootcache(filecache):
43 class rootcache(filecache):
44 """filecache for files in the repository root"""
44 """filecache for files in the repository root"""
45 def join(self, obj, fname):
45 def join(self, obj, fname):
46 return obj._join(fname)
46 return obj._join(fname)
47
47
48 def _getfsnow(vfs):
48 def _getfsnow(vfs):
49 '''Get "now" timestamp on filesystem'''
49 '''Get "now" timestamp on filesystem'''
50 tmpfd, tmpname = vfs.mkstemp()
50 tmpfd, tmpname = vfs.mkstemp()
51 try:
51 try:
52 return os.fstat(tmpfd).st_mtime
52 return os.fstat(tmpfd).st_mtime
53 finally:
53 finally:
54 os.close(tmpfd)
54 os.close(tmpfd)
55 vfs.unlink(tmpname)
55 vfs.unlink(tmpname)
56
56
57 class dirstate(object):
57 class dirstate(object):
58
58
59 def __init__(self, opener, ui, root, validate, sparsematchfn):
59 def __init__(self, opener, ui, root, validate, sparsematchfn):
60 '''Create a new dirstate object.
60 '''Create a new dirstate object.
61
61
62 opener is an open()-like callable that can be used to open the
62 opener is an open()-like callable that can be used to open the
63 dirstate file; root is the root of the directory tracked by
63 dirstate file; root is the root of the directory tracked by
64 the dirstate.
64 the dirstate.
65 '''
65 '''
66 self._opener = opener
66 self._opener = opener
67 self._validate = validate
67 self._validate = validate
68 self._root = root
68 self._root = root
69 self._sparsematchfn = sparsematchfn
69 self._sparsematchfn = sparsematchfn
70 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
70 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
71 # UNC path pointing to root share (issue4557)
71 # UNC path pointing to root share (issue4557)
72 self._rootdir = pathutil.normasprefix(root)
72 self._rootdir = pathutil.normasprefix(root)
73 self._dirty = False
73 self._dirty = False
74 self._lastnormaltime = 0
74 self._lastnormaltime = 0
75 self._ui = ui
75 self._ui = ui
76 self._filecache = {}
76 self._filecache = {}
77 self._parentwriters = 0
77 self._parentwriters = 0
78 self._filename = 'dirstate'
78 self._filename = 'dirstate'
79 self._pendingfilename = '%s.pending' % self._filename
79 self._pendingfilename = '%s.pending' % self._filename
80 self._plchangecallbacks = {}
80 self._plchangecallbacks = {}
81 self._origpl = None
81 self._origpl = None
82 self._updatedfiles = set()
82 self._updatedfiles = set()
83
83
84 @contextlib.contextmanager
84 @contextlib.contextmanager
85 def parentchange(self):
85 def parentchange(self):
86 '''Context manager for handling dirstate parents.
86 '''Context manager for handling dirstate parents.
87
87
88 If an exception occurs in the scope of the context manager,
88 If an exception occurs in the scope of the context manager,
89 the incoherent dirstate won't be written when wlock is
89 the incoherent dirstate won't be written when wlock is
90 released.
90 released.
91 '''
91 '''
92 self._parentwriters += 1
92 self._parentwriters += 1
93 yield
93 yield
94 # Typically we want the "undo" step of a context manager in a
94 # Typically we want the "undo" step of a context manager in a
95 # finally block so it happens even when an exception
95 # finally block so it happens even when an exception
96 # occurs. In this case, however, we only want to decrement
96 # occurs. In this case, however, we only want to decrement
97 # parentwriters if the code in the with statement exits
97 # parentwriters if the code in the with statement exits
98 # normally, so we don't have a try/finally here on purpose.
98 # normally, so we don't have a try/finally here on purpose.
99 self._parentwriters -= 1
99 self._parentwriters -= 1
100
100
101 def beginparentchange(self):
101 def beginparentchange(self):
102 '''Marks the beginning of a set of changes that involve changing
102 '''Marks the beginning of a set of changes that involve changing
103 the dirstate parents. If there is an exception during this time,
103 the dirstate parents. If there is an exception during this time,
104 the dirstate will not be written when the wlock is released. This
104 the dirstate will not be written when the wlock is released. This
105 prevents writing an incoherent dirstate where the parent doesn't
105 prevents writing an incoherent dirstate where the parent doesn't
106 match the contents.
106 match the contents.
107 '''
107 '''
108 self._ui.deprecwarn('beginparentchange is obsoleted by the '
108 self._ui.deprecwarn('beginparentchange is obsoleted by the '
109 'parentchange context manager.', '4.3')
109 'parentchange context manager.', '4.3')
110 self._parentwriters += 1
110 self._parentwriters += 1
111
111
112 def endparentchange(self):
112 def endparentchange(self):
113 '''Marks the end of a set of changes that involve changing the
113 '''Marks the end of a set of changes that involve changing the
114 dirstate parents. Once all parent changes have been marked done,
114 dirstate parents. Once all parent changes have been marked done,
115 the wlock will be free to write the dirstate on release.
115 the wlock will be free to write the dirstate on release.
116 '''
116 '''
117 self._ui.deprecwarn('endparentchange is obsoleted by the '
117 self._ui.deprecwarn('endparentchange is obsoleted by the '
118 'parentchange context manager.', '4.3')
118 'parentchange context manager.', '4.3')
119 if self._parentwriters > 0:
119 if self._parentwriters > 0:
120 self._parentwriters -= 1
120 self._parentwriters -= 1
121
121
122 def pendingparentchange(self):
122 def pendingparentchange(self):
123 '''Returns true if the dirstate is in the middle of a set of changes
123 '''Returns true if the dirstate is in the middle of a set of changes
124 that modify the dirstate parent.
124 that modify the dirstate parent.
125 '''
125 '''
126 return self._parentwriters > 0
126 return self._parentwriters > 0
127
127
128 @propertycache
128 @propertycache
129 def _map(self):
129 def _map(self):
130 '''Return the dirstate contents as a map from filename to
130 '''Return the dirstate contents as a map from filename to
131 (state, mode, size, time).'''
131 (state, mode, size, time).'''
132 self._read()
132 self._read()
133 return self._map
133 return self._map
134
134
135 @propertycache
135 @propertycache
136 def _identity(self):
136 def _identity(self):
137 self._read()
137 self._read()
138 return self._identity
138 return self._identity
139
139
140 @propertycache
140 @propertycache
141 def _nonnormalset(self):
142 nonnorm, otherparents = self._map.nonnormalentries()
143 self._otherparentset = otherparents
144 return nonnorm
145
146 @propertycache
147 def _otherparentset(self):
148 nonnorm, otherparents = self._map.nonnormalentries()
149 self._nonnormalset = nonnorm
150 return otherparents
151
152 @propertycache
153 def _filefoldmap(self):
141 def _filefoldmap(self):
154 return self._map.filefoldmap()
142 return self._map.filefoldmap()
155
143
156 @propertycache
144 @propertycache
157 def _dirfoldmap(self):
145 def _dirfoldmap(self):
158 f = {}
146 f = {}
159 normcase = util.normcase
147 normcase = util.normcase
160 for name in self._dirs:
148 for name in self._dirs:
161 f[normcase(name)] = name
149 f[normcase(name)] = name
162 return f
150 return f
163
151
164 @property
152 @property
165 def _sparsematcher(self):
153 def _sparsematcher(self):
166 """The matcher for the sparse checkout.
154 """The matcher for the sparse checkout.
167
155
168 The working directory may not include every file from a manifest. The
156 The working directory may not include every file from a manifest. The
169 matcher obtained by this property will match a path if it is to be
157 matcher obtained by this property will match a path if it is to be
170 included in the working directory.
158 included in the working directory.
171 """
159 """
172 # TODO there is potential to cache this property. For now, the matcher
160 # TODO there is potential to cache this property. For now, the matcher
173 # is resolved on every access. (But the called function does use a
161 # is resolved on every access. (But the called function does use a
174 # cache to keep the lookup fast.)
162 # cache to keep the lookup fast.)
175 return self._sparsematchfn()
163 return self._sparsematchfn()
176
164
177 @repocache('branch')
165 @repocache('branch')
178 def _branch(self):
166 def _branch(self):
179 try:
167 try:
180 return self._opener.read("branch").strip() or "default"
168 return self._opener.read("branch").strip() or "default"
181 except IOError as inst:
169 except IOError as inst:
182 if inst.errno != errno.ENOENT:
170 if inst.errno != errno.ENOENT:
183 raise
171 raise
184 return "default"
172 return "default"
185
173
186 @property
174 @property
187 def _pl(self):
175 def _pl(self):
188 return self._map.parents()
176 return self._map.parents()
189
177
190 @propertycache
178 @propertycache
191 def _dirs(self):
179 def _dirs(self):
192 return self._map.dirs()
180 return self._map.dirs()
193
181
194 def dirs(self):
182 def dirs(self):
195 return self._dirs
183 return self._dirs
196
184
197 @rootcache('.hgignore')
185 @rootcache('.hgignore')
198 def _ignore(self):
186 def _ignore(self):
199 files = self._ignorefiles()
187 files = self._ignorefiles()
200 if not files:
188 if not files:
201 return matchmod.never(self._root, '')
189 return matchmod.never(self._root, '')
202
190
203 pats = ['include:%s' % f for f in files]
191 pats = ['include:%s' % f for f in files]
204 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
192 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
205
193
206 @propertycache
194 @propertycache
207 def _slash(self):
195 def _slash(self):
208 return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'
196 return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'
209
197
210 @propertycache
198 @propertycache
211 def _checklink(self):
199 def _checklink(self):
212 return util.checklink(self._root)
200 return util.checklink(self._root)
213
201
214 @propertycache
202 @propertycache
215 def _checkexec(self):
203 def _checkexec(self):
216 return util.checkexec(self._root)
204 return util.checkexec(self._root)
217
205
218 @propertycache
206 @propertycache
219 def _checkcase(self):
207 def _checkcase(self):
220 return not util.fscasesensitive(self._join('.hg'))
208 return not util.fscasesensitive(self._join('.hg'))
221
209
222 def _join(self, f):
210 def _join(self, f):
223 # much faster than os.path.join()
211 # much faster than os.path.join()
224 # it's safe because f is always a relative path
212 # it's safe because f is always a relative path
225 return self._rootdir + f
213 return self._rootdir + f
226
214
227 def flagfunc(self, buildfallback):
215 def flagfunc(self, buildfallback):
228 if self._checklink and self._checkexec:
216 if self._checklink and self._checkexec:
229 def f(x):
217 def f(x):
230 try:
218 try:
231 st = os.lstat(self._join(x))
219 st = os.lstat(self._join(x))
232 if util.statislink(st):
220 if util.statislink(st):
233 return 'l'
221 return 'l'
234 if util.statisexec(st):
222 if util.statisexec(st):
235 return 'x'
223 return 'x'
236 except OSError:
224 except OSError:
237 pass
225 pass
238 return ''
226 return ''
239 return f
227 return f
240
228
241 fallback = buildfallback()
229 fallback = buildfallback()
242 if self._checklink:
230 if self._checklink:
243 def f(x):
231 def f(x):
244 if os.path.islink(self._join(x)):
232 if os.path.islink(self._join(x)):
245 return 'l'
233 return 'l'
246 if 'x' in fallback(x):
234 if 'x' in fallback(x):
247 return 'x'
235 return 'x'
248 return ''
236 return ''
249 return f
237 return f
250 if self._checkexec:
238 if self._checkexec:
251 def f(x):
239 def f(x):
252 if 'l' in fallback(x):
240 if 'l' in fallback(x):
253 return 'l'
241 return 'l'
254 if util.isexec(self._join(x)):
242 if util.isexec(self._join(x)):
255 return 'x'
243 return 'x'
256 return ''
244 return ''
257 return f
245 return f
258 else:
246 else:
259 return fallback
247 return fallback
260
248
261 @propertycache
249 @propertycache
262 def _cwd(self):
250 def _cwd(self):
263 # internal config: ui.forcecwd
251 # internal config: ui.forcecwd
264 forcecwd = self._ui.config('ui', 'forcecwd')
252 forcecwd = self._ui.config('ui', 'forcecwd')
265 if forcecwd:
253 if forcecwd:
266 return forcecwd
254 return forcecwd
267 return pycompat.getcwd()
255 return pycompat.getcwd()
268
256
269 def getcwd(self):
257 def getcwd(self):
270 '''Return the path from which a canonical path is calculated.
258 '''Return the path from which a canonical path is calculated.
271
259
272 This path should be used to resolve file patterns or to convert
260 This path should be used to resolve file patterns or to convert
273 canonical paths back to file paths for display. It shouldn't be
261 canonical paths back to file paths for display. It shouldn't be
274 used to get real file paths. Use vfs functions instead.
262 used to get real file paths. Use vfs functions instead.
275 '''
263 '''
276 cwd = self._cwd
264 cwd = self._cwd
277 if cwd == self._root:
265 if cwd == self._root:
278 return ''
266 return ''
279 # self._root ends with a path separator if self._root is '/' or 'C:\'
267 # self._root ends with a path separator if self._root is '/' or 'C:\'
280 rootsep = self._root
268 rootsep = self._root
281 if not util.endswithsep(rootsep):
269 if not util.endswithsep(rootsep):
282 rootsep += pycompat.ossep
270 rootsep += pycompat.ossep
283 if cwd.startswith(rootsep):
271 if cwd.startswith(rootsep):
284 return cwd[len(rootsep):]
272 return cwd[len(rootsep):]
285 else:
273 else:
286 # we're outside the repo. return an absolute path.
274 # we're outside the repo. return an absolute path.
287 return cwd
275 return cwd
288
276
289 def pathto(self, f, cwd=None):
277 def pathto(self, f, cwd=None):
290 if cwd is None:
278 if cwd is None:
291 cwd = self.getcwd()
279 cwd = self.getcwd()
292 path = util.pathto(self._root, cwd, f)
280 path = util.pathto(self._root, cwd, f)
293 if self._slash:
281 if self._slash:
294 return util.pconvert(path)
282 return util.pconvert(path)
295 return path
283 return path
296
284
297 def __getitem__(self, key):
285 def __getitem__(self, key):
298 '''Return the current state of key (a filename) in the dirstate.
286 '''Return the current state of key (a filename) in the dirstate.
299
287
300 States are:
288 States are:
301 n normal
289 n normal
302 m needs merging
290 m needs merging
303 r marked for removal
291 r marked for removal
304 a marked for addition
292 a marked for addition
305 ? not tracked
293 ? not tracked
306 '''
294 '''
307 return self._map.get(key, ("?",))[0]
295 return self._map.get(key, ("?",))[0]
308
296
309 def __contains__(self, key):
297 def __contains__(self, key):
310 return key in self._map
298 return key in self._map
311
299
312 def __iter__(self):
300 def __iter__(self):
313 return iter(sorted(self._map))
301 return iter(sorted(self._map))
314
302
315 def items(self):
303 def items(self):
316 return self._map.iteritems()
304 return self._map.iteritems()
317
305
318 iteritems = items
306 iteritems = items
319
307
320 def parents(self):
308 def parents(self):
321 return [self._validate(p) for p in self._pl]
309 return [self._validate(p) for p in self._pl]
322
310
323 def p1(self):
311 def p1(self):
324 return self._validate(self._pl[0])
312 return self._validate(self._pl[0])
325
313
326 def p2(self):
314 def p2(self):
327 return self._validate(self._pl[1])
315 return self._validate(self._pl[1])
328
316
329 def branch(self):
317 def branch(self):
330 return encoding.tolocal(self._branch)
318 return encoding.tolocal(self._branch)
331
319
332 def setparents(self, p1, p2=nullid):
320 def setparents(self, p1, p2=nullid):
333 """Set dirstate parents to p1 and p2.
321 """Set dirstate parents to p1 and p2.
334
322
335 When moving from two parents to one, 'm' merged entries are
323 When moving from two parents to one, 'm' merged entries are
336 adjusted to normal and previous copy records discarded and
324 adjusted to normal and previous copy records discarded and
337 returned by the call.
325 returned by the call.
338
326
339 See localrepo.setparents()
327 See localrepo.setparents()
340 """
328 """
341 if self._parentwriters == 0:
329 if self._parentwriters == 0:
342 raise ValueError("cannot set dirstate parent without "
330 raise ValueError("cannot set dirstate parent without "
343 "calling dirstate.beginparentchange")
331 "calling dirstate.beginparentchange")
344
332
345 self._dirty = True
333 self._dirty = True
346 oldp2 = self._pl[1]
334 oldp2 = self._pl[1]
347 if self._origpl is None:
335 if self._origpl is None:
348 self._origpl = self._pl
336 self._origpl = self._pl
349 self._map.setparents(p1, p2)
337 self._map.setparents(p1, p2)
350 copies = {}
338 copies = {}
351 if oldp2 != nullid and p2 == nullid:
339 if oldp2 != nullid and p2 == nullid:
352 candidatefiles = self._nonnormalset.union(self._otherparentset)
340 candidatefiles = self._map.nonnormalset.union(
341 self._map.otherparentset)
353 for f in candidatefiles:
342 for f in candidatefiles:
354 s = self._map.get(f)
343 s = self._map.get(f)
355 if s is None:
344 if s is None:
356 continue
345 continue
357
346
358 # Discard 'm' markers when moving away from a merge state
347 # Discard 'm' markers when moving away from a merge state
359 if s[0] == 'm':
348 if s[0] == 'm':
360 source = self._map.copymap.get(f)
349 source = self._map.copymap.get(f)
361 if source:
350 if source:
362 copies[f] = source
351 copies[f] = source
363 self.normallookup(f)
352 self.normallookup(f)
364 # Also fix up otherparent markers
353 # Also fix up otherparent markers
365 elif s[0] == 'n' and s[2] == -2:
354 elif s[0] == 'n' and s[2] == -2:
366 source = self._map.copymap.get(f)
355 source = self._map.copymap.get(f)
367 if source:
356 if source:
368 copies[f] = source
357 copies[f] = source
369 self.add(f)
358 self.add(f)
370 return copies
359 return copies
371
360
372 def setbranch(self, branch):
361 def setbranch(self, branch):
373 self._branch = encoding.fromlocal(branch)
362 self._branch = encoding.fromlocal(branch)
374 f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
363 f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
375 try:
364 try:
376 f.write(self._branch + '\n')
365 f.write(self._branch + '\n')
377 f.close()
366 f.close()
378
367
379 # make sure filecache has the correct stat info for _branch after
368 # make sure filecache has the correct stat info for _branch after
380 # replacing the underlying file
369 # replacing the underlying file
381 ce = self._filecache['_branch']
370 ce = self._filecache['_branch']
382 if ce:
371 if ce:
383 ce.refresh()
372 ce.refresh()
384 except: # re-raises
373 except: # re-raises
385 f.discard()
374 f.discard()
386 raise
375 raise
387
376
388 def _read(self):
377 def _read(self):
389 # ignore HG_PENDING because identity is used only for writing
378 # ignore HG_PENDING because identity is used only for writing
390 self._identity = util.filestat.frompath(
379 self._identity = util.filestat.frompath(
391 self._opener.join(self._filename))
380 self._opener.join(self._filename))
392 self._map = dirstatemap(self._ui, self._opener, self._root)
381 self._map = dirstatemap(self._ui, self._opener, self._root)
393 self._map.read()
382 self._map.read()
394
383
395 def invalidate(self):
384 def invalidate(self):
396 '''Causes the next access to reread the dirstate.
385 '''Causes the next access to reread the dirstate.
397
386
398 This is different from localrepo.invalidatedirstate() because it always
387 This is different from localrepo.invalidatedirstate() because it always
399 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
388 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
400 check whether the dirstate has changed before rereading it.'''
389 check whether the dirstate has changed before rereading it.'''
401
390
402 for a in ("_map", "_identity",
391 for a in ("_map", "_identity",
403 "_filefoldmap", "_dirfoldmap", "_branch",
392 "_filefoldmap", "_dirfoldmap", "_branch",
404 "_dirs", "_ignore", "_nonnormalset",
393 "_dirs", "_ignore"):
405 "_otherparentset"):
406 if a in self.__dict__:
394 if a in self.__dict__:
407 delattr(self, a)
395 delattr(self, a)
408 self._lastnormaltime = 0
396 self._lastnormaltime = 0
409 self._dirty = False
397 self._dirty = False
410 self._updatedfiles.clear()
398 self._updatedfiles.clear()
411 self._parentwriters = 0
399 self._parentwriters = 0
412 self._origpl = None
400 self._origpl = None
413
401
414 def copy(self, source, dest):
402 def copy(self, source, dest):
415 """Mark dest as a copy of source. Unmark dest if source is None."""
403 """Mark dest as a copy of source. Unmark dest if source is None."""
416 if source == dest:
404 if source == dest:
417 return
405 return
418 self._dirty = True
406 self._dirty = True
419 if source is not None:
407 if source is not None:
420 self._map.copymap[dest] = source
408 self._map.copymap[dest] = source
421 self._updatedfiles.add(source)
409 self._updatedfiles.add(source)
422 self._updatedfiles.add(dest)
410 self._updatedfiles.add(dest)
423 elif self._map.copymap.pop(dest, None):
411 elif self._map.copymap.pop(dest, None):
424 self._updatedfiles.add(dest)
412 self._updatedfiles.add(dest)
425
413
426 def copied(self, file):
414 def copied(self, file):
427 return self._map.copymap.get(file, None)
415 return self._map.copymap.get(file, None)
428
416
429 def copies(self):
417 def copies(self):
430 return self._map.copymap
418 return self._map.copymap
431
419
432 def _droppath(self, f):
420 def _droppath(self, f):
433 if self[f] not in "?r" and "_dirs" in self.__dict__:
421 if self[f] not in "?r" and "_dirs" in self.__dict__:
434 self._dirs.delpath(f)
422 self._dirs.delpath(f)
435
423
436 if "_filefoldmap" in self.__dict__:
424 if "_filefoldmap" in self.__dict__:
437 normed = util.normcase(f)
425 normed = util.normcase(f)
438 if normed in self._filefoldmap:
426 if normed in self._filefoldmap:
439 del self._filefoldmap[normed]
427 del self._filefoldmap[normed]
440
428
441 self._updatedfiles.add(f)
429 self._updatedfiles.add(f)
442
430
443 def _addpath(self, f, state, mode, size, mtime):
431 def _addpath(self, f, state, mode, size, mtime):
444 oldstate = self[f]
432 oldstate = self[f]
445 if state == 'a' or oldstate == 'r':
433 if state == 'a' or oldstate == 'r':
446 scmutil.checkfilename(f)
434 scmutil.checkfilename(f)
447 if f in self._dirs:
435 if f in self._dirs:
448 raise error.Abort(_('directory %r already in dirstate') % f)
436 raise error.Abort(_('directory %r already in dirstate') % f)
449 # shadows
437 # shadows
450 for d in util.finddirs(f):
438 for d in util.finddirs(f):
451 if d in self._dirs:
439 if d in self._dirs:
452 break
440 break
453 entry = self._map.get(d)
441 entry = self._map.get(d)
454 if entry is not None and entry[0] != 'r':
442 if entry is not None and entry[0] != 'r':
455 raise error.Abort(
443 raise error.Abort(
456 _('file %r in dirstate clashes with %r') % (d, f))
444 _('file %r in dirstate clashes with %r') % (d, f))
457 if oldstate in "?r" and "_dirs" in self.__dict__:
445 if oldstate in "?r" and "_dirs" in self.__dict__:
458 self._dirs.addpath(f)
446 self._dirs.addpath(f)
459 self._dirty = True
447 self._dirty = True
460 self._updatedfiles.add(f)
448 self._updatedfiles.add(f)
461 self._map[f] = dirstatetuple(state, mode, size, mtime)
449 self._map[f] = dirstatetuple(state, mode, size, mtime)
462 if state != 'n' or mtime == -1:
450 if state != 'n' or mtime == -1:
463 self._nonnormalset.add(f)
451 self._map.nonnormalset.add(f)
464 if size == -2:
452 if size == -2:
465 self._otherparentset.add(f)
453 self._map.otherparentset.add(f)
466
454
467 def normal(self, f):
455 def normal(self, f):
468 '''Mark a file normal and clean.'''
456 '''Mark a file normal and clean.'''
469 s = os.lstat(self._join(f))
457 s = os.lstat(self._join(f))
470 mtime = s.st_mtime
458 mtime = s.st_mtime
471 self._addpath(f, 'n', s.st_mode,
459 self._addpath(f, 'n', s.st_mode,
472 s.st_size & _rangemask, mtime & _rangemask)
460 s.st_size & _rangemask, mtime & _rangemask)
473 self._map.copymap.pop(f, None)
461 self._map.copymap.pop(f, None)
474 if f in self._nonnormalset:
462 if f in self._map.nonnormalset:
475 self._nonnormalset.remove(f)
463 self._map.nonnormalset.remove(f)
476 if mtime > self._lastnormaltime:
464 if mtime > self._lastnormaltime:
477 # Remember the most recent modification timeslot for status(),
465 # Remember the most recent modification timeslot for status(),
478 # to make sure we won't miss future size-preserving file content
466 # to make sure we won't miss future size-preserving file content
479 # modifications that happen within the same timeslot.
467 # modifications that happen within the same timeslot.
480 self._lastnormaltime = mtime
468 self._lastnormaltime = mtime
481
469
482 def normallookup(self, f):
470 def normallookup(self, f):
483 '''Mark a file normal, but possibly dirty.'''
471 '''Mark a file normal, but possibly dirty.'''
484 if self._pl[1] != nullid:
472 if self._pl[1] != nullid:
485 # if there is a merge going on and the file was either
473 # if there is a merge going on and the file was either
486 # in state 'm' (-1) or coming from other parent (-2) before
474 # in state 'm' (-1) or coming from other parent (-2) before
487 # being removed, restore that state.
475 # being removed, restore that state.
488 entry = self._map.get(f)
476 entry = self._map.get(f)
489 if entry is not None:
477 if entry is not None:
490 if entry[0] == 'r' and entry[2] in (-1, -2):
478 if entry[0] == 'r' and entry[2] in (-1, -2):
491 source = self._map.copymap.get(f)
479 source = self._map.copymap.get(f)
492 if entry[2] == -1:
480 if entry[2] == -1:
493 self.merge(f)
481 self.merge(f)
494 elif entry[2] == -2:
482 elif entry[2] == -2:
495 self.otherparent(f)
483 self.otherparent(f)
496 if source:
484 if source:
497 self.copy(source, f)
485 self.copy(source, f)
498 return
486 return
499 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
487 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
500 return
488 return
501 self._addpath(f, 'n', 0, -1, -1)
489 self._addpath(f, 'n', 0, -1, -1)
502 self._map.copymap.pop(f, None)
490 self._map.copymap.pop(f, None)
503 if f in self._nonnormalset:
491 if f in self._map.nonnormalset:
504 self._nonnormalset.remove(f)
492 self._map.nonnormalset.remove(f)
505
493
506 def otherparent(self, f):
494 def otherparent(self, f):
507 '''Mark as coming from the other parent, always dirty.'''
495 '''Mark as coming from the other parent, always dirty.'''
508 if self._pl[1] == nullid:
496 if self._pl[1] == nullid:
509 raise error.Abort(_("setting %r to other parent "
497 raise error.Abort(_("setting %r to other parent "
510 "only allowed in merges") % f)
498 "only allowed in merges") % f)
511 if f in self and self[f] == 'n':
499 if f in self and self[f] == 'n':
512 # merge-like
500 # merge-like
513 self._addpath(f, 'm', 0, -2, -1)
501 self._addpath(f, 'm', 0, -2, -1)
514 else:
502 else:
515 # add-like
503 # add-like
516 self._addpath(f, 'n', 0, -2, -1)
504 self._addpath(f, 'n', 0, -2, -1)
517 self._map.copymap.pop(f, None)
505 self._map.copymap.pop(f, None)
518
506
519 def add(self, f):
507 def add(self, f):
520 '''Mark a file added.'''
508 '''Mark a file added.'''
521 self._addpath(f, 'a', 0, -1, -1)
509 self._addpath(f, 'a', 0, -1, -1)
522 self._map.copymap.pop(f, None)
510 self._map.copymap.pop(f, None)
523
511
524 def remove(self, f):
512 def remove(self, f):
525 '''Mark a file removed.'''
513 '''Mark a file removed.'''
526 self._dirty = True
514 self._dirty = True
527 self._droppath(f)
515 self._droppath(f)
528 size = 0
516 size = 0
529 if self._pl[1] != nullid:
517 if self._pl[1] != nullid:
530 entry = self._map.get(f)
518 entry = self._map.get(f)
531 if entry is not None:
519 if entry is not None:
532 # backup the previous state
520 # backup the previous state
533 if entry[0] == 'm': # merge
521 if entry[0] == 'm': # merge
534 size = -1
522 size = -1
535 elif entry[0] == 'n' and entry[2] == -2: # other parent
523 elif entry[0] == 'n' and entry[2] == -2: # other parent
536 size = -2
524 size = -2
537 self._otherparentset.add(f)
525 self._map.otherparentset.add(f)
538 self._map[f] = dirstatetuple('r', 0, size, 0)
526 self._map[f] = dirstatetuple('r', 0, size, 0)
539 self._nonnormalset.add(f)
527 self._map.nonnormalset.add(f)
540 if size == 0:
528 if size == 0:
541 self._map.copymap.pop(f, None)
529 self._map.copymap.pop(f, None)
542
530
543 def merge(self, f):
531 def merge(self, f):
544 '''Mark a file merged.'''
532 '''Mark a file merged.'''
545 if self._pl[1] == nullid:
533 if self._pl[1] == nullid:
546 return self.normallookup(f)
534 return self.normallookup(f)
547 return self.otherparent(f)
535 return self.otherparent(f)
548
536
549 def drop(self, f):
537 def drop(self, f):
550 '''Drop a file from the dirstate'''
538 '''Drop a file from the dirstate'''
551 if f in self._map:
539 if f in self._map:
552 self._dirty = True
540 self._dirty = True
553 self._droppath(f)
541 self._droppath(f)
554 del self._map[f]
542 del self._map[f]
555 if f in self._nonnormalset:
543 if f in self._map.nonnormalset:
556 self._nonnormalset.remove(f)
544 self._map.nonnormalset.remove(f)
557 self._map.copymap.pop(f, None)
545 self._map.copymap.pop(f, None)
558
546
559 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
547 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
560 if exists is None:
548 if exists is None:
561 exists = os.path.lexists(os.path.join(self._root, path))
549 exists = os.path.lexists(os.path.join(self._root, path))
562 if not exists:
550 if not exists:
563 # Maybe a path component exists
551 # Maybe a path component exists
564 if not ignoremissing and '/' in path:
552 if not ignoremissing and '/' in path:
565 d, f = path.rsplit('/', 1)
553 d, f = path.rsplit('/', 1)
566 d = self._normalize(d, False, ignoremissing, None)
554 d = self._normalize(d, False, ignoremissing, None)
567 folded = d + "/" + f
555 folded = d + "/" + f
568 else:
556 else:
569 # No path components, preserve original case
557 # No path components, preserve original case
570 folded = path
558 folded = path
571 else:
559 else:
572 # recursively normalize leading directory components
560 # recursively normalize leading directory components
573 # against dirstate
561 # against dirstate
574 if '/' in normed:
562 if '/' in normed:
575 d, f = normed.rsplit('/', 1)
563 d, f = normed.rsplit('/', 1)
576 d = self._normalize(d, False, ignoremissing, True)
564 d = self._normalize(d, False, ignoremissing, True)
577 r = self._root + "/" + d
565 r = self._root + "/" + d
578 folded = d + "/" + util.fspath(f, r)
566 folded = d + "/" + util.fspath(f, r)
579 else:
567 else:
580 folded = util.fspath(normed, self._root)
568 folded = util.fspath(normed, self._root)
581 storemap[normed] = folded
569 storemap[normed] = folded
582
570
583 return folded
571 return folded
584
572
585 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
573 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
586 normed = util.normcase(path)
574 normed = util.normcase(path)
587 folded = self._filefoldmap.get(normed, None)
575 folded = self._filefoldmap.get(normed, None)
588 if folded is None:
576 if folded is None:
589 if isknown:
577 if isknown:
590 folded = path
578 folded = path
591 else:
579 else:
592 folded = self._discoverpath(path, normed, ignoremissing, exists,
580 folded = self._discoverpath(path, normed, ignoremissing, exists,
593 self._filefoldmap)
581 self._filefoldmap)
594 return folded
582 return folded
595
583
596 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
584 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
597 normed = util.normcase(path)
585 normed = util.normcase(path)
598 folded = self._filefoldmap.get(normed, None)
586 folded = self._filefoldmap.get(normed, None)
599 if folded is None:
587 if folded is None:
600 folded = self._dirfoldmap.get(normed, None)
588 folded = self._dirfoldmap.get(normed, None)
601 if folded is None:
589 if folded is None:
602 if isknown:
590 if isknown:
603 folded = path
591 folded = path
604 else:
592 else:
605 # store discovered result in dirfoldmap so that future
593 # store discovered result in dirfoldmap so that future
606 # normalizefile calls don't start matching directories
594 # normalizefile calls don't start matching directories
607 folded = self._discoverpath(path, normed, ignoremissing, exists,
595 folded = self._discoverpath(path, normed, ignoremissing, exists,
608 self._dirfoldmap)
596 self._dirfoldmap)
609 return folded
597 return folded
610
598
611 def normalize(self, path, isknown=False, ignoremissing=False):
599 def normalize(self, path, isknown=False, ignoremissing=False):
612 '''
600 '''
613 normalize the case of a pathname when on a casefolding filesystem
601 normalize the case of a pathname when on a casefolding filesystem
614
602
615 isknown specifies whether the filename came from walking the
603 isknown specifies whether the filename came from walking the
616 disk, to avoid extra filesystem access.
604 disk, to avoid extra filesystem access.
617
605
618 If ignoremissing is True, missing path are returned
606 If ignoremissing is True, missing path are returned
619 unchanged. Otherwise, we try harder to normalize possibly
607 unchanged. Otherwise, we try harder to normalize possibly
620 existing path components.
608 existing path components.
621
609
622 The normalized case is determined based on the following precedence:
610 The normalized case is determined based on the following precedence:
623
611
624 - version of name already stored in the dirstate
612 - version of name already stored in the dirstate
625 - version of name stored on disk
613 - version of name stored on disk
626 - version provided via command arguments
614 - version provided via command arguments
627 '''
615 '''
628
616
629 if self._checkcase:
617 if self._checkcase:
630 return self._normalize(path, isknown, ignoremissing)
618 return self._normalize(path, isknown, ignoremissing)
631 return path
619 return path
632
620
633 def clear(self):
621 def clear(self):
634 self._map = dirstatemap(self._ui, self._opener, self._root)
622 self._map = dirstatemap(self._ui, self._opener, self._root)
635 self._nonnormalset = set()
636 self._otherparentset = set()
637 if "_dirs" in self.__dict__:
623 if "_dirs" in self.__dict__:
638 delattr(self, "_dirs")
624 delattr(self, "_dirs")
639 self._map.setparents(nullid, nullid)
625 self._map.setparents(nullid, nullid)
640 self._lastnormaltime = 0
626 self._lastnormaltime = 0
641 self._updatedfiles.clear()
627 self._updatedfiles.clear()
642 self._dirty = True
628 self._dirty = True
643
629
644 def rebuild(self, parent, allfiles, changedfiles=None):
630 def rebuild(self, parent, allfiles, changedfiles=None):
645 if changedfiles is None:
631 if changedfiles is None:
646 # Rebuild entire dirstate
632 # Rebuild entire dirstate
647 changedfiles = allfiles
633 changedfiles = allfiles
648 lastnormaltime = self._lastnormaltime
634 lastnormaltime = self._lastnormaltime
649 self.clear()
635 self.clear()
650 self._lastnormaltime = lastnormaltime
636 self._lastnormaltime = lastnormaltime
651
637
652 if self._origpl is None:
638 if self._origpl is None:
653 self._origpl = self._pl
639 self._origpl = self._pl
654 self._map.setparents(parent, nullid)
640 self._map.setparents(parent, nullid)
655 for f in changedfiles:
641 for f in changedfiles:
656 if f in allfiles:
642 if f in allfiles:
657 self.normallookup(f)
643 self.normallookup(f)
658 else:
644 else:
659 self.drop(f)
645 self.drop(f)
660
646
661 self._dirty = True
647 self._dirty = True
662
648
663 def identity(self):
649 def identity(self):
664 '''Return identity of dirstate itself to detect changing in storage
650 '''Return identity of dirstate itself to detect changing in storage
665
651
666 If identity of previous dirstate is equal to this, writing
652 If identity of previous dirstate is equal to this, writing
667 changes based on the former dirstate out can keep consistency.
653 changes based on the former dirstate out can keep consistency.
668 '''
654 '''
669 return self._identity
655 return self._identity
670
656
671 def write(self, tr):
657 def write(self, tr):
672 if not self._dirty:
658 if not self._dirty:
673 return
659 return
674
660
675 filename = self._filename
661 filename = self._filename
676 if tr:
662 if tr:
677 # 'dirstate.write()' is not only for writing in-memory
663 # 'dirstate.write()' is not only for writing in-memory
678 # changes out, but also for dropping ambiguous timestamp.
664 # changes out, but also for dropping ambiguous timestamp.
679 # delayed writing re-raise "ambiguous timestamp issue".
665 # delayed writing re-raise "ambiguous timestamp issue".
680 # See also the wiki page below for detail:
666 # See also the wiki page below for detail:
681 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
667 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
682
668
683 # emulate dropping timestamp in 'parsers.pack_dirstate'
669 # emulate dropping timestamp in 'parsers.pack_dirstate'
684 now = _getfsnow(self._opener)
670 now = _getfsnow(self._opener)
685 dmap = self._map
671 dmap = self._map
686 for f in self._updatedfiles:
672 for f in self._updatedfiles:
687 e = dmap.get(f)
673 e = dmap.get(f)
688 if e is not None and e[0] == 'n' and e[3] == now:
674 if e is not None and e[0] == 'n' and e[3] == now:
689 dmap[f] = dirstatetuple(e[0], e[1], e[2], -1)
675 dmap[f] = dirstatetuple(e[0], e[1], e[2], -1)
690 self._nonnormalset.add(f)
676 self._map.nonnormalset.add(f)
691
677
692 # emulate that all 'dirstate.normal' results are written out
678 # emulate that all 'dirstate.normal' results are written out
693 self._lastnormaltime = 0
679 self._lastnormaltime = 0
694 self._updatedfiles.clear()
680 self._updatedfiles.clear()
695
681
696 # delay writing in-memory changes out
682 # delay writing in-memory changes out
697 tr.addfilegenerator('dirstate', (self._filename,),
683 tr.addfilegenerator('dirstate', (self._filename,),
698 self._writedirstate, location='plain')
684 self._writedirstate, location='plain')
699 return
685 return
700
686
701 st = self._opener(filename, "w", atomictemp=True, checkambig=True)
687 st = self._opener(filename, "w", atomictemp=True, checkambig=True)
702 self._writedirstate(st)
688 self._writedirstate(st)
703
689
704 def addparentchangecallback(self, category, callback):
690 def addparentchangecallback(self, category, callback):
705 """add a callback to be called when the wd parents are changed
691 """add a callback to be called when the wd parents are changed
706
692
707 Callback will be called with the following arguments:
693 Callback will be called with the following arguments:
708 dirstate, (oldp1, oldp2), (newp1, newp2)
694 dirstate, (oldp1, oldp2), (newp1, newp2)
709
695
710 Category is a unique identifier to allow overwriting an old callback
696 Category is a unique identifier to allow overwriting an old callback
711 with a newer callback.
697 with a newer callback.
712 """
698 """
713 self._plchangecallbacks[category] = callback
699 self._plchangecallbacks[category] = callback
714
700
715 def _writedirstate(self, st):
701 def _writedirstate(self, st):
716 # notify callbacks about parents change
702 # notify callbacks about parents change
717 if self._origpl is not None and self._origpl != self._pl:
703 if self._origpl is not None and self._origpl != self._pl:
718 for c, callback in sorted(self._plchangecallbacks.iteritems()):
704 for c, callback in sorted(self._plchangecallbacks.iteritems()):
719 callback(self, self._origpl, self._pl)
705 callback(self, self._origpl, self._pl)
720 self._origpl = None
706 self._origpl = None
721 # use the modification time of the newly created temporary file as the
707 # use the modification time of the newly created temporary file as the
722 # filesystem's notion of 'now'
708 # filesystem's notion of 'now'
723 now = util.fstat(st).st_mtime & _rangemask
709 now = util.fstat(st).st_mtime & _rangemask
724
710
725 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
711 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
726 # timestamp of each entries in dirstate, because of 'now > mtime'
712 # timestamp of each entries in dirstate, because of 'now > mtime'
727 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite')
713 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite')
728 if delaywrite > 0:
714 if delaywrite > 0:
729 # do we have any files to delay for?
715 # do we have any files to delay for?
730 for f, e in self._map.iteritems():
716 for f, e in self._map.iteritems():
731 if e[0] == 'n' and e[3] == now:
717 if e[0] == 'n' and e[3] == now:
732 import time # to avoid useless import
718 import time # to avoid useless import
733 # rather than sleep n seconds, sleep until the next
719 # rather than sleep n seconds, sleep until the next
734 # multiple of n seconds
720 # multiple of n seconds
735 clock = time.time()
721 clock = time.time()
736 start = int(clock) - (int(clock) % delaywrite)
722 start = int(clock) - (int(clock) % delaywrite)
737 end = start + delaywrite
723 end = start + delaywrite
738 time.sleep(end - clock)
724 time.sleep(end - clock)
739 now = end # trust our estimate that the end is near now
725 now = end # trust our estimate that the end is near now
740 break
726 break
741
727
742 self._map.write(st, now)
728 self._map.write(st, now)
743 self._nonnormalset, self._otherparentset = self._map.nonnormalentries()
744 self._lastnormaltime = 0
729 self._lastnormaltime = 0
745 self._dirty = False
730 self._dirty = False
746
731
747 def _dirignore(self, f):
732 def _dirignore(self, f):
748 if f == '.':
733 if f == '.':
749 return False
734 return False
750 if self._ignore(f):
735 if self._ignore(f):
751 return True
736 return True
752 for p in util.finddirs(f):
737 for p in util.finddirs(f):
753 if self._ignore(p):
738 if self._ignore(p):
754 return True
739 return True
755 return False
740 return False
756
741
757 def _ignorefiles(self):
742 def _ignorefiles(self):
758 files = []
743 files = []
759 if os.path.exists(self._join('.hgignore')):
744 if os.path.exists(self._join('.hgignore')):
760 files.append(self._join('.hgignore'))
745 files.append(self._join('.hgignore'))
761 for name, path in self._ui.configitems("ui"):
746 for name, path in self._ui.configitems("ui"):
762 if name == 'ignore' or name.startswith('ignore.'):
747 if name == 'ignore' or name.startswith('ignore.'):
763 # we need to use os.path.join here rather than self._join
748 # we need to use os.path.join here rather than self._join
764 # because path is arbitrary and user-specified
749 # because path is arbitrary and user-specified
765 files.append(os.path.join(self._rootdir, util.expandpath(path)))
750 files.append(os.path.join(self._rootdir, util.expandpath(path)))
766 return files
751 return files
767
752
768 def _ignorefileandline(self, f):
753 def _ignorefileandline(self, f):
769 files = collections.deque(self._ignorefiles())
754 files = collections.deque(self._ignorefiles())
770 visited = set()
755 visited = set()
771 while files:
756 while files:
772 i = files.popleft()
757 i = files.popleft()
773 patterns = matchmod.readpatternfile(i, self._ui.warn,
758 patterns = matchmod.readpatternfile(i, self._ui.warn,
774 sourceinfo=True)
759 sourceinfo=True)
775 for pattern, lineno, line in patterns:
760 for pattern, lineno, line in patterns:
776 kind, p = matchmod._patsplit(pattern, 'glob')
761 kind, p = matchmod._patsplit(pattern, 'glob')
777 if kind == "subinclude":
762 if kind == "subinclude":
778 if p not in visited:
763 if p not in visited:
779 files.append(p)
764 files.append(p)
780 continue
765 continue
781 m = matchmod.match(self._root, '', [], [pattern],
766 m = matchmod.match(self._root, '', [], [pattern],
782 warn=self._ui.warn)
767 warn=self._ui.warn)
783 if m(f):
768 if m(f):
784 return (i, lineno, line)
769 return (i, lineno, line)
785 visited.add(i)
770 visited.add(i)
786 return (None, -1, "")
771 return (None, -1, "")
787
772
788 def _walkexplicit(self, match, subrepos):
773 def _walkexplicit(self, match, subrepos):
789 '''Get stat data about the files explicitly specified by match.
774 '''Get stat data about the files explicitly specified by match.
790
775
791 Return a triple (results, dirsfound, dirsnotfound).
776 Return a triple (results, dirsfound, dirsnotfound).
792 - results is a mapping from filename to stat result. It also contains
777 - results is a mapping from filename to stat result. It also contains
793 listings mapping subrepos and .hg to None.
778 listings mapping subrepos and .hg to None.
794 - dirsfound is a list of files found to be directories.
779 - dirsfound is a list of files found to be directories.
795 - dirsnotfound is a list of files that the dirstate thinks are
780 - dirsnotfound is a list of files that the dirstate thinks are
796 directories and that were not found.'''
781 directories and that were not found.'''
797
782
798 def badtype(mode):
783 def badtype(mode):
799 kind = _('unknown')
784 kind = _('unknown')
800 if stat.S_ISCHR(mode):
785 if stat.S_ISCHR(mode):
801 kind = _('character device')
786 kind = _('character device')
802 elif stat.S_ISBLK(mode):
787 elif stat.S_ISBLK(mode):
803 kind = _('block device')
788 kind = _('block device')
804 elif stat.S_ISFIFO(mode):
789 elif stat.S_ISFIFO(mode):
805 kind = _('fifo')
790 kind = _('fifo')
806 elif stat.S_ISSOCK(mode):
791 elif stat.S_ISSOCK(mode):
807 kind = _('socket')
792 kind = _('socket')
808 elif stat.S_ISDIR(mode):
793 elif stat.S_ISDIR(mode):
809 kind = _('directory')
794 kind = _('directory')
810 return _('unsupported file type (type is %s)') % kind
795 return _('unsupported file type (type is %s)') % kind
811
796
812 matchedir = match.explicitdir
797 matchedir = match.explicitdir
813 badfn = match.bad
798 badfn = match.bad
814 dmap = self._map
799 dmap = self._map
815 lstat = os.lstat
800 lstat = os.lstat
816 getkind = stat.S_IFMT
801 getkind = stat.S_IFMT
817 dirkind = stat.S_IFDIR
802 dirkind = stat.S_IFDIR
818 regkind = stat.S_IFREG
803 regkind = stat.S_IFREG
819 lnkkind = stat.S_IFLNK
804 lnkkind = stat.S_IFLNK
820 join = self._join
805 join = self._join
821 dirsfound = []
806 dirsfound = []
822 foundadd = dirsfound.append
807 foundadd = dirsfound.append
823 dirsnotfound = []
808 dirsnotfound = []
824 notfoundadd = dirsnotfound.append
809 notfoundadd = dirsnotfound.append
825
810
826 if not match.isexact() and self._checkcase:
811 if not match.isexact() and self._checkcase:
827 normalize = self._normalize
812 normalize = self._normalize
828 else:
813 else:
829 normalize = None
814 normalize = None
830
815
831 files = sorted(match.files())
816 files = sorted(match.files())
832 subrepos.sort()
817 subrepos.sort()
833 i, j = 0, 0
818 i, j = 0, 0
834 while i < len(files) and j < len(subrepos):
819 while i < len(files) and j < len(subrepos):
835 subpath = subrepos[j] + "/"
820 subpath = subrepos[j] + "/"
836 if files[i] < subpath:
821 if files[i] < subpath:
837 i += 1
822 i += 1
838 continue
823 continue
839 while i < len(files) and files[i].startswith(subpath):
824 while i < len(files) and files[i].startswith(subpath):
840 del files[i]
825 del files[i]
841 j += 1
826 j += 1
842
827
843 if not files or '.' in files:
828 if not files or '.' in files:
844 files = ['.']
829 files = ['.']
845 results = dict.fromkeys(subrepos)
830 results = dict.fromkeys(subrepos)
846 results['.hg'] = None
831 results['.hg'] = None
847
832
848 alldirs = None
833 alldirs = None
849 for ff in files:
834 for ff in files:
850 # constructing the foldmap is expensive, so don't do it for the
835 # constructing the foldmap is expensive, so don't do it for the
851 # common case where files is ['.']
836 # common case where files is ['.']
852 if normalize and ff != '.':
837 if normalize and ff != '.':
853 nf = normalize(ff, False, True)
838 nf = normalize(ff, False, True)
854 else:
839 else:
855 nf = ff
840 nf = ff
856 if nf in results:
841 if nf in results:
857 continue
842 continue
858
843
859 try:
844 try:
860 st = lstat(join(nf))
845 st = lstat(join(nf))
861 kind = getkind(st.st_mode)
846 kind = getkind(st.st_mode)
862 if kind == dirkind:
847 if kind == dirkind:
863 if nf in dmap:
848 if nf in dmap:
864 # file replaced by dir on disk but still in dirstate
849 # file replaced by dir on disk but still in dirstate
865 results[nf] = None
850 results[nf] = None
866 if matchedir:
851 if matchedir:
867 matchedir(nf)
852 matchedir(nf)
868 foundadd((nf, ff))
853 foundadd((nf, ff))
869 elif kind == regkind or kind == lnkkind:
854 elif kind == regkind or kind == lnkkind:
870 results[nf] = st
855 results[nf] = st
871 else:
856 else:
872 badfn(ff, badtype(kind))
857 badfn(ff, badtype(kind))
873 if nf in dmap:
858 if nf in dmap:
874 results[nf] = None
859 results[nf] = None
875 except OSError as inst: # nf not found on disk - it is dirstate only
860 except OSError as inst: # nf not found on disk - it is dirstate only
876 if nf in dmap: # does it exactly match a missing file?
861 if nf in dmap: # does it exactly match a missing file?
877 results[nf] = None
862 results[nf] = None
878 else: # does it match a missing directory?
863 else: # does it match a missing directory?
879 if alldirs is None:
864 if alldirs is None:
880 alldirs = util.dirs(dmap._map)
865 alldirs = util.dirs(dmap._map)
881 if nf in alldirs:
866 if nf in alldirs:
882 if matchedir:
867 if matchedir:
883 matchedir(nf)
868 matchedir(nf)
884 notfoundadd(nf)
869 notfoundadd(nf)
885 else:
870 else:
886 badfn(ff, encoding.strtolocal(inst.strerror))
871 badfn(ff, encoding.strtolocal(inst.strerror))
887
872
888 # Case insensitive filesystems cannot rely on lstat() failing to detect
873 # Case insensitive filesystems cannot rely on lstat() failing to detect
889 # a case-only rename. Prune the stat object for any file that does not
874 # a case-only rename. Prune the stat object for any file that does not
890 # match the case in the filesystem, if there are multiple files that
875 # match the case in the filesystem, if there are multiple files that
891 # normalize to the same path.
876 # normalize to the same path.
892 if match.isexact() and self._checkcase:
877 if match.isexact() and self._checkcase:
893 normed = {}
878 normed = {}
894
879
895 for f, st in results.iteritems():
880 for f, st in results.iteritems():
896 if st is None:
881 if st is None:
897 continue
882 continue
898
883
899 nc = util.normcase(f)
884 nc = util.normcase(f)
900 paths = normed.get(nc)
885 paths = normed.get(nc)
901
886
902 if paths is None:
887 if paths is None:
903 paths = set()
888 paths = set()
904 normed[nc] = paths
889 normed[nc] = paths
905
890
906 paths.add(f)
891 paths.add(f)
907
892
908 for norm, paths in normed.iteritems():
893 for norm, paths in normed.iteritems():
909 if len(paths) > 1:
894 if len(paths) > 1:
910 for path in paths:
895 for path in paths:
911 folded = self._discoverpath(path, norm, True, None,
896 folded = self._discoverpath(path, norm, True, None,
912 self._dirfoldmap)
897 self._dirfoldmap)
913 if path != folded:
898 if path != folded:
914 results[path] = None
899 results[path] = None
915
900
916 return results, dirsfound, dirsnotfound
901 return results, dirsfound, dirsnotfound
917
902
918 def walk(self, match, subrepos, unknown, ignored, full=True):
903 def walk(self, match, subrepos, unknown, ignored, full=True):
919 '''
904 '''
920 Walk recursively through the directory tree, finding all files
905 Walk recursively through the directory tree, finding all files
921 matched by match.
906 matched by match.
922
907
923 If full is False, maybe skip some known-clean files.
908 If full is False, maybe skip some known-clean files.
924
909
925 Return a dict mapping filename to stat-like object (either
910 Return a dict mapping filename to stat-like object (either
926 mercurial.osutil.stat instance or return value of os.stat()).
911 mercurial.osutil.stat instance or return value of os.stat()).
927
912
928 '''
913 '''
929 # full is a flag that extensions that hook into walk can use -- this
914 # full is a flag that extensions that hook into walk can use -- this
930 # implementation doesn't use it at all. This satisfies the contract
915 # implementation doesn't use it at all. This satisfies the contract
931 # because we only guarantee a "maybe".
916 # because we only guarantee a "maybe".
932
917
933 if ignored:
918 if ignored:
934 ignore = util.never
919 ignore = util.never
935 dirignore = util.never
920 dirignore = util.never
936 elif unknown:
921 elif unknown:
937 ignore = self._ignore
922 ignore = self._ignore
938 dirignore = self._dirignore
923 dirignore = self._dirignore
939 else:
924 else:
940 # if not unknown and not ignored, drop dir recursion and step 2
925 # if not unknown and not ignored, drop dir recursion and step 2
941 ignore = util.always
926 ignore = util.always
942 dirignore = util.always
927 dirignore = util.always
943
928
944 matchfn = match.matchfn
929 matchfn = match.matchfn
945 matchalways = match.always()
930 matchalways = match.always()
946 matchtdir = match.traversedir
931 matchtdir = match.traversedir
947 dmap = self._map
932 dmap = self._map
948 listdir = util.listdir
933 listdir = util.listdir
949 lstat = os.lstat
934 lstat = os.lstat
950 dirkind = stat.S_IFDIR
935 dirkind = stat.S_IFDIR
951 regkind = stat.S_IFREG
936 regkind = stat.S_IFREG
952 lnkkind = stat.S_IFLNK
937 lnkkind = stat.S_IFLNK
953 join = self._join
938 join = self._join
954
939
955 exact = skipstep3 = False
940 exact = skipstep3 = False
956 if match.isexact(): # match.exact
941 if match.isexact(): # match.exact
957 exact = True
942 exact = True
958 dirignore = util.always # skip step 2
943 dirignore = util.always # skip step 2
959 elif match.prefix(): # match.match, no patterns
944 elif match.prefix(): # match.match, no patterns
960 skipstep3 = True
945 skipstep3 = True
961
946
962 if not exact and self._checkcase:
947 if not exact and self._checkcase:
963 normalize = self._normalize
948 normalize = self._normalize
964 normalizefile = self._normalizefile
949 normalizefile = self._normalizefile
965 skipstep3 = False
950 skipstep3 = False
966 else:
951 else:
967 normalize = self._normalize
952 normalize = self._normalize
968 normalizefile = None
953 normalizefile = None
969
954
970 # step 1: find all explicit files
955 # step 1: find all explicit files
971 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
956 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
972
957
973 skipstep3 = skipstep3 and not (work or dirsnotfound)
958 skipstep3 = skipstep3 and not (work or dirsnotfound)
974 work = [d for d in work if not dirignore(d[0])]
959 work = [d for d in work if not dirignore(d[0])]
975
960
976 # step 2: visit subdirectories
961 # step 2: visit subdirectories
977 def traverse(work, alreadynormed):
962 def traverse(work, alreadynormed):
978 wadd = work.append
963 wadd = work.append
979 while work:
964 while work:
980 nd = work.pop()
965 nd = work.pop()
981 if not match.visitdir(nd):
966 if not match.visitdir(nd):
982 continue
967 continue
983 skip = None
968 skip = None
984 if nd == '.':
969 if nd == '.':
985 nd = ''
970 nd = ''
986 else:
971 else:
987 skip = '.hg'
972 skip = '.hg'
988 try:
973 try:
989 entries = listdir(join(nd), stat=True, skip=skip)
974 entries = listdir(join(nd), stat=True, skip=skip)
990 except OSError as inst:
975 except OSError as inst:
991 if inst.errno in (errno.EACCES, errno.ENOENT):
976 if inst.errno in (errno.EACCES, errno.ENOENT):
992 match.bad(self.pathto(nd),
977 match.bad(self.pathto(nd),
993 encoding.strtolocal(inst.strerror))
978 encoding.strtolocal(inst.strerror))
994 continue
979 continue
995 raise
980 raise
996 for f, kind, st in entries:
981 for f, kind, st in entries:
997 if normalizefile:
982 if normalizefile:
998 # even though f might be a directory, we're only
983 # even though f might be a directory, we're only
999 # interested in comparing it to files currently in the
984 # interested in comparing it to files currently in the
1000 # dmap -- therefore normalizefile is enough
985 # dmap -- therefore normalizefile is enough
1001 nf = normalizefile(nd and (nd + "/" + f) or f, True,
986 nf = normalizefile(nd and (nd + "/" + f) or f, True,
1002 True)
987 True)
1003 else:
988 else:
1004 nf = nd and (nd + "/" + f) or f
989 nf = nd and (nd + "/" + f) or f
1005 if nf not in results:
990 if nf not in results:
1006 if kind == dirkind:
991 if kind == dirkind:
1007 if not ignore(nf):
992 if not ignore(nf):
1008 if matchtdir:
993 if matchtdir:
1009 matchtdir(nf)
994 matchtdir(nf)
1010 wadd(nf)
995 wadd(nf)
1011 if nf in dmap and (matchalways or matchfn(nf)):
996 if nf in dmap and (matchalways or matchfn(nf)):
1012 results[nf] = None
997 results[nf] = None
1013 elif kind == regkind or kind == lnkkind:
998 elif kind == regkind or kind == lnkkind:
1014 if nf in dmap:
999 if nf in dmap:
1015 if matchalways or matchfn(nf):
1000 if matchalways or matchfn(nf):
1016 results[nf] = st
1001 results[nf] = st
1017 elif ((matchalways or matchfn(nf))
1002 elif ((matchalways or matchfn(nf))
1018 and not ignore(nf)):
1003 and not ignore(nf)):
1019 # unknown file -- normalize if necessary
1004 # unknown file -- normalize if necessary
1020 if not alreadynormed:
1005 if not alreadynormed:
1021 nf = normalize(nf, False, True)
1006 nf = normalize(nf, False, True)
1022 results[nf] = st
1007 results[nf] = st
1023 elif nf in dmap and (matchalways or matchfn(nf)):
1008 elif nf in dmap and (matchalways or matchfn(nf)):
1024 results[nf] = None
1009 results[nf] = None
1025
1010
1026 for nd, d in work:
1011 for nd, d in work:
1027 # alreadynormed means that processwork doesn't have to do any
1012 # alreadynormed means that processwork doesn't have to do any
1028 # expensive directory normalization
1013 # expensive directory normalization
1029 alreadynormed = not normalize or nd == d
1014 alreadynormed = not normalize or nd == d
1030 traverse([d], alreadynormed)
1015 traverse([d], alreadynormed)
1031
1016
1032 for s in subrepos:
1017 for s in subrepos:
1033 del results[s]
1018 del results[s]
1034 del results['.hg']
1019 del results['.hg']
1035
1020
1036 # step 3: visit remaining files from dmap
1021 # step 3: visit remaining files from dmap
1037 if not skipstep3 and not exact:
1022 if not skipstep3 and not exact:
1038 # If a dmap file is not in results yet, it was either
1023 # If a dmap file is not in results yet, it was either
1039 # a) not matching matchfn b) ignored, c) missing, or d) under a
1024 # a) not matching matchfn b) ignored, c) missing, or d) under a
1040 # symlink directory.
1025 # symlink directory.
1041 if not results and matchalways:
1026 if not results and matchalways:
1042 visit = [f for f in dmap]
1027 visit = [f for f in dmap]
1043 else:
1028 else:
1044 visit = [f for f in dmap if f not in results and matchfn(f)]
1029 visit = [f for f in dmap if f not in results and matchfn(f)]
1045 visit.sort()
1030 visit.sort()
1046
1031
1047 if unknown:
1032 if unknown:
1048 # unknown == True means we walked all dirs under the roots
1033 # unknown == True means we walked all dirs under the roots
1049 # that wasn't ignored, and everything that matched was stat'ed
1034 # that wasn't ignored, and everything that matched was stat'ed
1050 # and is already in results.
1035 # and is already in results.
1051 # The rest must thus be ignored or under a symlink.
1036 # The rest must thus be ignored or under a symlink.
1052 audit_path = pathutil.pathauditor(self._root, cached=True)
1037 audit_path = pathutil.pathauditor(self._root, cached=True)
1053
1038
1054 for nf in iter(visit):
1039 for nf in iter(visit):
1055 # If a stat for the same file was already added with a
1040 # If a stat for the same file was already added with a
1056 # different case, don't add one for this, since that would
1041 # different case, don't add one for this, since that would
1057 # make it appear as if the file exists under both names
1042 # make it appear as if the file exists under both names
1058 # on disk.
1043 # on disk.
1059 if (normalizefile and
1044 if (normalizefile and
1060 normalizefile(nf, True, True) in results):
1045 normalizefile(nf, True, True) in results):
1061 results[nf] = None
1046 results[nf] = None
1062 # Report ignored items in the dmap as long as they are not
1047 # Report ignored items in the dmap as long as they are not
1063 # under a symlink directory.
1048 # under a symlink directory.
1064 elif audit_path.check(nf):
1049 elif audit_path.check(nf):
1065 try:
1050 try:
1066 results[nf] = lstat(join(nf))
1051 results[nf] = lstat(join(nf))
1067 # file was just ignored, no links, and exists
1052 # file was just ignored, no links, and exists
1068 except OSError:
1053 except OSError:
1069 # file doesn't exist
1054 # file doesn't exist
1070 results[nf] = None
1055 results[nf] = None
1071 else:
1056 else:
1072 # It's either missing or under a symlink directory
1057 # It's either missing or under a symlink directory
1073 # which we in this case report as missing
1058 # which we in this case report as missing
1074 results[nf] = None
1059 results[nf] = None
1075 else:
1060 else:
1076 # We may not have walked the full directory tree above,
1061 # We may not have walked the full directory tree above,
1077 # so stat and check everything we missed.
1062 # so stat and check everything we missed.
1078 iv = iter(visit)
1063 iv = iter(visit)
1079 for st in util.statfiles([join(i) for i in visit]):
1064 for st in util.statfiles([join(i) for i in visit]):
1080 results[next(iv)] = st
1065 results[next(iv)] = st
1081 return results
1066 return results
1082
1067
1083 def status(self, match, subrepos, ignored, clean, unknown):
1068 def status(self, match, subrepos, ignored, clean, unknown):
1084 '''Determine the status of the working copy relative to the
1069 '''Determine the status of the working copy relative to the
1085 dirstate and return a pair of (unsure, status), where status is of type
1070 dirstate and return a pair of (unsure, status), where status is of type
1086 scmutil.status and:
1071 scmutil.status and:
1087
1072
1088 unsure:
1073 unsure:
1089 files that might have been modified since the dirstate was
1074 files that might have been modified since the dirstate was
1090 written, but need to be read to be sure (size is the same
1075 written, but need to be read to be sure (size is the same
1091 but mtime differs)
1076 but mtime differs)
1092 status.modified:
1077 status.modified:
1093 files that have definitely been modified since the dirstate
1078 files that have definitely been modified since the dirstate
1094 was written (different size or mode)
1079 was written (different size or mode)
1095 status.clean:
1080 status.clean:
1096 files that have definitely not been modified since the
1081 files that have definitely not been modified since the
1097 dirstate was written
1082 dirstate was written
1098 '''
1083 '''
1099 listignored, listclean, listunknown = ignored, clean, unknown
1084 listignored, listclean, listunknown = ignored, clean, unknown
1100 lookup, modified, added, unknown, ignored = [], [], [], [], []
1085 lookup, modified, added, unknown, ignored = [], [], [], [], []
1101 removed, deleted, clean = [], [], []
1086 removed, deleted, clean = [], [], []
1102
1087
1103 dmap = self._map
1088 dmap = self._map
1104 ladd = lookup.append # aka "unsure"
1089 ladd = lookup.append # aka "unsure"
1105 madd = modified.append
1090 madd = modified.append
1106 aadd = added.append
1091 aadd = added.append
1107 uadd = unknown.append
1092 uadd = unknown.append
1108 iadd = ignored.append
1093 iadd = ignored.append
1109 radd = removed.append
1094 radd = removed.append
1110 dadd = deleted.append
1095 dadd = deleted.append
1111 cadd = clean.append
1096 cadd = clean.append
1112 mexact = match.exact
1097 mexact = match.exact
1113 dirignore = self._dirignore
1098 dirignore = self._dirignore
1114 checkexec = self._checkexec
1099 checkexec = self._checkexec
1115 copymap = self._map.copymap
1100 copymap = self._map.copymap
1116 lastnormaltime = self._lastnormaltime
1101 lastnormaltime = self._lastnormaltime
1117
1102
1118 # We need to do full walks when either
1103 # We need to do full walks when either
1119 # - we're listing all clean files, or
1104 # - we're listing all clean files, or
1120 # - match.traversedir does something, because match.traversedir should
1105 # - match.traversedir does something, because match.traversedir should
1121 # be called for every dir in the working dir
1106 # be called for every dir in the working dir
1122 full = listclean or match.traversedir is not None
1107 full = listclean or match.traversedir is not None
1123 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1108 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1124 full=full).iteritems():
1109 full=full).iteritems():
1125 if fn not in dmap:
1110 if fn not in dmap:
1126 if (listignored or mexact(fn)) and dirignore(fn):
1111 if (listignored or mexact(fn)) and dirignore(fn):
1127 if listignored:
1112 if listignored:
1128 iadd(fn)
1113 iadd(fn)
1129 else:
1114 else:
1130 uadd(fn)
1115 uadd(fn)
1131 continue
1116 continue
1132
1117
1133 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1118 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1134 # written like that for performance reasons. dmap[fn] is not a
1119 # written like that for performance reasons. dmap[fn] is not a
1135 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1120 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1136 # opcode has fast paths when the value to be unpacked is a tuple or
1121 # opcode has fast paths when the value to be unpacked is a tuple or
1137 # a list, but falls back to creating a full-fledged iterator in
1122 # a list, but falls back to creating a full-fledged iterator in
1138 # general. That is much slower than simply accessing and storing the
1123 # general. That is much slower than simply accessing and storing the
1139 # tuple members one by one.
1124 # tuple members one by one.
1140 t = dmap[fn]
1125 t = dmap[fn]
1141 state = t[0]
1126 state = t[0]
1142 mode = t[1]
1127 mode = t[1]
1143 size = t[2]
1128 size = t[2]
1144 time = t[3]
1129 time = t[3]
1145
1130
1146 if not st and state in "nma":
1131 if not st and state in "nma":
1147 dadd(fn)
1132 dadd(fn)
1148 elif state == 'n':
1133 elif state == 'n':
1149 if (size >= 0 and
1134 if (size >= 0 and
1150 ((size != st.st_size and size != st.st_size & _rangemask)
1135 ((size != st.st_size and size != st.st_size & _rangemask)
1151 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1136 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1152 or size == -2 # other parent
1137 or size == -2 # other parent
1153 or fn in copymap):
1138 or fn in copymap):
1154 madd(fn)
1139 madd(fn)
1155 elif time != st.st_mtime and time != st.st_mtime & _rangemask:
1140 elif time != st.st_mtime and time != st.st_mtime & _rangemask:
1156 ladd(fn)
1141 ladd(fn)
1157 elif st.st_mtime == lastnormaltime:
1142 elif st.st_mtime == lastnormaltime:
1158 # fn may have just been marked as normal and it may have
1143 # fn may have just been marked as normal and it may have
1159 # changed in the same second without changing its size.
1144 # changed in the same second without changing its size.
1160 # This can happen if we quickly do multiple commits.
1145 # This can happen if we quickly do multiple commits.
1161 # Force lookup, so we don't miss such a racy file change.
1146 # Force lookup, so we don't miss such a racy file change.
1162 ladd(fn)
1147 ladd(fn)
1163 elif listclean:
1148 elif listclean:
1164 cadd(fn)
1149 cadd(fn)
1165 elif state == 'm':
1150 elif state == 'm':
1166 madd(fn)
1151 madd(fn)
1167 elif state == 'a':
1152 elif state == 'a':
1168 aadd(fn)
1153 aadd(fn)
1169 elif state == 'r':
1154 elif state == 'r':
1170 radd(fn)
1155 radd(fn)
1171
1156
1172 return (lookup, scmutil.status(modified, added, removed, deleted,
1157 return (lookup, scmutil.status(modified, added, removed, deleted,
1173 unknown, ignored, clean))
1158 unknown, ignored, clean))
1174
1159
1175 def matches(self, match):
1160 def matches(self, match):
1176 '''
1161 '''
1177 return files in the dirstate (in whatever state) filtered by match
1162 return files in the dirstate (in whatever state) filtered by match
1178 '''
1163 '''
1179 dmap = self._map
1164 dmap = self._map
1180 if match.always():
1165 if match.always():
1181 return dmap.keys()
1166 return dmap.keys()
1182 files = match.files()
1167 files = match.files()
1183 if match.isexact():
1168 if match.isexact():
1184 # fast path -- filter the other way around, since typically files is
1169 # fast path -- filter the other way around, since typically files is
1185 # much smaller than dmap
1170 # much smaller than dmap
1186 return [f for f in files if f in dmap]
1171 return [f for f in files if f in dmap]
1187 if match.prefix() and all(fn in dmap for fn in files):
1172 if match.prefix() and all(fn in dmap for fn in files):
1188 # fast path -- all the values are known to be files, so just return
1173 # fast path -- all the values are known to be files, so just return
1189 # that
1174 # that
1190 return list(files)
1175 return list(files)
1191 return [f for f in dmap if match(f)]
1176 return [f for f in dmap if match(f)]
1192
1177
1193 def _actualfilename(self, tr):
1178 def _actualfilename(self, tr):
1194 if tr:
1179 if tr:
1195 return self._pendingfilename
1180 return self._pendingfilename
1196 else:
1181 else:
1197 return self._filename
1182 return self._filename
1198
1183
1199 def savebackup(self, tr, backupname):
1184 def savebackup(self, tr, backupname):
1200 '''Save current dirstate into backup file'''
1185 '''Save current dirstate into backup file'''
1201 filename = self._actualfilename(tr)
1186 filename = self._actualfilename(tr)
1202 assert backupname != filename
1187 assert backupname != filename
1203
1188
1204 # use '_writedirstate' instead of 'write' to write changes certainly,
1189 # use '_writedirstate' instead of 'write' to write changes certainly,
1205 # because the latter omits writing out if transaction is running.
1190 # because the latter omits writing out if transaction is running.
1206 # output file will be used to create backup of dirstate at this point.
1191 # output file will be used to create backup of dirstate at this point.
1207 if self._dirty or not self._opener.exists(filename):
1192 if self._dirty or not self._opener.exists(filename):
1208 self._writedirstate(self._opener(filename, "w", atomictemp=True,
1193 self._writedirstate(self._opener(filename, "w", atomictemp=True,
1209 checkambig=True))
1194 checkambig=True))
1210
1195
1211 if tr:
1196 if tr:
1212 # ensure that subsequent tr.writepending returns True for
1197 # ensure that subsequent tr.writepending returns True for
1213 # changes written out above, even if dirstate is never
1198 # changes written out above, even if dirstate is never
1214 # changed after this
1199 # changed after this
1215 tr.addfilegenerator('dirstate', (self._filename,),
1200 tr.addfilegenerator('dirstate', (self._filename,),
1216 self._writedirstate, location='plain')
1201 self._writedirstate, location='plain')
1217
1202
1218 # ensure that pending file written above is unlinked at
1203 # ensure that pending file written above is unlinked at
1219 # failure, even if tr.writepending isn't invoked until the
1204 # failure, even if tr.writepending isn't invoked until the
1220 # end of this transaction
1205 # end of this transaction
1221 tr.registertmp(filename, location='plain')
1206 tr.registertmp(filename, location='plain')
1222
1207
1223 self._opener.tryunlink(backupname)
1208 self._opener.tryunlink(backupname)
1224 # hardlink backup is okay because _writedirstate is always called
1209 # hardlink backup is okay because _writedirstate is always called
1225 # with an "atomictemp=True" file.
1210 # with an "atomictemp=True" file.
1226 util.copyfile(self._opener.join(filename),
1211 util.copyfile(self._opener.join(filename),
1227 self._opener.join(backupname), hardlink=True)
1212 self._opener.join(backupname), hardlink=True)
1228
1213
1229 def restorebackup(self, tr, backupname):
1214 def restorebackup(self, tr, backupname):
1230 '''Restore dirstate by backup file'''
1215 '''Restore dirstate by backup file'''
1231 # this "invalidate()" prevents "wlock.release()" from writing
1216 # this "invalidate()" prevents "wlock.release()" from writing
1232 # changes of dirstate out after restoring from backup file
1217 # changes of dirstate out after restoring from backup file
1233 self.invalidate()
1218 self.invalidate()
1234 filename = self._actualfilename(tr)
1219 filename = self._actualfilename(tr)
1235 self._opener.rename(backupname, filename, checkambig=True)
1220 self._opener.rename(backupname, filename, checkambig=True)
1236
1221
1237 def clearbackup(self, tr, backupname):
1222 def clearbackup(self, tr, backupname):
1238 '''Clear backup file'''
1223 '''Clear backup file'''
1239 self._opener.unlink(backupname)
1224 self._opener.unlink(backupname)
1240
1225
1241 class dirstatemap(object):
1226 class dirstatemap(object):
1242 def __init__(self, ui, opener, root):
1227 def __init__(self, ui, opener, root):
1243 self._ui = ui
1228 self._ui = ui
1244 self._opener = opener
1229 self._opener = opener
1245 self._root = root
1230 self._root = root
1246 self._filename = 'dirstate'
1231 self._filename = 'dirstate'
1247
1232
1248 self._map = {}
1233 self._map = {}
1249 self.copymap = {}
1234 self.copymap = {}
1250 self._parents = None
1235 self._parents = None
1251 self._dirtyparents = False
1236 self._dirtyparents = False
1252
1237
1253 # for consistent view between _pl() and _read() invocations
1238 # for consistent view between _pl() and _read() invocations
1254 self._pendingmode = None
1239 self._pendingmode = None
1255
1240
1256 def iteritems(self):
1241 def iteritems(self):
1257 return self._map.iteritems()
1242 return self._map.iteritems()
1258
1243
1259 def __len__(self):
1244 def __len__(self):
1260 return len(self._map)
1245 return len(self._map)
1261
1246
1262 def __iter__(self):
1247 def __iter__(self):
1263 return iter(self._map)
1248 return iter(self._map)
1264
1249
1265 def get(self, key, default=None):
1250 def get(self, key, default=None):
1266 return self._map.get(key, default)
1251 return self._map.get(key, default)
1267
1252
1268 def __contains__(self, key):
1253 def __contains__(self, key):
1269 return key in self._map
1254 return key in self._map
1270
1255
1271 def __setitem__(self, key, value):
1256 def __setitem__(self, key, value):
1272 self._map[key] = value
1257 self._map[key] = value
1273
1258
1274 def __getitem__(self, key):
1259 def __getitem__(self, key):
1275 return self._map[key]
1260 return self._map[key]
1276
1261
1277 def __delitem__(self, key):
1262 def __delitem__(self, key):
1278 del self._map[key]
1263 del self._map[key]
1279
1264
1280 def keys(self):
1265 def keys(self):
1281 return self._map.keys()
1266 return self._map.keys()
1282
1267
1283 def nonnormalentries(self):
1268 def nonnormalentries(self):
1284 '''Compute the nonnormal dirstate entries from the dmap'''
1269 '''Compute the nonnormal dirstate entries from the dmap'''
1285 try:
1270 try:
1286 return parsers.nonnormalotherparententries(self._map)
1271 return parsers.nonnormalotherparententries(self._map)
1287 except AttributeError:
1272 except AttributeError:
1288 nonnorm = set()
1273 nonnorm = set()
1289 otherparent = set()
1274 otherparent = set()
1290 for fname, e in self._map.iteritems():
1275 for fname, e in self._map.iteritems():
1291 if e[0] != 'n' or e[3] == -1:
1276 if e[0] != 'n' or e[3] == -1:
1292 nonnorm.add(fname)
1277 nonnorm.add(fname)
1293 if e[0] == 'n' and e[2] == -2:
1278 if e[0] == 'n' and e[2] == -2:
1294 otherparent.add(fname)
1279 otherparent.add(fname)
1295 return nonnorm, otherparent
1280 return nonnorm, otherparent
1296
1281
1297 def filefoldmap(self):
1282 def filefoldmap(self):
1298 """Returns a dictionary mapping normalized case paths to their
1283 """Returns a dictionary mapping normalized case paths to their
1299 non-normalized versions.
1284 non-normalized versions.
1300 """
1285 """
1301 try:
1286 try:
1302 makefilefoldmap = parsers.make_file_foldmap
1287 makefilefoldmap = parsers.make_file_foldmap
1303 except AttributeError:
1288 except AttributeError:
1304 pass
1289 pass
1305 else:
1290 else:
1306 return makefilefoldmap(self._map, util.normcasespec,
1291 return makefilefoldmap(self._map, util.normcasespec,
1307 util.normcasefallback)
1292 util.normcasefallback)
1308
1293
1309 f = {}
1294 f = {}
1310 normcase = util.normcase
1295 normcase = util.normcase
1311 for name, s in self._map.iteritems():
1296 for name, s in self._map.iteritems():
1312 if s[0] != 'r':
1297 if s[0] != 'r':
1313 f[normcase(name)] = name
1298 f[normcase(name)] = name
1314 f['.'] = '.' # prevents useless util.fspath() invocation
1299 f['.'] = '.' # prevents useless util.fspath() invocation
1315 return f
1300 return f
1316
1301
1317 def dirs(self):
1302 def dirs(self):
1318 """Returns a set-like object containing all the directories in the
1303 """Returns a set-like object containing all the directories in the
1319 current dirstate.
1304 current dirstate.
1320 """
1305 """
1321 return util.dirs(self._map, 'r')
1306 return util.dirs(self._map, 'r')
1322
1307
1323 def _opendirstatefile(self):
1308 def _opendirstatefile(self):
1324 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1309 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1325 if self._pendingmode is not None and self._pendingmode != mode:
1310 if self._pendingmode is not None and self._pendingmode != mode:
1326 fp.close()
1311 fp.close()
1327 raise error.Abort(_('working directory state may be '
1312 raise error.Abort(_('working directory state may be '
1328 'changed parallelly'))
1313 'changed parallelly'))
1329 self._pendingmode = mode
1314 self._pendingmode = mode
1330 return fp
1315 return fp
1331
1316
1332 def parents(self):
1317 def parents(self):
1333 if not self._parents:
1318 if not self._parents:
1334 try:
1319 try:
1335 fp = self._opendirstatefile()
1320 fp = self._opendirstatefile()
1336 st = fp.read(40)
1321 st = fp.read(40)
1337 fp.close()
1322 fp.close()
1338 except IOError as err:
1323 except IOError as err:
1339 if err.errno != errno.ENOENT:
1324 if err.errno != errno.ENOENT:
1340 raise
1325 raise
1341 # File doesn't exist, so the current state is empty
1326 # File doesn't exist, so the current state is empty
1342 st = ''
1327 st = ''
1343
1328
1344 l = len(st)
1329 l = len(st)
1345 if l == 40:
1330 if l == 40:
1346 self._parents = st[:20], st[20:40]
1331 self._parents = st[:20], st[20:40]
1347 elif l == 0:
1332 elif l == 0:
1348 self._parents = [nullid, nullid]
1333 self._parents = [nullid, nullid]
1349 else:
1334 else:
1350 raise error.Abort(_('working directory state appears '
1335 raise error.Abort(_('working directory state appears '
1351 'damaged!'))
1336 'damaged!'))
1352
1337
1353 return self._parents
1338 return self._parents
1354
1339
1355 def setparents(self, p1, p2):
1340 def setparents(self, p1, p2):
1356 self._parents = (p1, p2)
1341 self._parents = (p1, p2)
1357 self._dirtyparents = True
1342 self._dirtyparents = True
1358
1343
1359 def read(self):
1344 def read(self):
1360 try:
1345 try:
1361 fp = self._opendirstatefile()
1346 fp = self._opendirstatefile()
1362 try:
1347 try:
1363 st = fp.read()
1348 st = fp.read()
1364 finally:
1349 finally:
1365 fp.close()
1350 fp.close()
1366 except IOError as err:
1351 except IOError as err:
1367 if err.errno != errno.ENOENT:
1352 if err.errno != errno.ENOENT:
1368 raise
1353 raise
1369 return
1354 return
1370 if not st:
1355 if not st:
1371 return
1356 return
1372
1357
1373 if util.safehasattr(parsers, 'dict_new_presized'):
1358 if util.safehasattr(parsers, 'dict_new_presized'):
1374 # Make an estimate of the number of files in the dirstate based on
1359 # Make an estimate of the number of files in the dirstate based on
1375 # its size. From a linear regression on a set of real-world repos,
1360 # its size. From a linear regression on a set of real-world repos,
1376 # all over 10,000 files, the size of a dirstate entry is 85
1361 # all over 10,000 files, the size of a dirstate entry is 85
1377 # bytes. The cost of resizing is significantly higher than the cost
1362 # bytes. The cost of resizing is significantly higher than the cost
1378 # of filling in a larger presized dict, so subtract 20% from the
1363 # of filling in a larger presized dict, so subtract 20% from the
1379 # size.
1364 # size.
1380 #
1365 #
1381 # This heuristic is imperfect in many ways, so in a future dirstate
1366 # This heuristic is imperfect in many ways, so in a future dirstate
1382 # format update it makes sense to just record the number of entries
1367 # format update it makes sense to just record the number of entries
1383 # on write.
1368 # on write.
1384 self._map = parsers.dict_new_presized(len(st) / 71)
1369 self._map = parsers.dict_new_presized(len(st) / 71)
1385
1370
1386 # Python's garbage collector triggers a GC each time a certain number
1371 # Python's garbage collector triggers a GC each time a certain number
1387 # of container objects (the number being defined by
1372 # of container objects (the number being defined by
1388 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1373 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1389 # for each file in the dirstate. The C version then immediately marks
1374 # for each file in the dirstate. The C version then immediately marks
1390 # them as not to be tracked by the collector. However, this has no
1375 # them as not to be tracked by the collector. However, this has no
1391 # effect on when GCs are triggered, only on what objects the GC looks
1376 # effect on when GCs are triggered, only on what objects the GC looks
1392 # into. This means that O(number of files) GCs are unavoidable.
1377 # into. This means that O(number of files) GCs are unavoidable.
1393 # Depending on when in the process's lifetime the dirstate is parsed,
1378 # Depending on when in the process's lifetime the dirstate is parsed,
1394 # this can get very expensive. As a workaround, disable GC while
1379 # this can get very expensive. As a workaround, disable GC while
1395 # parsing the dirstate.
1380 # parsing the dirstate.
1396 #
1381 #
1397 # (we cannot decorate the function directly since it is in a C module)
1382 # (we cannot decorate the function directly since it is in a C module)
1398 parse_dirstate = util.nogc(parsers.parse_dirstate)
1383 parse_dirstate = util.nogc(parsers.parse_dirstate)
1399 p = parse_dirstate(self._map, self.copymap, st)
1384 p = parse_dirstate(self._map, self.copymap, st)
1400 if not self._dirtyparents:
1385 if not self._dirtyparents:
1401 self.setparents(*p)
1386 self.setparents(*p)
1402
1387
1403 def write(self, st, now):
1388 def write(self, st, now):
1404 st.write(parsers.pack_dirstate(self._map, self.copymap,
1389 st.write(parsers.pack_dirstate(self._map, self.copymap,
1405 self.parents(), now))
1390 self.parents(), now))
1406 st.close()
1391 st.close()
1407 self._dirtyparents = False
1392 self._dirtyparents = False
1393 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1394
1395 @propertycache
1396 def nonnormalset(self):
1397 nonnorm, otherparents = self.nonnormalentries()
1398 self.otherparentset = otherparents
1399 return nonnorm
1400
1401 @propertycache
1402 def otherparentset(self):
1403 nonnorm, otherparents = self.nonnormalentries()
1404 self.nonnormalset = nonnorm
1405 return otherparents
1406
General Comments 0
You need to be logged in to leave comments. Login now