dirstate: move clear onto dirstatemap class...
Durham Goode
r34934:0217f75b stable
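This changeset only touches the dirstate side of the move in the hunk shown below: dirstate.clear() stops building a fresh dirstatemap and instead calls a new dirstatemap.clear(), so the map takes over resetting its own storage and derived caches. The dirstatemap half of the commit falls outside the lines reproduced here, so the following is only a rough sketch of what such a clear() would have to cover, assuming the attributes the rest of this file already reads off self._map (copymap, dirs, filefoldmap, nonnormalset, otherparentset); it is illustrative, not the verbatim upstream code.

    # Sketch only: a dirstatemap.clear() along the lines of this commit.
    nullid = b"\0" * 20   # 20 null bytes, as defined in mercurial.node

    class dirstatemap(object):
        def setparents(self, p1, p2):
            self._parents = (p1, p2)

        def clear(self):
            self._map = {}                   # filename -> dirstatetuple
            self.copymap = {}                # copy/rename sources
            self.setparents(nullid, nullid)  # reset parents to the null id
            # drop lazily computed caches so they are rebuilt on demand
            for a in ("dirs", "filefoldmap", "dirfoldmap",
                      "nonnormalset", "otherparentset"):
                self.__dict__.pop(a, None)

With the reset logic living on dirstatemap, dirstate.clear() shrinks to resetting its own bookkeeping (_lastnormaltime, _updatedfiles, _dirty), which is what the hunk below shows.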
@@ -1,1396 +1,1400 @@
# dirstate.py - working directory tracking for mercurial
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import collections
import contextlib
import errno
import os
import stat

from .i18n import _
from .node import nullid
from . import (
    encoding,
    error,
    match as matchmod,
    pathutil,
    policy,
    pycompat,
    scmutil,
    txnutil,
    util,
)

parsers = policy.importmod(r'parsers')

propertycache = util.propertycache
filecache = scmutil.filecache
_rangemask = 0x7fffffff

dirstatetuple = parsers.dirstatetuple

class repocache(filecache):
    """filecache for files in .hg/"""
    def join(self, obj, fname):
        return obj._opener.join(fname)

class rootcache(filecache):
    """filecache for files in the repository root"""
    def join(self, obj, fname):
        return obj._join(fname)

def _getfsnow(vfs):
    '''Get "now" timestamp on filesystem'''
    tmpfd, tmpname = vfs.mkstemp()
    try:
        return os.fstat(tmpfd).st_mtime
    finally:
        os.close(tmpfd)
        vfs.unlink(tmpname)

class dirstate(object):

    def __init__(self, opener, ui, root, validate, sparsematchfn):
        '''Create a new dirstate object.

        opener is an open()-like callable that can be used to open the
        dirstate file; root is the root of the directory tracked by
        the dirstate.
        '''
        self._opener = opener
        self._validate = validate
        self._root = root
        self._sparsematchfn = sparsematchfn
        # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
        # UNC path pointing to root share (issue4557)
        self._rootdir = pathutil.normasprefix(root)
        self._dirty = False
        self._lastnormaltime = 0
        self._ui = ui
        self._filecache = {}
        self._parentwriters = 0
        self._filename = 'dirstate'
        self._pendingfilename = '%s.pending' % self._filename
        self._plchangecallbacks = {}
        self._origpl = None
        self._updatedfiles = set()

    @contextlib.contextmanager
    def parentchange(self):
        '''Context manager for handling dirstate parents.

        If an exception occurs in the scope of the context manager,
        the incoherent dirstate won't be written when wlock is
        released.
        '''
        self._parentwriters += 1
        yield
        # Typically we want the "undo" step of a context manager in a
        # finally block so it happens even when an exception
        # occurs. In this case, however, we only want to decrement
        # parentwriters if the code in the with statement exits
        # normally, so we don't have a try/finally here on purpose.
        self._parentwriters -= 1

    def beginparentchange(self):
        '''Marks the beginning of a set of changes that involve changing
        the dirstate parents. If there is an exception during this time,
        the dirstate will not be written when the wlock is released. This
        prevents writing an incoherent dirstate where the parent doesn't
        match the contents.
        '''
        self._ui.deprecwarn('beginparentchange is obsoleted by the '
                            'parentchange context manager.', '4.3')
        self._parentwriters += 1

    def endparentchange(self):
        '''Marks the end of a set of changes that involve changing the
        dirstate parents. Once all parent changes have been marked done,
        the wlock will be free to write the dirstate on release.
        '''
        self._ui.deprecwarn('endparentchange is obsoleted by the '
                            'parentchange context manager.', '4.3')
        if self._parentwriters > 0:
            self._parentwriters -= 1

    def pendingparentchange(self):
        '''Returns true if the dirstate is in the middle of a set of changes
        that modify the dirstate parent.
        '''
        return self._parentwriters > 0

    @propertycache
    def _map(self):
        '''Return the dirstate contents as a map from filename to
        (state, mode, size, time).'''
        self._read()
        return self._map

    @property
    def _sparsematcher(self):
        """The matcher for the sparse checkout.

        The working directory may not include every file from a manifest. The
        matcher obtained by this property will match a path if it is to be
        included in the working directory.
        """
        # TODO there is potential to cache this property. For now, the matcher
        # is resolved on every access. (But the called function does use a
        # cache to keep the lookup fast.)
        return self._sparsematchfn()

    @repocache('branch')
    def _branch(self):
        try:
            return self._opener.read("branch").strip() or "default"
        except IOError as inst:
            if inst.errno != errno.ENOENT:
                raise
            return "default"

    @property
    def _pl(self):
        return self._map.parents()

    def dirs(self):
        return self._map.dirs

    @rootcache('.hgignore')
    def _ignore(self):
        files = self._ignorefiles()
        if not files:
            return matchmod.never(self._root, '')

        pats = ['include:%s' % f for f in files]
        return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)

    @propertycache
    def _slash(self):
        return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'

    @propertycache
    def _checklink(self):
        return util.checklink(self._root)

    @propertycache
    def _checkexec(self):
        return util.checkexec(self._root)

    @propertycache
    def _checkcase(self):
        return not util.fscasesensitive(self._join('.hg'))

    def _join(self, f):
        # much faster than os.path.join()
        # it's safe because f is always a relative path
        return self._rootdir + f

    def flagfunc(self, buildfallback):
        if self._checklink and self._checkexec:
            def f(x):
                try:
                    st = os.lstat(self._join(x))
                    if util.statislink(st):
                        return 'l'
                    if util.statisexec(st):
                        return 'x'
                except OSError:
                    pass
                return ''
            return f

        fallback = buildfallback()
        if self._checklink:
            def f(x):
                if os.path.islink(self._join(x)):
                    return 'l'
                if 'x' in fallback(x):
                    return 'x'
                return ''
            return f
        if self._checkexec:
            def f(x):
                if 'l' in fallback(x):
                    return 'l'
                if util.isexec(self._join(x)):
                    return 'x'
                return ''
            return f
        else:
            return fallback

    @propertycache
    def _cwd(self):
        # internal config: ui.forcecwd
        forcecwd = self._ui.config('ui', 'forcecwd')
        if forcecwd:
            return forcecwd
        return pycompat.getcwd()

    def getcwd(self):
        '''Return the path from which a canonical path is calculated.

        This path should be used to resolve file patterns or to convert
        canonical paths back to file paths for display. It shouldn't be
        used to get real file paths. Use vfs functions instead.
        '''
        cwd = self._cwd
        if cwd == self._root:
            return ''
        # self._root ends with a path separator if self._root is '/' or 'C:\'
        rootsep = self._root
        if not util.endswithsep(rootsep):
            rootsep += pycompat.ossep
        if cwd.startswith(rootsep):
            return cwd[len(rootsep):]
        else:
            # we're outside the repo. return an absolute path.
            return cwd

    def pathto(self, f, cwd=None):
        if cwd is None:
            cwd = self.getcwd()
        path = util.pathto(self._root, cwd, f)
        if self._slash:
            return util.pconvert(path)
        return path

    def __getitem__(self, key):
        '''Return the current state of key (a filename) in the dirstate.

        States are:
          n  normal
          m  needs merging
          r  marked for removal
          a  marked for addition
          ?  not tracked
        '''
        return self._map.get(key, ("?",))[0]

    def __contains__(self, key):
        return key in self._map

    def __iter__(self):
        return iter(sorted(self._map))

    def items(self):
        return self._map.iteritems()

    iteritems = items

    def parents(self):
        return [self._validate(p) for p in self._pl]

    def p1(self):
        return self._validate(self._pl[0])

    def p2(self):
        return self._validate(self._pl[1])

    def branch(self):
        return encoding.tolocal(self._branch)

    def setparents(self, p1, p2=nullid):
        """Set dirstate parents to p1 and p2.

        When moving from two parents to one, 'm' merged entries a
        adjusted to normal and previous copy records discarded and
        returned by the call.

        See localrepo.setparents()
        """
        if self._parentwriters == 0:
            raise ValueError("cannot set dirstate parent without "
                             "calling dirstate.beginparentchange")

        self._dirty = True
        oldp2 = self._pl[1]
        if self._origpl is None:
            self._origpl = self._pl
        self._map.setparents(p1, p2)
        copies = {}
        if oldp2 != nullid and p2 == nullid:
            candidatefiles = self._map.nonnormalset.union(
                self._map.otherparentset)
            for f in candidatefiles:
                s = self._map.get(f)
                if s is None:
                    continue

                # Discard 'm' markers when moving away from a merge state
                if s[0] == 'm':
                    source = self._map.copymap.get(f)
                    if source:
                        copies[f] = source
                    self.normallookup(f)
                # Also fix up otherparent markers
                elif s[0] == 'n' and s[2] == -2:
                    source = self._map.copymap.get(f)
                    if source:
                        copies[f] = source
                    self.add(f)
        return copies

    def setbranch(self, branch):
        self._branch = encoding.fromlocal(branch)
        f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
        try:
            f.write(self._branch + '\n')
            f.close()

            # make sure filecache has the correct stat info for _branch after
            # replacing the underlying file
            ce = self._filecache['_branch']
            if ce:
                ce.refresh()
        except: # re-raises
            f.discard()
            raise

    def _read(self):
        self._map = dirstatemap(self._ui, self._opener, self._root)
        self._map.read()

    def invalidate(self):
        '''Causes the next access to reread the dirstate.

        This is different from localrepo.invalidatedirstate() because it always
        rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
        check whether the dirstate has changed before rereading it.'''

        for a in ("_map", "_branch", "_ignore"):
            if a in self.__dict__:
                delattr(self, a)
        self._lastnormaltime = 0
        self._dirty = False
        self._updatedfiles.clear()
        self._parentwriters = 0
        self._origpl = None

    def copy(self, source, dest):
        """Mark dest as a copy of source. Unmark dest if source is None."""
        if source == dest:
            return
        self._dirty = True
        if source is not None:
            self._map.copymap[dest] = source
            self._updatedfiles.add(source)
            self._updatedfiles.add(dest)
        elif self._map.copymap.pop(dest, None):
            self._updatedfiles.add(dest)

    def copied(self, file):
        return self._map.copymap.get(file, None)

    def copies(self):
        return self._map.copymap

    def _droppath(self, f):
        if self[f] not in "?r" and "dirs" in self._map.__dict__:
            self._map.dirs.delpath(f)

        if "filefoldmap" in self._map.__dict__:
            normed = util.normcase(f)
            if normed in self._map.filefoldmap:
                del self._map.filefoldmap[normed]

        self._updatedfiles.add(f)

    def _addpath(self, f, state, mode, size, mtime):
        oldstate = self[f]
        if state == 'a' or oldstate == 'r':
            scmutil.checkfilename(f)
            if f in self._map.dirs:
                raise error.Abort(_('directory %r already in dirstate') % f)
            # shadows
            for d in util.finddirs(f):
                if d in self._map.dirs:
                    break
                entry = self._map.get(d)
                if entry is not None and entry[0] != 'r':
                    raise error.Abort(
                        _('file %r in dirstate clashes with %r') % (d, f))
        if oldstate in "?r" and "dirs" in self._map.__dict__:
            self._map.dirs.addpath(f)
        self._dirty = True
        self._updatedfiles.add(f)
        self._map[f] = dirstatetuple(state, mode, size, mtime)
        if state != 'n' or mtime == -1:
            self._map.nonnormalset.add(f)
        if size == -2:
            self._map.otherparentset.add(f)

    def normal(self, f):
        '''Mark a file normal and clean.'''
        s = os.lstat(self._join(f))
        mtime = s.st_mtime
        self._addpath(f, 'n', s.st_mode,
                      s.st_size & _rangemask, mtime & _rangemask)
        self._map.copymap.pop(f, None)
        if f in self._map.nonnormalset:
            self._map.nonnormalset.remove(f)
        if mtime > self._lastnormaltime:
            # Remember the most recent modification timeslot for status(),
            # to make sure we won't miss future size-preserving file content
            # modifications that happen within the same timeslot.
            self._lastnormaltime = mtime

    def normallookup(self, f):
        '''Mark a file normal, but possibly dirty.'''
        if self._pl[1] != nullid:
            # if there is a merge going on and the file was either
            # in state 'm' (-1) or coming from other parent (-2) before
            # being removed, restore that state.
            entry = self._map.get(f)
            if entry is not None:
                if entry[0] == 'r' and entry[2] in (-1, -2):
                    source = self._map.copymap.get(f)
                    if entry[2] == -1:
                        self.merge(f)
                    elif entry[2] == -2:
                        self.otherparent(f)
                    if source:
                        self.copy(source, f)
                    return
                if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
                    return
        self._addpath(f, 'n', 0, -1, -1)
        self._map.copymap.pop(f, None)
        if f in self._map.nonnormalset:
            self._map.nonnormalset.remove(f)

    def otherparent(self, f):
        '''Mark as coming from the other parent, always dirty.'''
        if self._pl[1] == nullid:
            raise error.Abort(_("setting %r to other parent "
                                "only allowed in merges") % f)
        if f in self and self[f] == 'n':
            # merge-like
            self._addpath(f, 'm', 0, -2, -1)
        else:
            # add-like
            self._addpath(f, 'n', 0, -2, -1)
        self._map.copymap.pop(f, None)

    def add(self, f):
        '''Mark a file added.'''
        self._addpath(f, 'a', 0, -1, -1)
        self._map.copymap.pop(f, None)

    def remove(self, f):
        '''Mark a file removed.'''
        self._dirty = True
        self._droppath(f)
        size = 0
        if self._pl[1] != nullid:
            entry = self._map.get(f)
            if entry is not None:
                # backup the previous state
                if entry[0] == 'm': # merge
                    size = -1
                elif entry[0] == 'n' and entry[2] == -2: # other parent
                    size = -2
                    self._map.otherparentset.add(f)
        self._map[f] = dirstatetuple('r', 0, size, 0)
        self._map.nonnormalset.add(f)
        if size == 0:
            self._map.copymap.pop(f, None)

    def merge(self, f):
        '''Mark a file merged.'''
        if self._pl[1] == nullid:
            return self.normallookup(f)
        return self.otherparent(f)

    def drop(self, f):
        '''Drop a file from the dirstate'''
        if f in self._map:
            self._dirty = True
            self._droppath(f)
            del self._map[f]
            if f in self._map.nonnormalset:
                self._map.nonnormalset.remove(f)
            self._map.copymap.pop(f, None)

    def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
        if exists is None:
            exists = os.path.lexists(os.path.join(self._root, path))
        if not exists:
            # Maybe a path component exists
            if not ignoremissing and '/' in path:
                d, f = path.rsplit('/', 1)
                d = self._normalize(d, False, ignoremissing, None)
                folded = d + "/" + f
            else:
                # No path components, preserve original case
                folded = path
        else:
            # recursively normalize leading directory components
            # against dirstate
            if '/' in normed:
                d, f = normed.rsplit('/', 1)
                d = self._normalize(d, False, ignoremissing, True)
                r = self._root + "/" + d
                folded = d + "/" + util.fspath(f, r)
            else:
                folded = util.fspath(normed, self._root)
            storemap[normed] = folded

        return folded

    def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
        normed = util.normcase(path)
        folded = self._map.filefoldmap.get(normed, None)
        if folded is None:
            if isknown:
                folded = path
            else:
                folded = self._discoverpath(path, normed, ignoremissing, exists,
                                            self._map.filefoldmap)
        return folded

    def _normalize(self, path, isknown, ignoremissing=False, exists=None):
        normed = util.normcase(path)
        folded = self._map.filefoldmap.get(normed, None)
        if folded is None:
            folded = self._map.dirfoldmap.get(normed, None)
        if folded is None:
            if isknown:
                folded = path
            else:
                # store discovered result in dirfoldmap so that future
                # normalizefile calls don't start matching directories
                folded = self._discoverpath(path, normed, ignoremissing, exists,
                                            self._map.dirfoldmap)
        return folded

    def normalize(self, path, isknown=False, ignoremissing=False):
        '''
        normalize the case of a pathname when on a casefolding filesystem

        isknown specifies whether the filename came from walking the
        disk, to avoid extra filesystem access.

        If ignoremissing is True, missing path are returned
        unchanged. Otherwise, we try harder to normalize possibly
        existing path components.

        The normalized case is determined based on the following precedence:

        - version of name already stored in the dirstate
        - version of name stored on disk
        - version provided via command arguments
        '''

        if self._checkcase:
            return self._normalize(path, isknown, ignoremissing)
        return path

    def clear(self):
-       self._map = dirstatemap(self._ui, self._opener, self._root)
-       self._map.setparents(nullid, nullid)
+       self._map.clear()
        self._lastnormaltime = 0
        self._updatedfiles.clear()
        self._dirty = True

    def rebuild(self, parent, allfiles, changedfiles=None):
        if changedfiles is None:
            # Rebuild entire dirstate
            changedfiles = allfiles
        lastnormaltime = self._lastnormaltime
        self.clear()
        self._lastnormaltime = lastnormaltime

        if self._origpl is None:
            self._origpl = self._pl
        self._map.setparents(parent, nullid)
        for f in changedfiles:
            if f in allfiles:
                self.normallookup(f)
            else:
                self.drop(f)

        self._dirty = True

    def identity(self):
        '''Return identity of dirstate itself to detect changing in storage

        If identity of previous dirstate is equal to this, writing
        changes based on the former dirstate out can keep consistency.
        '''
        return self._map.identity

    def write(self, tr):
        if not self._dirty:
            return

        filename = self._filename
        if tr:
            # 'dirstate.write()' is not only for writing in-memory
            # changes out, but also for dropping ambiguous timestamp.
            # delayed writing re-raise "ambiguous timestamp issue".
            # See also the wiki page below for detail:
            # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan

            # emulate dropping timestamp in 'parsers.pack_dirstate'
            now = _getfsnow(self._opener)
            dmap = self._map
            for f in self._updatedfiles:
                e = dmap.get(f)
                if e is not None and e[0] == 'n' and e[3] == now:
                    dmap[f] = dirstatetuple(e[0], e[1], e[2], -1)
                    self._map.nonnormalset.add(f)

            # emulate that all 'dirstate.normal' results are written out
            self._lastnormaltime = 0
            self._updatedfiles.clear()

            # delay writing in-memory changes out
            tr.addfilegenerator('dirstate', (self._filename,),
                                self._writedirstate, location='plain')
            return

        st = self._opener(filename, "w", atomictemp=True, checkambig=True)
        self._writedirstate(st)

    def addparentchangecallback(self, category, callback):
        """add a callback to be called when the wd parents are changed

        Callback will be called with the following arguments:
            dirstate, (oldp1, oldp2), (newp1, newp2)

        Category is a unique identifier to allow overwriting an old callback
        with a newer callback.
        """
        self._plchangecallbacks[category] = callback

    def _writedirstate(self, st):
        # notify callbacks about parents change
        if self._origpl is not None and self._origpl != self._pl:
            for c, callback in sorted(self._plchangecallbacks.iteritems()):
                callback(self, self._origpl, self._pl)
            self._origpl = None
        # use the modification time of the newly created temporary file as the
        # filesystem's notion of 'now'
        now = util.fstat(st).st_mtime & _rangemask

        # enough 'delaywrite' prevents 'pack_dirstate' from dropping
        # timestamp of each entries in dirstate, because of 'now > mtime'
        delaywrite = self._ui.configint('debug', 'dirstate.delaywrite')
        if delaywrite > 0:
            # do we have any files to delay for?
            for f, e in self._map.iteritems():
                if e[0] == 'n' and e[3] == now:
                    import time # to avoid useless import
                    # rather than sleep n seconds, sleep until the next
                    # multiple of n seconds
                    clock = time.time()
                    start = int(clock) - (int(clock) % delaywrite)
                    end = start + delaywrite
                    time.sleep(end - clock)
                    now = end # trust our estimate that the end is near now
                    break

        self._map.write(st, now)
        self._lastnormaltime = 0
        self._dirty = False

    def _dirignore(self, f):
        if f == '.':
            return False
        if self._ignore(f):
            return True
        for p in util.finddirs(f):
            if self._ignore(p):
                return True
        return False

    def _ignorefiles(self):
        files = []
        if os.path.exists(self._join('.hgignore')):
            files.append(self._join('.hgignore'))
        for name, path in self._ui.configitems("ui"):
            if name == 'ignore' or name.startswith('ignore.'):
                # we need to use os.path.join here rather than self._join
                # because path is arbitrary and user-specified
                files.append(os.path.join(self._rootdir, util.expandpath(path)))
        return files

    def _ignorefileandline(self, f):
        files = collections.deque(self._ignorefiles())
        visited = set()
        while files:
            i = files.popleft()
            patterns = matchmod.readpatternfile(i, self._ui.warn,
                                                sourceinfo=True)
            for pattern, lineno, line in patterns:
                kind, p = matchmod._patsplit(pattern, 'glob')
                if kind == "subinclude":
                    if p not in visited:
                        files.append(p)
                    continue
                m = matchmod.match(self._root, '', [], [pattern],
                                   warn=self._ui.warn)
                if m(f):
                    return (i, lineno, line)
            visited.add(i)
        return (None, -1, "")

    def _walkexplicit(self, match, subrepos):
        '''Get stat data about the files explicitly specified by match.

        Return a triple (results, dirsfound, dirsnotfound).
        - results is a mapping from filename to stat result. It also contains
          listings mapping subrepos and .hg to None.
        - dirsfound is a list of files found to be directories.
        - dirsnotfound is a list of files that the dirstate thinks are
          directories and that were not found.'''

        def badtype(mode):
            kind = _('unknown')
            if stat.S_ISCHR(mode):
                kind = _('character device')
            elif stat.S_ISBLK(mode):
                kind = _('block device')
            elif stat.S_ISFIFO(mode):
                kind = _('fifo')
            elif stat.S_ISSOCK(mode):
                kind = _('socket')
            elif stat.S_ISDIR(mode):
                kind = _('directory')
            return _('unsupported file type (type is %s)') % kind

        matchedir = match.explicitdir
        badfn = match.bad
        dmap = self._map
        lstat = os.lstat
        getkind = stat.S_IFMT
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join
        dirsfound = []
        foundadd = dirsfound.append
        dirsnotfound = []
        notfoundadd = dirsnotfound.append

        if not match.isexact() and self._checkcase:
            normalize = self._normalize
        else:
            normalize = None

        files = sorted(match.files())
        subrepos.sort()
        i, j = 0, 0
        while i < len(files) and j < len(subrepos):
            subpath = subrepos[j] + "/"
            if files[i] < subpath:
                i += 1
                continue
            while i < len(files) and files[i].startswith(subpath):
                del files[i]
            j += 1

        if not files or '.' in files:
            files = ['.']
        results = dict.fromkeys(subrepos)
        results['.hg'] = None

        alldirs = None
        for ff in files:
            # constructing the foldmap is expensive, so don't do it for the
            # common case where files is ['.']
            if normalize and ff != '.':
                nf = normalize(ff, False, True)
            else:
                nf = ff
            if nf in results:
                continue

            try:
                st = lstat(join(nf))
                kind = getkind(st.st_mode)
                if kind == dirkind:
                    if nf in dmap:
                        # file replaced by dir on disk but still in dirstate
                        results[nf] = None
                    if matchedir:
                        matchedir(nf)
                    foundadd((nf, ff))
                elif kind == regkind or kind == lnkkind:
                    results[nf] = st
                else:
                    badfn(ff, badtype(kind))
                    if nf in dmap:
                        results[nf] = None
            except OSError as inst: # nf not found on disk - it is dirstate only
                if nf in dmap: # does it exactly match a missing file?
                    results[nf] = None
                else: # does it match a missing directory?
                    if alldirs is None:
                        alldirs = util.dirs(dmap._map)
                    if nf in alldirs:
                        if matchedir:
                            matchedir(nf)
                        notfoundadd(nf)
                    else:
                        badfn(ff, encoding.strtolocal(inst.strerror))

        # Case insensitive filesystems cannot rely on lstat() failing to detect
        # a case-only rename. Prune the stat object for any file that does not
        # match the case in the filesystem, if there are multiple files that
        # normalize to the same path.
        if match.isexact() and self._checkcase:
            normed = {}

            for f, st in results.iteritems():
                if st is None:
                    continue

                nc = util.normcase(f)
                paths = normed.get(nc)

                if paths is None:
                    paths = set()
                    normed[nc] = paths

                paths.add(f)

            for norm, paths in normed.iteritems():
                if len(paths) > 1:
                    for path in paths:
                        folded = self._discoverpath(path, norm, True, None,
                                                    self._map.dirfoldmap)
                        if path != folded:
                            results[path] = None

        return results, dirsfound, dirsnotfound

    def walk(self, match, subrepos, unknown, ignored, full=True):
        '''
        Walk recursively through the directory tree, finding all files
        matched by match.

        If full is False, maybe skip some known-clean files.

        Return a dict mapping filename to stat-like object (either
        mercurial.osutil.stat instance or return value of os.stat()).

        '''
        # full is a flag that extensions that hook into walk can use -- this
        # implementation doesn't use it at all. This satisfies the contract
        # because we only guarantee a "maybe".

        if ignored:
            ignore = util.never
            dirignore = util.never
        elif unknown:
            ignore = self._ignore
            dirignore = self._dirignore
        else:
            # if not unknown and not ignored, drop dir recursion and step 2
            ignore = util.always
            dirignore = util.always

        matchfn = match.matchfn
        matchalways = match.always()
        matchtdir = match.traversedir
        dmap = self._map
        listdir = util.listdir
        lstat = os.lstat
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join

        exact = skipstep3 = False
        if match.isexact(): # match.exact
            exact = True
            dirignore = util.always # skip step 2
        elif match.prefix(): # match.match, no patterns
            skipstep3 = True

        if not exact and self._checkcase:
            normalize = self._normalize
            normalizefile = self._normalizefile
            skipstep3 = False
        else:
            normalize = self._normalize
            normalizefile = None

        # step 1: find all explicit files
        results, work, dirsnotfound = self._walkexplicit(match, subrepos)

        skipstep3 = skipstep3 and not (work or dirsnotfound)
        work = [d for d in work if not dirignore(d[0])]

        # step 2: visit subdirectories
        def traverse(work, alreadynormed):
            wadd = work.append
            while work:
                nd = work.pop()
                if not match.visitdir(nd):
                    continue
                skip = None
                if nd == '.':
                    nd = ''
                else:
                    skip = '.hg'
                try:
                    entries = listdir(join(nd), stat=True, skip=skip)
                except OSError as inst:
                    if inst.errno in (errno.EACCES, errno.ENOENT):
                        match.bad(self.pathto(nd),
                                  encoding.strtolocal(inst.strerror))
                        continue
                    raise
                for f, kind, st in entries:
                    if normalizefile:
                        # even though f might be a directory, we're only
                        # interested in comparing it to files currently in the
                        # dmap -- therefore normalizefile is enough
                        nf = normalizefile(nd and (nd + "/" + f) or f, True,
                                           True)
                    else:
                        nf = nd and (nd + "/" + f) or f
                    if nf not in results:
                        if kind == dirkind:
                            if not ignore(nf):
                                if matchtdir:
                                    matchtdir(nf)
                                wadd(nf)
                            if nf in dmap and (matchalways or matchfn(nf)):
                                results[nf] = None
                        elif kind == regkind or kind == lnkkind:
                            if nf in dmap:
                                if matchalways or matchfn(nf):
                                    results[nf] = st
                            elif ((matchalways or matchfn(nf))
                                  and not ignore(nf)):
                                # unknown file -- normalize if necessary
                                if not alreadynormed:
                                    nf = normalize(nf, False, True)
                                results[nf] = st
                        elif nf in dmap and (matchalways or matchfn(nf)):
                            results[nf] = None

        for nd, d in work:
            # alreadynormed means that processwork doesn't have to do any
            # expensive directory normalization
            alreadynormed = not normalize or nd == d
            traverse([d], alreadynormed)

        for s in subrepos:
            del results[s]
        del results['.hg']

        # step 3: visit remaining files from dmap
        if not skipstep3 and not exact:
            # If a dmap file is not in results yet, it was either
            # a) not matching matchfn b) ignored, c) missing, or d) under a
            # symlink directory.
            if not results and matchalways:
                visit = [f for f in dmap]
            else:
                visit = [f for f in dmap if f not in results and matchfn(f)]
            visit.sort()

            if unknown:
                # unknown == True means we walked all dirs under the roots
                # that wasn't ignored, and everything that matched was stat'ed
                # and is already in results.
                # The rest must thus be ignored or under a symlink.
                audit_path = pathutil.pathauditor(self._root, cached=True)

                for nf in iter(visit):
                    # If a stat for the same file was already added with a
                    # different case, don't add one for this, since that would
                    # make it appear as if the file exists under both names
                    # on disk.
                    if (normalizefile and
                        normalizefile(nf, True, True) in results):
1018 results[nf] = None
1017 results[nf] = None
1019 # Report ignored items in the dmap as long as they are not
1018 # Report ignored items in the dmap as long as they are not
1020 # under a symlink directory.
1019 # under a symlink directory.
1021 elif audit_path.check(nf):
1020 elif audit_path.check(nf):
1022 try:
1021 try:
1023 results[nf] = lstat(join(nf))
1022 results[nf] = lstat(join(nf))
1024 # file was just ignored, no links, and exists
1023 # file was just ignored, no links, and exists
1025 except OSError:
1024 except OSError:
1026 # file doesn't exist
1025 # file doesn't exist
1027 results[nf] = None
1026 results[nf] = None
1028 else:
1027 else:
1029 # It's either missing or under a symlink directory
1028 # It's either missing or under a symlink directory
1030 # which we in this case report as missing
1029 # which we in this case report as missing
1031 results[nf] = None
1030 results[nf] = None
1032 else:
1031 else:
1033 # We may not have walked the full directory tree above,
1032 # We may not have walked the full directory tree above,
1034 # so stat and check everything we missed.
1033 # so stat and check everything we missed.
1035 iv = iter(visit)
1034 iv = iter(visit)
1036 for st in util.statfiles([join(i) for i in visit]):
1035 for st in util.statfiles([join(i) for i in visit]):
1037 results[next(iv)] = st
1036 results[next(iv)] = st
1038 return results
1037 return results
1039
1038
1040 def status(self, match, subrepos, ignored, clean, unknown):
1039 def status(self, match, subrepos, ignored, clean, unknown):
1041 '''Determine the status of the working copy relative to the
1040 '''Determine the status of the working copy relative to the
1042 dirstate and return a pair of (unsure, status), where status is of type
1041 dirstate and return a pair of (unsure, status), where status is of type
1043 scmutil.status and:
1042 scmutil.status and:
1044
1043
1045 unsure:
1044 unsure:
1046 files that might have been modified since the dirstate was
1045 files that might have been modified since the dirstate was
1047 written, but need to be read to be sure (size is the same
1046 written, but need to be read to be sure (size is the same
1048 but mtime differs)
1047 but mtime differs)
1049 status.modified:
1048 status.modified:
1050 files that have definitely been modified since the dirstate
1049 files that have definitely been modified since the dirstate
1051 was written (different size or mode)
1050 was written (different size or mode)
1052 status.clean:
1051 status.clean:
1053 files that have definitely not been modified since the
1052 files that have definitely not been modified since the
1054 dirstate was written
1053 dirstate was written
1055 '''
1054 '''
1056 listignored, listclean, listunknown = ignored, clean, unknown
1055 listignored, listclean, listunknown = ignored, clean, unknown
1057 lookup, modified, added, unknown, ignored = [], [], [], [], []
1056 lookup, modified, added, unknown, ignored = [], [], [], [], []
1058 removed, deleted, clean = [], [], []
1057 removed, deleted, clean = [], [], []
1059
1058
1060 dmap = self._map
1059 dmap = self._map
1061 ladd = lookup.append # aka "unsure"
1060 ladd = lookup.append # aka "unsure"
1062 madd = modified.append
1061 madd = modified.append
1063 aadd = added.append
1062 aadd = added.append
1064 uadd = unknown.append
1063 uadd = unknown.append
1065 iadd = ignored.append
1064 iadd = ignored.append
1066 radd = removed.append
1065 radd = removed.append
1067 dadd = deleted.append
1066 dadd = deleted.append
1068 cadd = clean.append
1067 cadd = clean.append
1069 mexact = match.exact
1068 mexact = match.exact
1070 dirignore = self._dirignore
1069 dirignore = self._dirignore
1071 checkexec = self._checkexec
1070 checkexec = self._checkexec
1072 copymap = self._map.copymap
1071 copymap = self._map.copymap
1073 lastnormaltime = self._lastnormaltime
1072 lastnormaltime = self._lastnormaltime
1074
1073
1075 # We need to do full walks when either
1074 # We need to do full walks when either
1076 # - we're listing all clean files, or
1075 # - we're listing all clean files, or
1077 # - match.traversedir does something, because match.traversedir should
1076 # - match.traversedir does something, because match.traversedir should
1078 # be called for every dir in the working dir
1077 # be called for every dir in the working dir
1079 full = listclean or match.traversedir is not None
1078 full = listclean or match.traversedir is not None
1080 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1079 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1081 full=full).iteritems():
1080 full=full).iteritems():
1082 if fn not in dmap:
1081 if fn not in dmap:
1083 if (listignored or mexact(fn)) and dirignore(fn):
1082 if (listignored or mexact(fn)) and dirignore(fn):
1084 if listignored:
1083 if listignored:
1085 iadd(fn)
1084 iadd(fn)
1086 else:
1085 else:
1087 uadd(fn)
1086 uadd(fn)
1088 continue
1087 continue
1089
1088
1090 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1089 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1091 # written like that for performance reasons. dmap[fn] is not a
1090 # written like that for performance reasons. dmap[fn] is not a
1092 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1091 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1093 # opcode has fast paths when the value to be unpacked is a tuple or
1092 # opcode has fast paths when the value to be unpacked is a tuple or
1094 # a list, but falls back to creating a full-fledged iterator in
1093 # a list, but falls back to creating a full-fledged iterator in
1095 # general. That is much slower than simply accessing and storing the
1094 # general. That is much slower than simply accessing and storing the
1096 # tuple members one by one.
1095 # tuple members one by one.
1097 t = dmap[fn]
1096 t = dmap[fn]
1098 state = t[0]
1097 state = t[0]
1099 mode = t[1]
1098 mode = t[1]
1100 size = t[2]
1099 size = t[2]
1101 time = t[3]
1100 time = t[3]
1102
1101
1103 if not st and state in "nma":
1102 if not st and state in "nma":
1104 dadd(fn)
1103 dadd(fn)
1105 elif state == 'n':
1104 elif state == 'n':
1106 if (size >= 0 and
1105 if (size >= 0 and
1107 ((size != st.st_size and size != st.st_size & _rangemask)
1106 ((size != st.st_size and size != st.st_size & _rangemask)
1108 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1107 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1109 or size == -2 # other parent
1108 or size == -2 # other parent
1110 or fn in copymap):
1109 or fn in copymap):
1111 madd(fn)
1110 madd(fn)
1112 elif time != st.st_mtime and time != st.st_mtime & _rangemask:
1111 elif time != st.st_mtime and time != st.st_mtime & _rangemask:
1113 ladd(fn)
1112 ladd(fn)
1114 elif st.st_mtime == lastnormaltime:
1113 elif st.st_mtime == lastnormaltime:
1115 # fn may have just been marked as normal and it may have
1114 # fn may have just been marked as normal and it may have
1116 # changed in the same second without changing its size.
1115 # changed in the same second without changing its size.
1117 # This can happen if we quickly do multiple commits.
1116 # This can happen if we quickly do multiple commits.
1118 # Force lookup, so we don't miss such a racy file change.
1117 # Force lookup, so we don't miss such a racy file change.
1119 ladd(fn)
1118 ladd(fn)
1120 elif listclean:
1119 elif listclean:
1121 cadd(fn)
1120 cadd(fn)
1122 elif state == 'm':
1121 elif state == 'm':
1123 madd(fn)
1122 madd(fn)
1124 elif state == 'a':
1123 elif state == 'a':
1125 aadd(fn)
1124 aadd(fn)
1126 elif state == 'r':
1125 elif state == 'r':
1127 radd(fn)
1126 radd(fn)
1128
1127
1129 return (lookup, scmutil.status(modified, added, removed, deleted,
1128 return (lookup, scmutil.status(modified, added, removed, deleted,
1130 unknown, ignored, clean))
1129 unknown, ignored, clean))
1131
1130
1132 def matches(self, match):
1131 def matches(self, match):
1133 '''
1132 '''
1134 return files in the dirstate (in whatever state) filtered by match
1133 return files in the dirstate (in whatever state) filtered by match
1135 '''
1134 '''
1136 dmap = self._map
1135 dmap = self._map
1137 if match.always():
1136 if match.always():
1138 return dmap.keys()
1137 return dmap.keys()
1139 files = match.files()
1138 files = match.files()
1140 if match.isexact():
1139 if match.isexact():
1141 # fast path -- filter the other way around, since typically files is
1140 # fast path -- filter the other way around, since typically files is
1142 # much smaller than dmap
1141 # much smaller than dmap
1143 return [f for f in files if f in dmap]
1142 return [f for f in files if f in dmap]
1144 if match.prefix() and all(fn in dmap for fn in files):
1143 if match.prefix() and all(fn in dmap for fn in files):
1145 # fast path -- all the values are known to be files, so just return
1144 # fast path -- all the values are known to be files, so just return
1146 # that
1145 # that
1147 return list(files)
1146 return list(files)
1148 return [f for f in dmap if match(f)]
1147 return [f for f in dmap if match(f)]
1149
1148
1150 def _actualfilename(self, tr):
1149 def _actualfilename(self, tr):
1151 if tr:
1150 if tr:
1152 return self._pendingfilename
1151 return self._pendingfilename
1153 else:
1152 else:
1154 return self._filename
1153 return self._filename
1155
1154
1156 def savebackup(self, tr, backupname):
1155 def savebackup(self, tr, backupname):
1157 '''Save current dirstate into backup file'''
1156 '''Save current dirstate into backup file'''
1158 filename = self._actualfilename(tr)
1157 filename = self._actualfilename(tr)
1159 assert backupname != filename
1158 assert backupname != filename
1160
1159
1161 # use '_writedirstate' instead of 'write' to write changes certainly,
1160 # use '_writedirstate' instead of 'write' to write changes certainly,
1162 # because the latter omits writing out if transaction is running.
1161 # because the latter omits writing out if transaction is running.
1163 # output file will be used to create backup of dirstate at this point.
1162 # output file will be used to create backup of dirstate at this point.
1164 if self._dirty or not self._opener.exists(filename):
1163 if self._dirty or not self._opener.exists(filename):
1165 self._writedirstate(self._opener(filename, "w", atomictemp=True,
1164 self._writedirstate(self._opener(filename, "w", atomictemp=True,
1166 checkambig=True))
1165 checkambig=True))
1167
1166
1168 if tr:
1167 if tr:
1169 # ensure that subsequent tr.writepending returns True for
1168 # ensure that subsequent tr.writepending returns True for
1170 # changes written out above, even if dirstate is never
1169 # changes written out above, even if dirstate is never
1171 # changed after this
1170 # changed after this
1172 tr.addfilegenerator('dirstate', (self._filename,),
1171 tr.addfilegenerator('dirstate', (self._filename,),
1173 self._writedirstate, location='plain')
1172 self._writedirstate, location='plain')
1174
1173
1175 # ensure that pending file written above is unlinked at
1174 # ensure that pending file written above is unlinked at
1176 # failure, even if tr.writepending isn't invoked until the
1175 # failure, even if tr.writepending isn't invoked until the
1177 # end of this transaction
1176 # end of this transaction
1178 tr.registertmp(filename, location='plain')
1177 tr.registertmp(filename, location='plain')
1179
1178
1180 self._opener.tryunlink(backupname)
1179 self._opener.tryunlink(backupname)
1181 # hardlink backup is okay because _writedirstate is always called
1180 # hardlink backup is okay because _writedirstate is always called
1182 # with an "atomictemp=True" file.
1181 # with an "atomictemp=True" file.
1183 util.copyfile(self._opener.join(filename),
1182 util.copyfile(self._opener.join(filename),
1184 self._opener.join(backupname), hardlink=True)
1183 self._opener.join(backupname), hardlink=True)
1185
1184
1186 def restorebackup(self, tr, backupname):
1185 def restorebackup(self, tr, backupname):
1187 '''Restore dirstate by backup file'''
1186 '''Restore dirstate by backup file'''
1188 # this "invalidate()" prevents "wlock.release()" from writing
1187 # this "invalidate()" prevents "wlock.release()" from writing
1189 # changes of dirstate out after restoring from backup file
1188 # changes of dirstate out after restoring from backup file
1190 self.invalidate()
1189 self.invalidate()
1191 filename = self._actualfilename(tr)
1190 filename = self._actualfilename(tr)
1192 self._opener.rename(backupname, filename, checkambig=True)
1191 self._opener.rename(backupname, filename, checkambig=True)
1193
1192
1194 def clearbackup(self, tr, backupname):
1193 def clearbackup(self, tr, backupname):
1195 '''Clear backup file'''
1194 '''Clear backup file'''
1196 self._opener.unlink(backupname)
1195 self._opener.unlink(backupname)
1197
1196
1198 class dirstatemap(object):
1197 class dirstatemap(object):
1199 def __init__(self, ui, opener, root):
1198 def __init__(self, ui, opener, root):
1200 self._ui = ui
1199 self._ui = ui
1201 self._opener = opener
1200 self._opener = opener
1202 self._root = root
1201 self._root = root
1203 self._filename = 'dirstate'
1202 self._filename = 'dirstate'
1204
1203
1205 self._map = {}
1204 self._map = {}
1206 self.copymap = {}
1205 self.copymap = {}
1207 self._parents = None
1206 self._parents = None
1208 self._dirtyparents = False
1207 self._dirtyparents = False
1209
1208
1210 # for consistent view between _pl() and _read() invocations
1209 # for consistent view between _pl() and _read() invocations
1211 self._pendingmode = None
1210 self._pendingmode = None
1212
1211
1212 def clear(self):
1213 self._map = {}
1214 self.copymap = {}
1215 self.setparents(nullid, nullid)
1216
1213 def iteritems(self):
1217 def iteritems(self):
1214 return self._map.iteritems()
1218 return self._map.iteritems()
1215
1219
1216 def __len__(self):
1220 def __len__(self):
1217 return len(self._map)
1221 return len(self._map)
1218
1222
1219 def __iter__(self):
1223 def __iter__(self):
1220 return iter(self._map)
1224 return iter(self._map)
1221
1225
1222 def get(self, key, default=None):
1226 def get(self, key, default=None):
1223 return self._map.get(key, default)
1227 return self._map.get(key, default)
1224
1228
1225 def __contains__(self, key):
1229 def __contains__(self, key):
1226 return key in self._map
1230 return key in self._map
1227
1231
1228 def __setitem__(self, key, value):
1232 def __setitem__(self, key, value):
1229 self._map[key] = value
1233 self._map[key] = value
1230
1234
1231 def __getitem__(self, key):
1235 def __getitem__(self, key):
1232 return self._map[key]
1236 return self._map[key]
1233
1237
1234 def __delitem__(self, key):
1238 def __delitem__(self, key):
1235 del self._map[key]
1239 del self._map[key]
1236
1240
1237 def keys(self):
1241 def keys(self):
1238 return self._map.keys()
1242 return self._map.keys()
1239
1243
1240 def nonnormalentries(self):
1244 def nonnormalentries(self):
1241 '''Compute the nonnormal dirstate entries from the dmap'''
1245 '''Compute the nonnormal dirstate entries from the dmap'''
1242 try:
1246 try:
1243 return parsers.nonnormalotherparententries(self._map)
1247 return parsers.nonnormalotherparententries(self._map)
1244 except AttributeError:
1248 except AttributeError:
1245 nonnorm = set()
1249 nonnorm = set()
1246 otherparent = set()
1250 otherparent = set()
1247 for fname, e in self._map.iteritems():
1251 for fname, e in self._map.iteritems():
1248 if e[0] != 'n' or e[3] == -1:
1252 if e[0] != 'n' or e[3] == -1:
1249 nonnorm.add(fname)
1253 nonnorm.add(fname)
1250 if e[0] == 'n' and e[2] == -2:
1254 if e[0] == 'n' and e[2] == -2:
1251 otherparent.add(fname)
1255 otherparent.add(fname)
1252 return nonnorm, otherparent
1256 return nonnorm, otherparent
1253
1257
1254 @propertycache
1258 @propertycache
1255 def filefoldmap(self):
1259 def filefoldmap(self):
1256 """Returns a dictionary mapping normalized case paths to their
1260 """Returns a dictionary mapping normalized case paths to their
1257 non-normalized versions.
1261 non-normalized versions.
1258 """
1262 """
1259 try:
1263 try:
1260 makefilefoldmap = parsers.make_file_foldmap
1264 makefilefoldmap = parsers.make_file_foldmap
1261 except AttributeError:
1265 except AttributeError:
1262 pass
1266 pass
1263 else:
1267 else:
1264 return makefilefoldmap(self._map, util.normcasespec,
1268 return makefilefoldmap(self._map, util.normcasespec,
1265 util.normcasefallback)
1269 util.normcasefallback)
1266
1270
1267 f = {}
1271 f = {}
1268 normcase = util.normcase
1272 normcase = util.normcase
1269 for name, s in self._map.iteritems():
1273 for name, s in self._map.iteritems():
1270 if s[0] != 'r':
1274 if s[0] != 'r':
1271 f[normcase(name)] = name
1275 f[normcase(name)] = name
1272 f['.'] = '.' # prevents useless util.fspath() invocation
1276 f['.'] = '.' # prevents useless util.fspath() invocation
1273 return f
1277 return f
1274
1278
1275 @propertycache
1279 @propertycache
1276 def dirs(self):
1280 def dirs(self):
1277 """Returns a set-like object containing all the directories in the
1281 """Returns a set-like object containing all the directories in the
1278 current dirstate.
1282 current dirstate.
1279 """
1283 """
1280 return util.dirs(self._map, 'r')
1284 return util.dirs(self._map, 'r')
1281
1285
1282 def _opendirstatefile(self):
1286 def _opendirstatefile(self):
1283 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1287 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1284 if self._pendingmode is not None and self._pendingmode != mode:
1288 if self._pendingmode is not None and self._pendingmode != mode:
1285 fp.close()
1289 fp.close()
1286 raise error.Abort(_('working directory state may be '
1290 raise error.Abort(_('working directory state may be '
1287 'changed parallelly'))
1291 'changed parallelly'))
1288 self._pendingmode = mode
1292 self._pendingmode = mode
1289 return fp
1293 return fp
1290
1294
1291 def parents(self):
1295 def parents(self):
1292 if not self._parents:
1296 if not self._parents:
1293 try:
1297 try:
1294 fp = self._opendirstatefile()
1298 fp = self._opendirstatefile()
1295 st = fp.read(40)
1299 st = fp.read(40)
1296 fp.close()
1300 fp.close()
1297 except IOError as err:
1301 except IOError as err:
1298 if err.errno != errno.ENOENT:
1302 if err.errno != errno.ENOENT:
1299 raise
1303 raise
1300 # File doesn't exist, so the current state is empty
1304 # File doesn't exist, so the current state is empty
1301 st = ''
1305 st = ''
1302
1306
1303 l = len(st)
1307 l = len(st)
1304 if l == 40:
1308 if l == 40:
1305 self._parents = st[:20], st[20:40]
1309 self._parents = st[:20], st[20:40]
1306 elif l == 0:
1310 elif l == 0:
1307 self._parents = [nullid, nullid]
1311 self._parents = [nullid, nullid]
1308 else:
1312 else:
1309 raise error.Abort(_('working directory state appears '
1313 raise error.Abort(_('working directory state appears '
1310 'damaged!'))
1314 'damaged!'))
1311
1315
1312 return self._parents
1316 return self._parents
1313
1317
1314 def setparents(self, p1, p2):
1318 def setparents(self, p1, p2):
1315 self._parents = (p1, p2)
1319 self._parents = (p1, p2)
1316 self._dirtyparents = True
1320 self._dirtyparents = True
1317
1321
1318 def read(self):
1322 def read(self):
1319 # ignore HG_PENDING because identity is used only for writing
1323 # ignore HG_PENDING because identity is used only for writing
1320 self.identity = util.filestat.frompath(
1324 self.identity = util.filestat.frompath(
1321 self._opener.join(self._filename))
1325 self._opener.join(self._filename))
1322
1326
1323 try:
1327 try:
1324 fp = self._opendirstatefile()
1328 fp = self._opendirstatefile()
1325 try:
1329 try:
1326 st = fp.read()
1330 st = fp.read()
1327 finally:
1331 finally:
1328 fp.close()
1332 fp.close()
1329 except IOError as err:
1333 except IOError as err:
1330 if err.errno != errno.ENOENT:
1334 if err.errno != errno.ENOENT:
1331 raise
1335 raise
1332 return
1336 return
1333 if not st:
1337 if not st:
1334 return
1338 return
1335
1339
1336 if util.safehasattr(parsers, 'dict_new_presized'):
1340 if util.safehasattr(parsers, 'dict_new_presized'):
1337 # Make an estimate of the number of files in the dirstate based on
1341 # Make an estimate of the number of files in the dirstate based on
1338 # its size. From a linear regression on a set of real-world repos,
1342 # its size. From a linear regression on a set of real-world repos,
1339 # all over 10,000 files, the size of a dirstate entry is 85
1343 # all over 10,000 files, the size of a dirstate entry is 85
1340 # bytes. The cost of resizing is significantly higher than the cost
1344 # bytes. The cost of resizing is significantly higher than the cost
1341 # of filling in a larger presized dict, so subtract 20% from the
1345 # of filling in a larger presized dict, so subtract 20% from the
1342 # size.
1346 # size.
1343 #
1347 #
1344 # This heuristic is imperfect in many ways, so in a future dirstate
1348 # This heuristic is imperfect in many ways, so in a future dirstate
1345 # format update it makes sense to just record the number of entries
1349 # format update it makes sense to just record the number of entries
1346 # on write.
1350 # on write.
1347 self._map = parsers.dict_new_presized(len(st) / 71)
1351 self._map = parsers.dict_new_presized(len(st) / 71)
1348
1352
1349 # Python's garbage collector triggers a GC each time a certain number
1353 # Python's garbage collector triggers a GC each time a certain number
1350 # of container objects (the number being defined by
1354 # of container objects (the number being defined by
1351 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1355 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1352 # for each file in the dirstate. The C version then immediately marks
1356 # for each file in the dirstate. The C version then immediately marks
1353 # them as not to be tracked by the collector. However, this has no
1357 # them as not to be tracked by the collector. However, this has no
1354 # effect on when GCs are triggered, only on what objects the GC looks
1358 # effect on when GCs are triggered, only on what objects the GC looks
1355 # into. This means that O(number of files) GCs are unavoidable.
1359 # into. This means that O(number of files) GCs are unavoidable.
1356 # Depending on when in the process's lifetime the dirstate is parsed,
1360 # Depending on when in the process's lifetime the dirstate is parsed,
1357 # this can get very expensive. As a workaround, disable GC while
1361 # this can get very expensive. As a workaround, disable GC while
1358 # parsing the dirstate.
1362 # parsing the dirstate.
1359 #
1363 #
1360 # (we cannot decorate the function directly since it is in a C module)
1364 # (we cannot decorate the function directly since it is in a C module)
1361 parse_dirstate = util.nogc(parsers.parse_dirstate)
1365 parse_dirstate = util.nogc(parsers.parse_dirstate)
1362 p = parse_dirstate(self._map, self.copymap, st)
1366 p = parse_dirstate(self._map, self.copymap, st)
1363 if not self._dirtyparents:
1367 if not self._dirtyparents:
1364 self.setparents(*p)
1368 self.setparents(*p)
1365
1369
1366 def write(self, st, now):
1370 def write(self, st, now):
1367 st.write(parsers.pack_dirstate(self._map, self.copymap,
1371 st.write(parsers.pack_dirstate(self._map, self.copymap,
1368 self.parents(), now))
1372 self.parents(), now))
1369 st.close()
1373 st.close()
1370 self._dirtyparents = False
1374 self._dirtyparents = False
1371 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1375 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1372
1376
1373 @propertycache
1377 @propertycache
1374 def nonnormalset(self):
1378 def nonnormalset(self):
1375 nonnorm, otherparents = self.nonnormalentries()
1379 nonnorm, otherparents = self.nonnormalentries()
1376 self.otherparentset = otherparents
1380 self.otherparentset = otherparents
1377 return nonnorm
1381 return nonnorm
1378
1382
1379 @propertycache
1383 @propertycache
1380 def otherparentset(self):
1384 def otherparentset(self):
1381 nonnorm, otherparents = self.nonnormalentries()
1385 nonnorm, otherparents = self.nonnormalentries()
1382 self.nonnormalset = nonnorm
1386 self.nonnormalset = nonnorm
1383 return otherparents
1387 return otherparents
1384
1388
1385 @propertycache
1389 @propertycache
1386 def identity(self):
1390 def identity(self):
1387 self.read()
1391 self.read()
1388 return self.identity
1392 return self.identity
1389
1393
1390 @propertycache
1394 @propertycache
1391 def dirfoldmap(self):
1395 def dirfoldmap(self):
1392 f = {}
1396 f = {}
1393 normcase = util.normcase
1397 normcase = util.normcase
1394 for name in self.dirs:
1398 for name in self.dirs:
1395 f[normcase(name)] = name
1399 f[normcase(name)] = name
1396 return f
1400 return f
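The change in this diff adds a clear() method to dirstatemap that resets the entry map, the copy map, and the parents in one call. Below is a minimal, self-contained sketch of that behaviour for illustration only: the stand-in class name fakedirstatemap and the dummy 20-byte nullid are assumptions made here, not part of this changeset or of Mercurial's API.

    # Illustrative sketch only -- mirrors the shape of the clear() method
    # introduced on dirstatemap in this revision, using stand-in names.
    nullid = b"\0" * 20  # assumption: a 20-byte null node id

    class fakedirstatemap(object):
        def __init__(self):
            self._map = {'a': ('n', 0o644, 10, 0)}  # path -> dirstate entry tuple
            self.copymap = {'a': 'b'}               # copy-source tracking
            self._parents = None
            self._dirtyparents = False

        def setparents(self, p1, p2):
            self._parents = (p1, p2)
            self._dirtyparents = True

        def clear(self):
            # same three steps as the new dirstatemap.clear() in this diff
            self._map = {}
            self.copymap = {}
            self.setparents(nullid, nullid)

    dmap = fakedirstatemap()
    dmap.clear()
    assert dmap._map == {} and dmap.copymap == {}
    assert dmap._parents == (nullid, nullid)

Keeping the reset on the map object means the map, the copy map, and the parent pointers are cleared next to the data structures they describe, rather than by code outside the class.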