dirstate: clean up when restoring identical backups...
Mark Thomas
r34941:c2b30348 stable
@@ -1,1420 +1,1424 b''
# dirstate.py - working directory tracking for mercurial
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import collections
import contextlib
import errno
import os
import stat

from .i18n import _
from .node import nullid
from . import (
    encoding,
    error,
    match as matchmod,
    pathutil,
    policy,
    pycompat,
    scmutil,
    txnutil,
    util,
)

parsers = policy.importmod(r'parsers')

propertycache = util.propertycache
filecache = scmutil.filecache
_rangemask = 0x7fffffff
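# (editor's note, an assumption from how the mask is applied below:
# _rangemask truncates sizes and mtimes to 31 bits so they fit the signed
# 32-bit fields of the on-disk dirstate format.)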

dirstatetuple = parsers.dirstatetuple
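# dirstatetuple(state, mode, size, mtime) holds a single dirstate entry.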

class repocache(filecache):
    """filecache for files in .hg/"""
    def join(self, obj, fname):
        return obj._opener.join(fname)

class rootcache(filecache):
    """filecache for files in the repository root"""
    def join(self, obj, fname):
        return obj._join(fname)

def _getfsnow(vfs):
    '''Get "now" timestamp on filesystem'''
    tmpfd, tmpname = vfs.mkstemp()
    try:
        return os.fstat(tmpfd).st_mtime
    finally:
        os.close(tmpfd)
        vfs.unlink(tmpname)

class dirstate(object):

    def __init__(self, opener, ui, root, validate, sparsematchfn):
        '''Create a new dirstate object.

        opener is an open()-like callable that can be used to open the
        dirstate file; root is the root of the directory tracked by
        the dirstate.
        '''
        self._opener = opener
        self._validate = validate
        self._root = root
        self._sparsematchfn = sparsematchfn
        # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
        # UNC path pointing to root share (issue4557)
        self._rootdir = pathutil.normasprefix(root)
        self._dirty = False
        self._lastnormaltime = 0
        self._ui = ui
        self._filecache = {}
        self._parentwriters = 0
        self._filename = 'dirstate'
        self._pendingfilename = '%s.pending' % self._filename
        self._plchangecallbacks = {}
        self._origpl = None
        self._updatedfiles = set()

    @contextlib.contextmanager
    def parentchange(self):
        '''Context manager for handling dirstate parents.

        If an exception occurs in the scope of the context manager,
        the incoherent dirstate won't be written when wlock is
        released.
        '''
        self._parentwriters += 1
        yield
        # Typically we want the "undo" step of a context manager in a
        # finally block so it happens even when an exception
        # occurs. In this case, however, we only want to decrement
        # parentwriters if the code in the with statement exits
        # normally, so we don't have a try/finally here on purpose.
        self._parentwriters -= 1

    def beginparentchange(self):
        '''Marks the beginning of a set of changes that involve changing
        the dirstate parents. If there is an exception during this time,
        the dirstate will not be written when the wlock is released. This
        prevents writing an incoherent dirstate where the parent doesn't
        match the contents.
        '''
        self._ui.deprecwarn('beginparentchange is obsoleted by the '
                            'parentchange context manager.', '4.3')
        self._parentwriters += 1

    def endparentchange(self):
        '''Marks the end of a set of changes that involve changing the
        dirstate parents. Once all parent changes have been marked done,
        the wlock will be free to write the dirstate on release.
        '''
        self._ui.deprecwarn('endparentchange is obsoleted by the '
                            'parentchange context manager.', '4.3')
        if self._parentwriters > 0:
            self._parentwriters -= 1

    def pendingparentchange(self):
        '''Returns true if the dirstate is in the middle of a set of changes
        that modify the dirstate parent.
        '''
        return self._parentwriters > 0

    @propertycache
    def _map(self):
        '''Return the dirstate contents as a map from filename to
        (state, mode, size, time).'''
        self._map = dirstatemap(self._ui, self._opener, self._root)
        return self._map

    @property
    def _sparsematcher(self):
        """The matcher for the sparse checkout.

        The working directory may not include every file from a manifest. The
        matcher obtained by this property will match a path if it is to be
        included in the working directory.
        """
        # TODO there is potential to cache this property. For now, the matcher
        # is resolved on every access. (But the called function does use a
        # cache to keep the lookup fast.)
        return self._sparsematchfn()

    @repocache('branch')
    def _branch(self):
        try:
            return self._opener.read("branch").strip() or "default"
        except IOError as inst:
            if inst.errno != errno.ENOENT:
                raise
            return "default"

    @property
    def _pl(self):
        return self._map.parents()

    def dirs(self):
        return self._map.dirs

    @rootcache('.hgignore')
    def _ignore(self):
        files = self._ignorefiles()
        if not files:
            return matchmod.never(self._root, '')

        pats = ['include:%s' % f for f in files]
        return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)

    @propertycache
    def _slash(self):
        return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'

    @propertycache
    def _checklink(self):
        return util.checklink(self._root)

    @propertycache
    def _checkexec(self):
        return util.checkexec(self._root)

    @propertycache
    def _checkcase(self):
        return not util.fscasesensitive(self._join('.hg'))

    def _join(self, f):
        # much faster than os.path.join()
        # it's safe because f is always a relative path
        return self._rootdir + f

    def flagfunc(self, buildfallback):
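        # (editor's note) Returns a function that reports a path's 'l'
        # (symlink) and 'x' (exec) flags, using lstat() for whatever the
        # filesystem supports and buildfallback() for flags it cannot express.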
        if self._checklink and self._checkexec:
            def f(x):
                try:
                    st = os.lstat(self._join(x))
                    if util.statislink(st):
                        return 'l'
                    if util.statisexec(st):
                        return 'x'
                except OSError:
                    pass
                return ''
            return f

        fallback = buildfallback()
        if self._checklink:
            def f(x):
                if os.path.islink(self._join(x)):
                    return 'l'
                if 'x' in fallback(x):
                    return 'x'
                return ''
            return f
        if self._checkexec:
            def f(x):
                if 'l' in fallback(x):
                    return 'l'
                if util.isexec(self._join(x)):
                    return 'x'
                return ''
            return f
        else:
            return fallback

    @propertycache
    def _cwd(self):
        # internal config: ui.forcecwd
        forcecwd = self._ui.config('ui', 'forcecwd')
        if forcecwd:
            return forcecwd
        return pycompat.getcwd()

    def getcwd(self):
        '''Return the path from which a canonical path is calculated.

        This path should be used to resolve file patterns or to convert
        canonical paths back to file paths for display. It shouldn't be
        used to get real file paths. Use vfs functions instead.
        '''
        cwd = self._cwd
        if cwd == self._root:
            return ''
        # self._root ends with a path separator if self._root is '/' or 'C:\'
        rootsep = self._root
        if not util.endswithsep(rootsep):
            rootsep += pycompat.ossep
        if cwd.startswith(rootsep):
            return cwd[len(rootsep):]
        else:
            # we're outside the repo. return an absolute path.
            return cwd

    def pathto(self, f, cwd=None):
        if cwd is None:
            cwd = self.getcwd()
        path = util.pathto(self._root, cwd, f)
        if self._slash:
            return util.pconvert(path)
        return path

    def __getitem__(self, key):
        '''Return the current state of key (a filename) in the dirstate.

        States are:
          n  normal
          m  needs merging
          r  marked for removal
          a  marked for addition
          ?  not tracked
        '''
        return self._map.get(key, ("?",))[0]

    def __contains__(self, key):
        return key in self._map

    def __iter__(self):
        return iter(sorted(self._map))

    def items(self):
        return self._map.iteritems()

    iteritems = items

    def parents(self):
        return [self._validate(p) for p in self._pl]

    def p1(self):
        return self._validate(self._pl[0])

    def p2(self):
        return self._validate(self._pl[1])

    def branch(self):
        return encoding.tolocal(self._branch)

    def setparents(self, p1, p2=nullid):
        """Set dirstate parents to p1 and p2.

        When moving from two parents to one, 'm' merged entries are
        adjusted to normal and previous copy records discarded and
        returned by the call.

        See localrepo.setparents()
        """
        if self._parentwriters == 0:
            raise ValueError("cannot set dirstate parent without "
                             "calling dirstate.beginparentchange")

        self._dirty = True
        oldp2 = self._pl[1]
        if self._origpl is None:
            self._origpl = self._pl
        self._map.setparents(p1, p2)
        copies = {}
        if oldp2 != nullid and p2 == nullid:
            candidatefiles = self._map.nonnormalset.union(
                self._map.otherparentset)
            for f in candidatefiles:
                s = self._map.get(f)
                if s is None:
                    continue

                # Discard 'm' markers when moving away from a merge state
                if s[0] == 'm':
                    source = self._map.copymap.get(f)
                    if source:
                        copies[f] = source
                    self.normallookup(f)
                # Also fix up otherparent markers
                elif s[0] == 'n' and s[2] == -2:
                    source = self._map.copymap.get(f)
                    if source:
                        copies[f] = source
                    self.add(f)
        return copies

    def setbranch(self, branch):
        self._branch = encoding.fromlocal(branch)
        f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
        try:
            f.write(self._branch + '\n')
            f.close()

            # make sure filecache has the correct stat info for _branch after
            # replacing the underlying file
            ce = self._filecache['_branch']
            if ce:
                ce.refresh()
        except: # re-raises
            f.discard()
            raise

    def invalidate(self):
        '''Causes the next access to reread the dirstate.

        This is different from localrepo.invalidatedirstate() because it always
        rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
        check whether the dirstate has changed before rereading it.'''

        for a in ("_map", "_branch", "_ignore"):
            if a in self.__dict__:
                delattr(self, a)
        self._lastnormaltime = 0
        self._dirty = False
        self._updatedfiles.clear()
        self._parentwriters = 0
        self._origpl = None

    def copy(self, source, dest):
        """Mark dest as a copy of source. Unmark dest if source is None."""
        if source == dest:
            return
        self._dirty = True
        if source is not None:
            self._map.copymap[dest] = source
            self._updatedfiles.add(source)
            self._updatedfiles.add(dest)
        elif self._map.copymap.pop(dest, None):
            self._updatedfiles.add(dest)

    def copied(self, file):
        return self._map.copymap.get(file, None)

    def copies(self):
        return self._map.copymap

    def _droppath(self, f):
        if self[f] not in "?r" and "dirs" in self._map.__dict__:
            self._map.dirs.delpath(f)

        if "filefoldmap" in self._map.__dict__:
            normed = util.normcase(f)
            if normed in self._map.filefoldmap:
                del self._map.filefoldmap[normed]

        self._updatedfiles.add(f)

    def _addpath(self, f, state, mode, size, mtime):
        oldstate = self[f]
        if state == 'a' or oldstate == 'r':
            scmutil.checkfilename(f)
            if f in self._map.dirs:
                raise error.Abort(_('directory %r already in dirstate') % f)
            # shadows
            for d in util.finddirs(f):
                if d in self._map.dirs:
                    break
                entry = self._map.get(d)
                if entry is not None and entry[0] != 'r':
                    raise error.Abort(
                        _('file %r in dirstate clashes with %r') % (d, f))
        if oldstate in "?r" and "dirs" in self._map.__dict__:
            self._map.dirs.addpath(f)
        self._dirty = True
        self._updatedfiles.add(f)
        self._map[f] = dirstatetuple(state, mode, size, mtime)
        if state != 'n' or mtime == -1:
            self._map.nonnormalset.add(f)
        if size == -2:
            self._map.otherparentset.add(f)

    def normal(self, f):
        '''Mark a file normal and clean.'''
        s = os.lstat(self._join(f))
        mtime = s.st_mtime
        self._addpath(f, 'n', s.st_mode,
                      s.st_size & _rangemask, mtime & _rangemask)
        self._map.copymap.pop(f, None)
        if f in self._map.nonnormalset:
            self._map.nonnormalset.remove(f)
        if mtime > self._lastnormaltime:
            # Remember the most recent modification timeslot for status(),
            # to make sure we won't miss future size-preserving file content
            # modifications that happen within the same timeslot.
            self._lastnormaltime = mtime

    def normallookup(self, f):
        '''Mark a file normal, but possibly dirty.'''
        if self._pl[1] != nullid:
            # if there is a merge going on and the file was either
            # in state 'm' (-1) or coming from other parent (-2) before
            # being removed, restore that state.
            entry = self._map.get(f)
            if entry is not None:
                if entry[0] == 'r' and entry[2] in (-1, -2):
                    source = self._map.copymap.get(f)
                    if entry[2] == -1:
                        self.merge(f)
                    elif entry[2] == -2:
                        self.otherparent(f)
                    if source:
                        self.copy(source, f)
                    return
                if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
                    return
        self._addpath(f, 'n', 0, -1, -1)
        self._map.copymap.pop(f, None)
        if f in self._map.nonnormalset:
            self._map.nonnormalset.remove(f)

    def otherparent(self, f):
        '''Mark as coming from the other parent, always dirty.'''
        if self._pl[1] == nullid:
            raise error.Abort(_("setting %r to other parent "
                                "only allowed in merges") % f)
        if f in self and self[f] == 'n':
            # merge-like
            self._addpath(f, 'm', 0, -2, -1)
        else:
            # add-like
            self._addpath(f, 'n', 0, -2, -1)
        self._map.copymap.pop(f, None)

    def add(self, f):
        '''Mark a file added.'''
        self._addpath(f, 'a', 0, -1, -1)
        self._map.copymap.pop(f, None)

    def remove(self, f):
        '''Mark a file removed.'''
        self._dirty = True
        self._droppath(f)
        size = 0
        if self._pl[1] != nullid:
            entry = self._map.get(f)
            if entry is not None:
                # backup the previous state
                if entry[0] == 'm': # merge
                    size = -1
                elif entry[0] == 'n' and entry[2] == -2: # other parent
                    size = -2
                    self._map.otherparentset.add(f)
        self._map[f] = dirstatetuple('r', 0, size, 0)
        self._map.nonnormalset.add(f)
        if size == 0:
            self._map.copymap.pop(f, None)

    def merge(self, f):
        '''Mark a file merged.'''
        if self._pl[1] == nullid:
            return self.normallookup(f)
        return self.otherparent(f)

    def drop(self, f):
        '''Drop a file from the dirstate'''
        if f in self._map:
            self._dirty = True
            self._droppath(f)
            del self._map[f]
            if f in self._map.nonnormalset:
                self._map.nonnormalset.remove(f)
            self._map.copymap.pop(f, None)

    def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
        if exists is None:
            exists = os.path.lexists(os.path.join(self._root, path))
        if not exists:
            # Maybe a path component exists
            if not ignoremissing and '/' in path:
                d, f = path.rsplit('/', 1)
                d = self._normalize(d, False, ignoremissing, None)
                folded = d + "/" + f
            else:
                # No path components, preserve original case
                folded = path
        else:
            # recursively normalize leading directory components
            # against dirstate
            if '/' in normed:
                d, f = normed.rsplit('/', 1)
                d = self._normalize(d, False, ignoremissing, True)
                r = self._root + "/" + d
                folded = d + "/" + util.fspath(f, r)
            else:
                folded = util.fspath(normed, self._root)
            storemap[normed] = folded

        return folded

    def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
        normed = util.normcase(path)
        folded = self._map.filefoldmap.get(normed, None)
        if folded is None:
            if isknown:
                folded = path
            else:
                folded = self._discoverpath(path, normed, ignoremissing, exists,
                                            self._map.filefoldmap)
        return folded

    def _normalize(self, path, isknown, ignoremissing=False, exists=None):
        normed = util.normcase(path)
        folded = self._map.filefoldmap.get(normed, None)
        if folded is None:
            folded = self._map.dirfoldmap.get(normed, None)
        if folded is None:
            if isknown:
                folded = path
            else:
                # store discovered result in dirfoldmap so that future
                # normalizefile calls don't start matching directories
                folded = self._discoverpath(path, normed, ignoremissing, exists,
                                            self._map.dirfoldmap)
        return folded

    def normalize(self, path, isknown=False, ignoremissing=False):
        '''
        normalize the case of a pathname when on a casefolding filesystem

        isknown specifies whether the filename came from walking the
        disk, to avoid extra filesystem access.

        If ignoremissing is True, missing paths are returned
        unchanged. Otherwise, we try harder to normalize possibly
        existing path components.

        The normalized case is determined based on the following precedence:

        - version of name already stored in the dirstate
        - version of name stored on disk
        - version provided via command arguments
        '''

        if self._checkcase:
            return self._normalize(path, isknown, ignoremissing)
        return path

    def clear(self):
        self._map.clear()
        self._lastnormaltime = 0
        self._updatedfiles.clear()
        self._dirty = True

    def rebuild(self, parent, allfiles, changedfiles=None):
        if changedfiles is None:
            # Rebuild entire dirstate
            changedfiles = allfiles
            lastnormaltime = self._lastnormaltime
            self.clear()
            self._lastnormaltime = lastnormaltime

        if self._origpl is None:
            self._origpl = self._pl
        self._map.setparents(parent, nullid)
        for f in changedfiles:
            if f in allfiles:
                self.normallookup(f)
            else:
                self.drop(f)

        self._dirty = True

    def identity(self):
        '''Return identity of dirstate itself to detect changes in storage

        If identity of previous dirstate is equal to this, writing
        changes based on the former dirstate out can keep consistency.
        '''
        return self._map.identity

    def write(self, tr):
        if not self._dirty:
            return

        filename = self._filename
        if tr:
            # 'dirstate.write()' is not only for writing in-memory
            # changes out, but also for dropping ambiguous timestamp.
            # delayed writing re-raises "ambiguous timestamp issue".
            # See also the wiki page below for detail:
            # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan

            # emulate dropping timestamp in 'parsers.pack_dirstate'
            now = _getfsnow(self._opener)
            dmap = self._map
            for f in self._updatedfiles:
                e = dmap.get(f)
                if e is not None and e[0] == 'n' and e[3] == now:
                    dmap[f] = dirstatetuple(e[0], e[1], e[2], -1)
                    self._map.nonnormalset.add(f)

            # emulate that all 'dirstate.normal' results are written out
            self._lastnormaltime = 0
            self._updatedfiles.clear()

            # delay writing in-memory changes out
            tr.addfilegenerator('dirstate', (self._filename,),
                                self._writedirstate, location='plain')
            return

        st = self._opener(filename, "w", atomictemp=True, checkambig=True)
        self._writedirstate(st)

    def addparentchangecallback(self, category, callback):
        """add a callback to be called when the wd parents are changed

        Callback will be called with the following arguments:
            dirstate, (oldp1, oldp2), (newp1, newp2)

        Category is a unique identifier to allow overwriting an old callback
        with a newer callback.
        """
        self._plchangecallbacks[category] = callback

    def _writedirstate(self, st):
        # notify callbacks about parents change
        if self._origpl is not None and self._origpl != self._pl:
            for c, callback in sorted(self._plchangecallbacks.iteritems()):
                callback(self, self._origpl, self._pl)
            self._origpl = None
        # use the modification time of the newly created temporary file as the
        # filesystem's notion of 'now'
        now = util.fstat(st).st_mtime & _rangemask

        # enough 'delaywrite' prevents 'pack_dirstate' from dropping
        # timestamp of each entry in dirstate, because of 'now > mtime'
        delaywrite = self._ui.configint('debug', 'dirstate.delaywrite')
        if delaywrite > 0:
            # do we have any files to delay for?
            for f, e in self._map.iteritems():
                if e[0] == 'n' and e[3] == now:
                    import time # to avoid useless import
                    # rather than sleep n seconds, sleep until the next
                    # multiple of n seconds
                    clock = time.time()
                    start = int(clock) - (int(clock) % delaywrite)
                    end = start + delaywrite
                    time.sleep(end - clock)
                    now = end # trust our estimate that the end is near now
                    break

        self._map.write(st, now)
        self._lastnormaltime = 0
        self._dirty = False

    def _dirignore(self, f):
        if f == '.':
            return False
        if self._ignore(f):
            return True
        for p in util.finddirs(f):
            if self._ignore(p):
                return True
        return False

    def _ignorefiles(self):
        files = []
        if os.path.exists(self._join('.hgignore')):
            files.append(self._join('.hgignore'))
        for name, path in self._ui.configitems("ui"):
            if name == 'ignore' or name.startswith('ignore.'):
                # we need to use os.path.join here rather than self._join
                # because path is arbitrary and user-specified
                files.append(os.path.join(self._rootdir, util.expandpath(path)))
        return files

    def _ignorefileandline(self, f):
        files = collections.deque(self._ignorefiles())
        visited = set()
        while files:
            i = files.popleft()
            patterns = matchmod.readpatternfile(i, self._ui.warn,
                                                sourceinfo=True)
            for pattern, lineno, line in patterns:
                kind, p = matchmod._patsplit(pattern, 'glob')
                if kind == "subinclude":
                    if p not in visited:
                        files.append(p)
                    continue
                m = matchmod.match(self._root, '', [], [pattern],
                                   warn=self._ui.warn)
                if m(f):
                    return (i, lineno, line)
            visited.add(i)
        return (None, -1, "")

    def _walkexplicit(self, match, subrepos):
        '''Get stat data about the files explicitly specified by match.

        Return a triple (results, dirsfound, dirsnotfound).
        - results is a mapping from filename to stat result. It also contains
          listings mapping subrepos and .hg to None.
        - dirsfound is a list of files found to be directories.
        - dirsnotfound is a list of files that the dirstate thinks are
          directories and that were not found.'''

        def badtype(mode):
            kind = _('unknown')
            if stat.S_ISCHR(mode):
                kind = _('character device')
            elif stat.S_ISBLK(mode):
                kind = _('block device')
            elif stat.S_ISFIFO(mode):
                kind = _('fifo')
            elif stat.S_ISSOCK(mode):
                kind = _('socket')
            elif stat.S_ISDIR(mode):
                kind = _('directory')
            return _('unsupported file type (type is %s)') % kind

        matchedir = match.explicitdir
        badfn = match.bad
        dmap = self._map
        lstat = os.lstat
        getkind = stat.S_IFMT
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join
        dirsfound = []
        foundadd = dirsfound.append
        dirsnotfound = []
        notfoundadd = dirsnotfound.append

        if not match.isexact() and self._checkcase:
            normalize = self._normalize
        else:
            normalize = None

        files = sorted(match.files())
        subrepos.sort()
        i, j = 0, 0
        while i < len(files) and j < len(subrepos):
            subpath = subrepos[j] + "/"
            if files[i] < subpath:
                i += 1
                continue
            while i < len(files) and files[i].startswith(subpath):
                del files[i]
            j += 1

        if not files or '.' in files:
            files = ['.']
        results = dict.fromkeys(subrepos)
        results['.hg'] = None

        alldirs = None
        for ff in files:
            # constructing the foldmap is expensive, so don't do it for the
            # common case where files is ['.']
            if normalize and ff != '.':
                nf = normalize(ff, False, True)
            else:
                nf = ff
            if nf in results:
                continue

            try:
                st = lstat(join(nf))
                kind = getkind(st.st_mode)
                if kind == dirkind:
                    if nf in dmap:
                        # file replaced by dir on disk but still in dirstate
                        results[nf] = None
                    if matchedir:
                        matchedir(nf)
                    foundadd((nf, ff))
                elif kind == regkind or kind == lnkkind:
                    results[nf] = st
                else:
                    badfn(ff, badtype(kind))
                    if nf in dmap:
                        results[nf] = None
            except OSError as inst: # nf not found on disk - it is dirstate only
                if nf in dmap: # does it exactly match a missing file?
                    results[nf] = None
                else: # does it match a missing directory?
                    if alldirs is None:
                        alldirs = util.dirs(dmap._map)
                    if nf in alldirs:
                        if matchedir:
                            matchedir(nf)
                        notfoundadd(nf)
                    else:
                        badfn(ff, encoding.strtolocal(inst.strerror))

        # Case insensitive filesystems cannot rely on lstat() failing to detect
        # a case-only rename. Prune the stat object for any file that does not
        # match the case in the filesystem, if there are multiple files that
        # normalize to the same path.
        if match.isexact() and self._checkcase:
            normed = {}

            for f, st in results.iteritems():
                if st is None:
                    continue

                nc = util.normcase(f)
                paths = normed.get(nc)

                if paths is None:
                    paths = set()
                    normed[nc] = paths

                paths.add(f)

            for norm, paths in normed.iteritems():
                if len(paths) > 1:
                    for path in paths:
                        folded = self._discoverpath(path, norm, True, None,
                                                    self._map.dirfoldmap)
                        if path != folded:
                            results[path] = None

        return results, dirsfound, dirsnotfound

    def walk(self, match, subrepos, unknown, ignored, full=True):
        '''
        Walk recursively through the directory tree, finding all files
        matched by match.

        If full is False, maybe skip some known-clean files.

        Return a dict mapping filename to stat-like object (either
        mercurial.osutil.stat instance or return value of os.stat()).

        '''
        # full is a flag that extensions that hook into walk can use -- this
        # implementation doesn't use it at all. This satisfies the contract
        # because we only guarantee a "maybe".

        if ignored:
            ignore = util.never
            dirignore = util.never
        elif unknown:
            ignore = self._ignore
            dirignore = self._dirignore
        else:
            # if not unknown and not ignored, drop dir recursion and step 2
            ignore = util.always
            dirignore = util.always

        matchfn = match.matchfn
        matchalways = match.always()
        matchtdir = match.traversedir
        dmap = self._map
        listdir = util.listdir
        lstat = os.lstat
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join

        exact = skipstep3 = False
        if match.isexact(): # match.exact
            exact = True
            dirignore = util.always # skip step 2
        elif match.prefix(): # match.match, no patterns
            skipstep3 = True

        if not exact and self._checkcase:
            normalize = self._normalize
            normalizefile = self._normalizefile
            skipstep3 = False
        else:
            normalize = self._normalize
            normalizefile = None

        # step 1: find all explicit files
        results, work, dirsnotfound = self._walkexplicit(match, subrepos)

        skipstep3 = skipstep3 and not (work or dirsnotfound)
        work = [d for d in work if not dirignore(d[0])]

        # step 2: visit subdirectories
        def traverse(work, alreadynormed):
            wadd = work.append
            while work:
                nd = work.pop()
                if not match.visitdir(nd):
                    continue
                skip = None
                if nd == '.':
                    nd = ''
                else:
                    skip = '.hg'
                try:
                    entries = listdir(join(nd), stat=True, skip=skip)
                except OSError as inst:
                    if inst.errno in (errno.EACCES, errno.ENOENT):
                        match.bad(self.pathto(nd),
                                  encoding.strtolocal(inst.strerror))
                        continue
                    raise
                for f, kind, st in entries:
                    if normalizefile:
                        # even though f might be a directory, we're only
                        # interested in comparing it to files currently in the
                        # dmap -- therefore normalizefile is enough
                        nf = normalizefile(nd and (nd + "/" + f) or f, True,
                                           True)
                    else:
                        nf = nd and (nd + "/" + f) or f
                    if nf not in results:
                        if kind == dirkind:
                            if not ignore(nf):
                                if matchtdir:
                                    matchtdir(nf)
                                wadd(nf)
                            if nf in dmap and (matchalways or matchfn(nf)):
                                results[nf] = None
                        elif kind == regkind or kind == lnkkind:
                            if nf in dmap:
                                if matchalways or matchfn(nf):
                                    results[nf] = st
                            elif ((matchalways or matchfn(nf))
                                  and not ignore(nf)):
                                # unknown file -- normalize if necessary
                                if not alreadynormed:
                                    nf = normalize(nf, False, True)
                                results[nf] = st
                        elif nf in dmap and (matchalways or matchfn(nf)):
                            results[nf] = None

        for nd, d in work:
            # alreadynormed means that processwork doesn't have to do any
            # expensive directory normalization
            alreadynormed = not normalize or nd == d
            traverse([d], alreadynormed)

        for s in subrepos:
            del results[s]
        del results['.hg']

        # step 3: visit remaining files from dmap
        if not skipstep3 and not exact:
            # If a dmap file is not in results yet, it was either
            # a) not matching matchfn b) ignored, c) missing, or d) under a
            # symlink directory.
            if not results and matchalways:
                visit = [f for f in dmap]
            else:
                visit = [f for f in dmap if f not in results and matchfn(f)]
            visit.sort()

            if unknown:
                # unknown == True means we walked all dirs under the roots
                # that weren't ignored, and everything that matched was stat'ed
                # and is already in results.
                # The rest must thus be ignored or under a symlink.
                audit_path = pathutil.pathauditor(self._root, cached=True)

                for nf in iter(visit):
                    # If a stat for the same file was already added with a
                    # different case, don't add one for this, since that would
                    # make it appear as if the file exists under both names
                    # on disk.
                    if (normalizefile and
                        normalizefile(nf, True, True) in results):
                        results[nf] = None
                    # Report ignored items in the dmap as long as they are not
1015 # under a symlink directory.
1015 # under a symlink directory.
1016 elif audit_path.check(nf):
1016 elif audit_path.check(nf):
1017 try:
1017 try:
1018 results[nf] = lstat(join(nf))
1018 results[nf] = lstat(join(nf))
1019 # file was just ignored, no links, and exists
1019 # file was just ignored, no links, and exists
1020 except OSError:
1020 except OSError:
1021 # file doesn't exist
1021 # file doesn't exist
1022 results[nf] = None
1022 results[nf] = None
1023 else:
1023 else:
1024 # It's either missing or under a symlink directory
1024 # It's either missing or under a symlink directory
1025 # which we in this case report as missing
1025 # which we in this case report as missing
1026 results[nf] = None
1026 results[nf] = None
1027 else:
1027 else:
1028 # We may not have walked the full directory tree above,
1028 # We may not have walked the full directory tree above,
1029 # so stat and check everything we missed.
1029 # so stat and check everything we missed.
1030 iv = iter(visit)
1030 iv = iter(visit)
1031 for st in util.statfiles([join(i) for i in visit]):
1031 for st in util.statfiles([join(i) for i in visit]):
1032 results[next(iv)] = st
1032 results[next(iv)] = st
1033 return results
1033 return results
1034
1034
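The walk above fills `results` with repository-relative, case-normalized paths: the value is the lstat result when the file was actually seen on disk, and None when the path is tracked in the dirstate but missing, matched only by name, or hidden behind a symlinked directory. A minimal sketch of that shape, with invented paths and a stand-in stat value:

import os

# Invented example of the mapping walk() returns; only the shape matters.
results = {
    'README': os.lstat(__file__),   # seen on disk: keep its stat result
    'removed/old.py': None,         # tracked but not found (or unreadable)
}

for path in sorted(results):
    st = results[path]
    print(path, 'missing' if st is None else '%d bytes' % st.st_size)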
1035 def status(self, match, subrepos, ignored, clean, unknown):
1035 def status(self, match, subrepos, ignored, clean, unknown):
1036 '''Determine the status of the working copy relative to the
1036 '''Determine the status of the working copy relative to the
1037 dirstate and return a pair of (unsure, status), where status is of type
1037 dirstate and return a pair of (unsure, status), where status is of type
1038 scmutil.status and:
1038 scmutil.status and:
1039
1039
1040 unsure:
1040 unsure:
1041 files that might have been modified since the dirstate was
1041 files that might have been modified since the dirstate was
1042 written, but need to be read to be sure (size is the same
1042 written, but need to be read to be sure (size is the same
1043 but mtime differs)
1043 but mtime differs)
1044 status.modified:
1044 status.modified:
1045 files that have definitely been modified since the dirstate
1045 files that have definitely been modified since the dirstate
1046 was written (different size or mode)
1046 was written (different size or mode)
1047 status.clean:
1047 status.clean:
1048 files that have definitely not been modified since the
1048 files that have definitely not been modified since the
1049 dirstate was written
1049 dirstate was written
1050 '''
1050 '''
1051 listignored, listclean, listunknown = ignored, clean, unknown
1051 listignored, listclean, listunknown = ignored, clean, unknown
1052 lookup, modified, added, unknown, ignored = [], [], [], [], []
1052 lookup, modified, added, unknown, ignored = [], [], [], [], []
1053 removed, deleted, clean = [], [], []
1053 removed, deleted, clean = [], [], []
1054
1054
1055 dmap = self._map
1055 dmap = self._map
1056 dmap.preload()
1056 dmap.preload()
1057 dcontains = dmap.__contains__
1057 dcontains = dmap.__contains__
1058 dget = dmap.__getitem__
1058 dget = dmap.__getitem__
1059 ladd = lookup.append # aka "unsure"
1059 ladd = lookup.append # aka "unsure"
1060 madd = modified.append
1060 madd = modified.append
1061 aadd = added.append
1061 aadd = added.append
1062 uadd = unknown.append
1062 uadd = unknown.append
1063 iadd = ignored.append
1063 iadd = ignored.append
1064 radd = removed.append
1064 radd = removed.append
1065 dadd = deleted.append
1065 dadd = deleted.append
1066 cadd = clean.append
1066 cadd = clean.append
1067 mexact = match.exact
1067 mexact = match.exact
1068 dirignore = self._dirignore
1068 dirignore = self._dirignore
1069 checkexec = self._checkexec
1069 checkexec = self._checkexec
1070 copymap = self._map.copymap
1070 copymap = self._map.copymap
1071 lastnormaltime = self._lastnormaltime
1071 lastnormaltime = self._lastnormaltime
1072
1072
1073 # We need to do full walks when either
1073 # We need to do full walks when either
1074 # - we're listing all clean files, or
1074 # - we're listing all clean files, or
1075 # - match.traversedir does something, because match.traversedir should
1075 # - match.traversedir does something, because match.traversedir should
1076 # be called for every dir in the working dir
1076 # be called for every dir in the working dir
1077 full = listclean or match.traversedir is not None
1077 full = listclean or match.traversedir is not None
1078 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1078 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1079 full=full).iteritems():
1079 full=full).iteritems():
1080 if not dcontains(fn):
1080 if not dcontains(fn):
1081 if (listignored or mexact(fn)) and dirignore(fn):
1081 if (listignored or mexact(fn)) and dirignore(fn):
1082 if listignored:
1082 if listignored:
1083 iadd(fn)
1083 iadd(fn)
1084 else:
1084 else:
1085 uadd(fn)
1085 uadd(fn)
1086 continue
1086 continue
1087
1087
1088 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1088 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1089 # written like that for performance reasons. dmap[fn] is not a
1089 # written like that for performance reasons. dmap[fn] is not a
1090 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1090 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1091 # opcode has fast paths when the value to be unpacked is a tuple or
1091 # opcode has fast paths when the value to be unpacked is a tuple or
1092 # a list, but falls back to creating a full-fledged iterator in
1092 # a list, but falls back to creating a full-fledged iterator in
1093 # general. That is much slower than simply accessing and storing the
1093 # general. That is much slower than simply accessing and storing the
1094 # tuple members one by one.
1094 # tuple members one by one.
1095 t = dget(fn)
1095 t = dget(fn)
1096 state = t[0]
1096 state = t[0]
1097 mode = t[1]
1097 mode = t[1]
1098 size = t[2]
1098 size = t[2]
1099 time = t[3]
1099 time = t[3]
1100
1100
1101 if not st and state in "nma":
1101 if not st and state in "nma":
1102 dadd(fn)
1102 dadd(fn)
1103 elif state == 'n':
1103 elif state == 'n':
1104 if (size >= 0 and
1104 if (size >= 0 and
1105 ((size != st.st_size and size != st.st_size & _rangemask)
1105 ((size != st.st_size and size != st.st_size & _rangemask)
1106 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1106 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1107 or size == -2 # other parent
1107 or size == -2 # other parent
1108 or fn in copymap):
1108 or fn in copymap):
1109 madd(fn)
1109 madd(fn)
1110 elif time != st.st_mtime and time != st.st_mtime & _rangemask:
1110 elif time != st.st_mtime and time != st.st_mtime & _rangemask:
1111 ladd(fn)
1111 ladd(fn)
1112 elif st.st_mtime == lastnormaltime:
1112 elif st.st_mtime == lastnormaltime:
1113 # fn may have just been marked as normal and it may have
1113 # fn may have just been marked as normal and it may have
1114 # changed in the same second without changing its size.
1114 # changed in the same second without changing its size.
1115 # This can happen if we quickly do multiple commits.
1115 # This can happen if we quickly do multiple commits.
1116 # Force lookup, so we don't miss such a racy file change.
1116 # Force lookup, so we don't miss such a racy file change.
1117 ladd(fn)
1117 ladd(fn)
1118 elif listclean:
1118 elif listclean:
1119 cadd(fn)
1119 cadd(fn)
1120 elif state == 'm':
1120 elif state == 'm':
1121 madd(fn)
1121 madd(fn)
1122 elif state == 'a':
1122 elif state == 'a':
1123 aadd(fn)
1123 aadd(fn)
1124 elif state == 'r':
1124 elif state == 'r':
1125 radd(fn)
1125 radd(fn)
1126
1126
1127 return (lookup, scmutil.status(modified, added, removed, deleted,
1127 return (lookup, scmutil.status(modified, added, removed, deleted,
1128 unknown, ignored, clean))
1128 unknown, ignored, clean))
1129
1129
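The size and mtime tests above accept either the raw stat value or the value truncated to 31 bits, because the dirstate records those fields in a signed 32-bit slot and (as far as the entry-recording code is concerned) masks large values with `_rangemask` (0x7fffffff). A small arithmetic sketch of why both comparisons are needed:

_rangemask = 0x7fffffff        # 31-bit mask used by the dirstate

st_mtime = 2500000000          # a timestamp larger than 2**31 - 1
stored = st_mtime & _rangemask # what the recorded dirstate slot holds

# status() only flags the file for lookup when *both* comparisons differ:
needs_lookup = stored != st_mtime and stored != (st_mtime & _rangemask)
print(stored, needs_lookup)    # 352516352 False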
1130 def matches(self, match):
1130 def matches(self, match):
1131 '''
1131 '''
1132 return files in the dirstate (in whatever state) filtered by match
1132 return files in the dirstate (in whatever state) filtered by match
1133 '''
1133 '''
1134 dmap = self._map
1134 dmap = self._map
1135 if match.always():
1135 if match.always():
1136 return dmap.keys()
1136 return dmap.keys()
1137 files = match.files()
1137 files = match.files()
1138 if match.isexact():
1138 if match.isexact():
1139 # fast path -- filter the other way around, since typically files is
1139 # fast path -- filter the other way around, since typically files is
1140 # much smaller than dmap
1140 # much smaller than dmap
1141 return [f for f in files if f in dmap]
1141 return [f for f in files if f in dmap]
1142 if match.prefix() and all(fn in dmap for fn in files):
1142 if match.prefix() and all(fn in dmap for fn in files):
1143 # fast path -- all the values are known to be files, so just return
1143 # fast path -- all the values are known to be files, so just return
1144 # that
1144 # that
1145 return list(files)
1145 return list(files)
1146 return [f for f in dmap if match(f)]
1146 return [f for f in dmap if match(f)]
1147
1147
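A rough illustration of the two fast paths above, with the matcher replaced by a plain predicate (so this is only a sketch of the shapes involved, not of the real matchmod API): an exact matcher filters its short file list against the dirstate, while a pattern matcher falls back to scanning every entry.

dmap = {'a.py': None, 'b.py': None, 'docs/c.txt': None}   # toy dirstate keys

# exact matcher: filter the requested files against dmap
requested = ['a.py', 'missing.py']
print([f for f in requested if f in dmap])       # ['a.py']

# pattern matcher: scan the whole dirstate instead
match = lambda f: f.endswith('.py')              # stand-in predicate
print(sorted(f for f in dmap if match(f)))       # ['a.py', 'b.py']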
1148 def _actualfilename(self, tr):
1148 def _actualfilename(self, tr):
1149 if tr:
1149 if tr:
1150 return self._pendingfilename
1150 return self._pendingfilename
1151 else:
1151 else:
1152 return self._filename
1152 return self._filename
1153
1153
1154 def savebackup(self, tr, backupname):
1154 def savebackup(self, tr, backupname):
1155 '''Save current dirstate into backup file'''
1155 '''Save current dirstate into backup file'''
1156 filename = self._actualfilename(tr)
1156 filename = self._actualfilename(tr)
1157 assert backupname != filename
1157 assert backupname != filename
1158
1158
1159 # use '_writedirstate' instead of 'write' to write changes certainly,
1159 # use '_writedirstate' instead of 'write' to write changes certainly,
1160 # because the latter omits writing out if transaction is running.
1160 # because the latter omits writing out if transaction is running.
1161 # output file will be used to create backup of dirstate at this point.
1161 # output file will be used to create backup of dirstate at this point.
1162 if self._dirty or not self._opener.exists(filename):
1162 if self._dirty or not self._opener.exists(filename):
1163 self._writedirstate(self._opener(filename, "w", atomictemp=True,
1163 self._writedirstate(self._opener(filename, "w", atomictemp=True,
1164 checkambig=True))
1164 checkambig=True))
1165
1165
1166 if tr:
1166 if tr:
1167 # ensure that subsequent tr.writepending returns True for
1167 # ensure that subsequent tr.writepending returns True for
1168 # changes written out above, even if dirstate is never
1168 # changes written out above, even if dirstate is never
1169 # changed after this
1169 # changed after this
1170 tr.addfilegenerator('dirstate', (self._filename,),
1170 tr.addfilegenerator('dirstate', (self._filename,),
1171 self._writedirstate, location='plain')
1171 self._writedirstate, location='plain')
1172
1172
1173 # ensure that pending file written above is unlinked at
1173 # ensure that pending file written above is unlinked at
1174 # failure, even if tr.writepending isn't invoked until the
1174 # failure, even if tr.writepending isn't invoked until the
1175 # end of this transaction
1175 # end of this transaction
1176 tr.registertmp(filename, location='plain')
1176 tr.registertmp(filename, location='plain')
1177
1177
1178 self._opener.tryunlink(backupname)
1178 self._opener.tryunlink(backupname)
1179 # hardlink backup is okay because _writedirstate is always called
1179 # hardlink backup is okay because _writedirstate is always called
1180 # with an "atomictemp=True" file.
1180 # with an "atomictemp=True" file.
1181 util.copyfile(self._opener.join(filename),
1181 util.copyfile(self._opener.join(filename),
1182 self._opener.join(backupname), hardlink=True)
1182 self._opener.join(backupname), hardlink=True)
1183
1183
1184 def restorebackup(self, tr, backupname):
1184 def restorebackup(self, tr, backupname):
1185 '''Restore dirstate by backup file'''
1185 '''Restore dirstate by backup file'''
1186 # this "invalidate()" prevents "wlock.release()" from writing
1186 # this "invalidate()" prevents "wlock.release()" from writing
1187 # changes of dirstate out after restoring from backup file
1187 # changes of dirstate out after restoring from backup file
1188 self.invalidate()
1188 self.invalidate()
1189 filename = self._actualfilename(tr)
1189 filename = self._actualfilename(tr)
1190 self._opener.rename(backupname, filename, checkambig=True)
1190 o = self._opener
1191 if util.samefile(o.join(backupname), o.join(filename)):
1192 o.unlink(backupname)
1193 else:
1194 o.rename(backupname, filename, checkambig=True)
1191
1195
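The new samefile check above matters because savebackup creates the backup as a hardlink of the dirstate whenever possible. If the dirstate was never rewritten afterwards, both names still point at the same file; on POSIX, rename() over another name for the same inode is defined to do nothing, which is how stale backup files used to survive (see the test change below). Unlinking the backup instead both "restores" it and cleans up. A self-contained, POSIX-flavoured sketch of the check, using os.path.samefile as a stand-in for util.samefile (os.link may not be available on every filesystem):

import os
import tempfile

# Invented paths; the real code compares .hg/dirstate with its backup.
d = tempfile.mkdtemp()
dirstate = os.path.join(d, 'dirstate')
backup = os.path.join(d, 'dirstate.backup')

with open(dirstate, 'w') as f:
    f.write('state')
os.link(dirstate, backup)              # hardlink backup, as savebackup does

if os.path.samefile(backup, dirstate):
    os.unlink(backup)                  # identical: just drop the extra name
else:
    os.rename(backup, dirstate)        # diverged: put the saved copy back

print(os.listdir(d))                   # ['dirstate']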
1192 def clearbackup(self, tr, backupname):
1196 def clearbackup(self, tr, backupname):
1193 '''Clear backup file'''
1197 '''Clear backup file'''
1194 self._opener.unlink(backupname)
1198 self._opener.unlink(backupname)
1195
1199
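For context, a hedged sketch of how a caller such as the import code (exercised by the test below) might drive these three hooks: back the dirstate up before a risky operation, restore it on failure, clear it on success. The class and the backup name are invented stand-ins, not the real calling code, whose backup names also carry a unique suffix.

class fakedirstate(object):
    # Minimal stand-in exposing the same three entry points.
    def savebackup(self, tr, backupname):
        print('saved to %s' % backupname)
    def restorebackup(self, tr, backupname):
        print('restored from %s' % backupname)
    def clearbackup(self, tr, backupname):
        print('cleared %s' % backupname)

def runwithbackup(dirstate, op, backupname='dirstate.backup.import'):
    dirstate.savebackup(None, backupname)     # tr=None: outside a transaction
    try:
        op()
    except Exception:
        dirstate.restorebackup(None, backupname)
        raise
    else:
        dirstate.clearbackup(None, backupname)

runwithbackup(fakedirstate(), lambda: None)   # success: the backup is cleared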
1196 class dirstatemap(object):
1200 class dirstatemap(object):
1197 def __init__(self, ui, opener, root):
1201 def __init__(self, ui, opener, root):
1198 self._ui = ui
1202 self._ui = ui
1199 self._opener = opener
1203 self._opener = opener
1200 self._root = root
1204 self._root = root
1201 self._filename = 'dirstate'
1205 self._filename = 'dirstate'
1202
1206
1203 self._parents = None
1207 self._parents = None
1204 self._dirtyparents = False
1208 self._dirtyparents = False
1205
1209
1206 # for consistent view between _pl() and _read() invocations
1210 # for consistent view between _pl() and _read() invocations
1207 self._pendingmode = None
1211 self._pendingmode = None
1208
1212
1209 @propertycache
1213 @propertycache
1210 def _map(self):
1214 def _map(self):
1211 self._map = {}
1215 self._map = {}
1212 self.read()
1216 self.read()
1213 return self._map
1217 return self._map
1214
1218
1215 @propertycache
1219 @propertycache
1216 def copymap(self):
1220 def copymap(self):
1217 self.copymap = {}
1221 self.copymap = {}
1218 self._map
1222 self._map
1219 return self.copymap
1223 return self.copymap
1220
1224
1221 def clear(self):
1225 def clear(self):
1222 self._map.clear()
1226 self._map.clear()
1223 self.copymap.clear()
1227 self.copymap.clear()
1224 self.setparents(nullid, nullid)
1228 self.setparents(nullid, nullid)
1225
1229
1226 def iteritems(self):
1230 def iteritems(self):
1227 return self._map.iteritems()
1231 return self._map.iteritems()
1228
1232
1229 def __len__(self):
1233 def __len__(self):
1230 return len(self._map)
1234 return len(self._map)
1231
1235
1232 def __iter__(self):
1236 def __iter__(self):
1233 return iter(self._map)
1237 return iter(self._map)
1234
1238
1235 def get(self, key, default=None):
1239 def get(self, key, default=None):
1236 return self._map.get(key, default)
1240 return self._map.get(key, default)
1237
1241
1238 def __contains__(self, key):
1242 def __contains__(self, key):
1239 return key in self._map
1243 return key in self._map
1240
1244
1241 def __setitem__(self, key, value):
1245 def __setitem__(self, key, value):
1242 self._map[key] = value
1246 self._map[key] = value
1243
1247
1244 def __getitem__(self, key):
1248 def __getitem__(self, key):
1245 return self._map[key]
1249 return self._map[key]
1246
1250
1247 def __delitem__(self, key):
1251 def __delitem__(self, key):
1248 del self._map[key]
1252 del self._map[key]
1249
1253
1250 def keys(self):
1254 def keys(self):
1251 return self._map.keys()
1255 return self._map.keys()
1252
1256
1253 def preload(self):
1257 def preload(self):
1254 """Loads the underlying data, if it's not already loaded"""
1258 """Loads the underlying data, if it's not already loaded"""
1255 self._map
1259 self._map
1256
1260
1257 def nonnormalentries(self):
1261 def nonnormalentries(self):
1258 '''Compute the nonnormal dirstate entries from the dmap'''
1262 '''Compute the nonnormal dirstate entries from the dmap'''
1259 try:
1263 try:
1260 return parsers.nonnormalotherparententries(self._map)
1264 return parsers.nonnormalotherparententries(self._map)
1261 except AttributeError:
1265 except AttributeError:
1262 nonnorm = set()
1266 nonnorm = set()
1263 otherparent = set()
1267 otherparent = set()
1264 for fname, e in self._map.iteritems():
1268 for fname, e in self._map.iteritems():
1265 if e[0] != 'n' or e[3] == -1:
1269 if e[0] != 'n' or e[3] == -1:
1266 nonnorm.add(fname)
1270 nonnorm.add(fname)
1267 if e[0] == 'n' and e[2] == -2:
1271 if e[0] == 'n' and e[2] == -2:
1268 otherparent.add(fname)
1272 otherparent.add(fname)
1269 return nonnorm, otherparent
1273 return nonnorm, otherparent
1270
1274
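The pure-Python fallback above classifies entries from their (state, mode, size, mtime) tuples: any entry whose state is not 'n', or whose mtime is the -1 "needs lookup" marker, is nonnormal, and a normal entry recorded with size -2 came from the other merge parent. A tiny sketch over hand-written tuples:

entries = {
    'clean.py':  ('n', 0o644, 10, 1500000000),  # normal, mtime known
    'unsure.py': ('n', 0o644, 10, -1),          # mtime unknown -> nonnormal
    'added.py':  ('a', 0o644, -1, -1),          # added -> nonnormal
    'otherp.py': ('n', 0o644, -2, -1),          # from the other parent
}

nonnorm = set(f for f, e in entries.items() if e[0] != 'n' or e[3] == -1)
otherparent = set(f for f, e in entries.items() if e[0] == 'n' and e[2] == -2)
print(sorted(nonnorm))       # ['added.py', 'otherp.py', 'unsure.py']
print(sorted(otherparent))   # ['otherp.py']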
1271 @propertycache
1275 @propertycache
1272 def filefoldmap(self):
1276 def filefoldmap(self):
1273 """Returns a dictionary mapping normalized case paths to their
1277 """Returns a dictionary mapping normalized case paths to their
1274 non-normalized versions.
1278 non-normalized versions.
1275 """
1279 """
1276 try:
1280 try:
1277 makefilefoldmap = parsers.make_file_foldmap
1281 makefilefoldmap = parsers.make_file_foldmap
1278 except AttributeError:
1282 except AttributeError:
1279 pass
1283 pass
1280 else:
1284 else:
1281 return makefilefoldmap(self._map, util.normcasespec,
1285 return makefilefoldmap(self._map, util.normcasespec,
1282 util.normcasefallback)
1286 util.normcasefallback)
1283
1287
1284 f = {}
1288 f = {}
1285 normcase = util.normcase
1289 normcase = util.normcase
1286 for name, s in self._map.iteritems():
1290 for name, s in self._map.iteritems():
1287 if s[0] != 'r':
1291 if s[0] != 'r':
1288 f[normcase(name)] = name
1292 f[normcase(name)] = name
1289 f['.'] = '.' # prevents useless util.fspath() invocation
1293 f['.'] = '.' # prevents useless util.fspath() invocation
1290 return f
1294 return f
1291
1295
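On a case-insensitive filesystem this map lets the dirstate recover the tracked spelling of a path from whatever case the operating system reports. A sketch of the fallback construction, with str.lower standing in for util.normcase (the real normalization is platform-specific) and removed entries skipped just as above:

dmap = {'README.txt': ('n',), 'src/Main.py': ('n',), 'gone.py': ('r',)}

normcase = str.lower                      # stand-in for util.normcase
foldmap = dict((normcase(name), name)
               for name, e in dmap.items() if e[0] != 'r')
foldmap['.'] = '.'                        # avoid a useless fspath() call

print(foldmap.get('readme.txt'))          # 'README.txt'
print(foldmap.get('gone.py'))             # None: removed entries are skipped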
1292 @propertycache
1296 @propertycache
1293 def dirs(self):
1297 def dirs(self):
1294 """Returns a set-like object containing all the directories in the
1298 """Returns a set-like object containing all the directories in the
1295 current dirstate.
1299 current dirstate.
1296 """
1300 """
1297 return util.dirs(self._map, 'r')
1301 return util.dirs(self._map, 'r')
1298
1302
1299 def _opendirstatefile(self):
1303 def _opendirstatefile(self):
1300 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1304 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1301 if self._pendingmode is not None and self._pendingmode != mode:
1305 if self._pendingmode is not None and self._pendingmode != mode:
1302 fp.close()
1306 fp.close()
1303 raise error.Abort(_('working directory state may be '
1307 raise error.Abort(_('working directory state may be '
1304 'changed parallelly'))
1308 'changed parallelly'))
1305 self._pendingmode = mode
1309 self._pendingmode = mode
1306 return fp
1310 return fp
1307
1311
1308 def parents(self):
1312 def parents(self):
1309 if not self._parents:
1313 if not self._parents:
1310 try:
1314 try:
1311 fp = self._opendirstatefile()
1315 fp = self._opendirstatefile()
1312 st = fp.read(40)
1316 st = fp.read(40)
1313 fp.close()
1317 fp.close()
1314 except IOError as err:
1318 except IOError as err:
1315 if err.errno != errno.ENOENT:
1319 if err.errno != errno.ENOENT:
1316 raise
1320 raise
1317 # File doesn't exist, so the current state is empty
1321 # File doesn't exist, so the current state is empty
1318 st = ''
1322 st = ''
1319
1323
1320 l = len(st)
1324 l = len(st)
1321 if l == 40:
1325 if l == 40:
1322 self._parents = st[:20], st[20:40]
1326 self._parents = st[:20], st[20:40]
1323 elif l == 0:
1327 elif l == 0:
1324 self._parents = [nullid, nullid]
1328 self._parents = [nullid, nullid]
1325 else:
1329 else:
1326 raise error.Abort(_('working directory state appears '
1330 raise error.Abort(_('working directory state appears '
1327 'damaged!'))
1331 'damaged!'))
1328
1332
1329 return self._parents
1333 return self._parents
1330
1334
1331 def setparents(self, p1, p2):
1335 def setparents(self, p1, p2):
1332 self._parents = (p1, p2)
1336 self._parents = (p1, p2)
1333 self._dirtyparents = True
1337 self._dirtyparents = True
1334
1338
1335 def read(self):
1339 def read(self):
1336 # ignore HG_PENDING because identity is used only for writing
1340 # ignore HG_PENDING because identity is used only for writing
1337 self.identity = util.filestat.frompath(
1341 self.identity = util.filestat.frompath(
1338 self._opener.join(self._filename))
1342 self._opener.join(self._filename))
1339
1343
1340 try:
1344 try:
1341 fp = self._opendirstatefile()
1345 fp = self._opendirstatefile()
1342 try:
1346 try:
1343 st = fp.read()
1347 st = fp.read()
1344 finally:
1348 finally:
1345 fp.close()
1349 fp.close()
1346 except IOError as err:
1350 except IOError as err:
1347 if err.errno != errno.ENOENT:
1351 if err.errno != errno.ENOENT:
1348 raise
1352 raise
1349 return
1353 return
1350 if not st:
1354 if not st:
1351 return
1355 return
1352
1356
1353 if util.safehasattr(parsers, 'dict_new_presized'):
1357 if util.safehasattr(parsers, 'dict_new_presized'):
1354 # Make an estimate of the number of files in the dirstate based on
1358 # Make an estimate of the number of files in the dirstate based on
1355 # its size. From a linear regression on a set of real-world repos,
1359 # its size. From a linear regression on a set of real-world repos,
1356 # all over 10,000 files, the size of a dirstate entry is 85
1360 # all over 10,000 files, the size of a dirstate entry is 85
1357 # bytes. The cost of resizing is significantly higher than the cost
1361 # bytes. The cost of resizing is significantly higher than the cost
1358 # of filling in a larger presized dict, so subtract 20% from the
1362 # of filling in a larger presized dict, so subtract 20% from the
1359 # size.
1363 # size.
1360 #
1364 #
1361 # This heuristic is imperfect in many ways, so in a future dirstate
1365 # This heuristic is imperfect in many ways, so in a future dirstate
1362 # format update it makes sense to just record the number of entries
1366 # format update it makes sense to just record the number of entries
1363 # on write.
1367 # on write.
1364 self._map = parsers.dict_new_presized(len(st) / 71)
1368 self._map = parsers.dict_new_presized(len(st) / 71)
1365
1369
1366 # Python's garbage collector triggers a GC each time a certain number
1370 # Python's garbage collector triggers a GC each time a certain number
1367 # of container objects (the number being defined by
1371 # of container objects (the number being defined by
1368 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1372 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1369 # for each file in the dirstate. The C version then immediately marks
1373 # for each file in the dirstate. The C version then immediately marks
1370 # them as not to be tracked by the collector. However, this has no
1374 # them as not to be tracked by the collector. However, this has no
1371 # effect on when GCs are triggered, only on what objects the GC looks
1375 # effect on when GCs are triggered, only on what objects the GC looks
1372 # into. This means that O(number of files) GCs are unavoidable.
1376 # into. This means that O(number of files) GCs are unavoidable.
1373 # Depending on when in the process's lifetime the dirstate is parsed,
1377 # Depending on when in the process's lifetime the dirstate is parsed,
1374 # this can get very expensive. As a workaround, disable GC while
1378 # this can get very expensive. As a workaround, disable GC while
1375 # parsing the dirstate.
1379 # parsing the dirstate.
1376 #
1380 #
1377 # (we cannot decorate the function directly since it is in a C module)
1381 # (we cannot decorate the function directly since it is in a C module)
1378 parse_dirstate = util.nogc(parsers.parse_dirstate)
1382 parse_dirstate = util.nogc(parsers.parse_dirstate)
1379 p = parse_dirstate(self._map, self.copymap, st)
1383 p = parse_dirstate(self._map, self.copymap, st)
1380 if not self._dirtyparents:
1384 if not self._dirtyparents:
1381 self.setparents(*p)
1385 self.setparents(*p)
1382
1386
1383 # Avoid excess attribute lookups by fast pathing certain checks
1387 # Avoid excess attribute lookups by fast pathing certain checks
1384 self.__contains__ = self._map.__contains__
1388 self.__contains__ = self._map.__contains__
1385 self.__getitem__ = self._map.__getitem__
1389 self.__getitem__ = self._map.__getitem__
1386 self.__setitem__ = self._map.__setitem__
1390 self.__setitem__ = self._map.__setitem__
1387 self.__delitem__ = self._map.__delitem__
1391 self.__delitem__ = self._map.__delitem__
1388 self.get = self._map.get
1392 self.get = self._map.get
1389
1393
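The long comment above explains why parsing runs with the garbage collector disabled. A minimal sketch of that idea using the standard gc module; util.nogc itself is a decorator in Mercurial's util module, so this is only the concept, not its implementation:

import contextlib
import gc

@contextlib.contextmanager
def nogc():
    # Disable the cyclic collector so that allocating one container per
    # dirstate entry does not trigger O(number of entries) collections.
    wasenabled = gc.isenabled()
    gc.disable()
    try:
        yield
    finally:
        if wasenabled:
            gc.enable()

with nogc():
    entries = [(str(i), ('n', 0o644, 0, 0)) for i in range(100000)]
print(len(entries))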
1390 def write(self, st, now):
1394 def write(self, st, now):
1391 st.write(parsers.pack_dirstate(self._map, self.copymap,
1395 st.write(parsers.pack_dirstate(self._map, self.copymap,
1392 self.parents(), now))
1396 self.parents(), now))
1393 st.close()
1397 st.close()
1394 self._dirtyparents = False
1398 self._dirtyparents = False
1395 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1399 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1396
1400
1397 @propertycache
1401 @propertycache
1398 def nonnormalset(self):
1402 def nonnormalset(self):
1399 nonnorm, otherparents = self.nonnormalentries()
1403 nonnorm, otherparents = self.nonnormalentries()
1400 self.otherparentset = otherparents
1404 self.otherparentset = otherparents
1401 return nonnorm
1405 return nonnorm
1402
1406
1403 @propertycache
1407 @propertycache
1404 def otherparentset(self):
1408 def otherparentset(self):
1405 nonnorm, otherparents = self.nonnormalentries()
1409 nonnorm, otherparents = self.nonnormalentries()
1406 self.nonnormalset = nonnorm
1410 self.nonnormalset = nonnorm
1407 return otherparents
1411 return otherparents
1408
1412
1409 @propertycache
1413 @propertycache
1410 def identity(self):
1414 def identity(self):
1411 self._map
1415 self._map
1412 return self.identity
1416 return self.identity
1413
1417
1414 @propertycache
1418 @propertycache
1415 def dirfoldmap(self):
1419 def dirfoldmap(self):
1416 f = {}
1420 f = {}
1417 normcase = util.normcase
1421 normcase = util.normcase
1418 for name in self.dirs:
1422 for name in self.dirs:
1419 f[normcase(name)] = name
1423 f[normcase(name)] = name
1420 return f
1424 return f
@@ -1,19 +1,18 b''
1 Set up
1 Set up
2
2
3 $ hg init repo
3 $ hg init repo
4 $ cd repo
4 $ cd repo
5
5
6 Try to import an empty patch
6 Try to import an empty patch
7
7
8 $ hg import --no-commit - <<EOF
8 $ hg import --no-commit - <<EOF
9 > EOF
9 > EOF
10 applying patch from stdin
10 applying patch from stdin
11 abort: stdin: no diffs found
11 abort: stdin: no diffs found
12 [255]
12 [255]
13
13
14 A dirstate backup is left behind
14 No dirstate backups are left behind
15
15
16 $ ls .hg/dirstate* | sort
16 $ ls .hg/dirstate* | sort
17 .hg/dirstate
17 .hg/dirstate
18 .hg/dirstate.backup.import.* (glob)
19
18