dirstate: remove _droppath method...
Mark Thomas
r35082:e8ae0b2d default
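This changeset deletes the one-line _droppath helper from mercurial/dirstate.py and inlines its body, self._updatedfiles.add(f), at the two remaining call sites in remove() and drop(). A trimmed before/after sketch of the drop() call site, taken from the hunk below rather than a complete listing:

    # before: drop() routed the bookkeeping through the helper
    def _droppath(self, f):
        self._updatedfiles.add(f)

    def drop(self, f):
        '''Drop a file from the dirstate'''
        oldstate = self[f]
        if self._map.dropfile(f, oldstate):
            self._dirty = True
            self._droppath(f)
            self._map.copymap.pop(f, None)

    # after: the helper is gone and the caller records the update directly
    def drop(self, f):
        '''Drop a file from the dirstate'''
        oldstate = self[f]
        if self._map.dropfile(f, oldstate):
            self._dirty = True
            self._updatedfiles.add(f)
            self._map.copymap.pop(f, None)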
@@ -1,1481 +1,1478 @@
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import nullid
17 from .node import nullid
18 from . import (
18 from . import (
19 encoding,
19 encoding,
20 error,
20 error,
21 match as matchmod,
21 match as matchmod,
22 pathutil,
22 pathutil,
23 policy,
23 policy,
24 pycompat,
24 pycompat,
25 scmutil,
25 scmutil,
26 txnutil,
26 txnutil,
27 util,
27 util,
28 )
28 )
29
29
30 parsers = policy.importmod(r'parsers')
30 parsers = policy.importmod(r'parsers')
31
31
32 propertycache = util.propertycache
32 propertycache = util.propertycache
33 filecache = scmutil.filecache
33 filecache = scmutil.filecache
34 _rangemask = 0x7fffffff
34 _rangemask = 0x7fffffff
35
35
36 dirstatetuple = parsers.dirstatetuple
36 dirstatetuple = parsers.dirstatetuple
37
37
38 class repocache(filecache):
38 class repocache(filecache):
39 """filecache for files in .hg/"""
39 """filecache for files in .hg/"""
40 def join(self, obj, fname):
40 def join(self, obj, fname):
41 return obj._opener.join(fname)
41 return obj._opener.join(fname)
42
42
43 class rootcache(filecache):
43 class rootcache(filecache):
44 """filecache for files in the repository root"""
44 """filecache for files in the repository root"""
45 def join(self, obj, fname):
45 def join(self, obj, fname):
46 return obj._join(fname)
46 return obj._join(fname)
47
47
48 def _getfsnow(vfs):
48 def _getfsnow(vfs):
49 '''Get "now" timestamp on filesystem'''
49 '''Get "now" timestamp on filesystem'''
50 tmpfd, tmpname = vfs.mkstemp()
50 tmpfd, tmpname = vfs.mkstemp()
51 try:
51 try:
52 return os.fstat(tmpfd).st_mtime
52 return os.fstat(tmpfd).st_mtime
53 finally:
53 finally:
54 os.close(tmpfd)
54 os.close(tmpfd)
55 vfs.unlink(tmpname)
55 vfs.unlink(tmpname)
56
56
57 class dirstate(object):
57 class dirstate(object):
58
58
59 def __init__(self, opener, ui, root, validate, sparsematchfn):
59 def __init__(self, opener, ui, root, validate, sparsematchfn):
60 '''Create a new dirstate object.
60 '''Create a new dirstate object.
61
61
62 opener is an open()-like callable that can be used to open the
62 opener is an open()-like callable that can be used to open the
63 dirstate file; root is the root of the directory tracked by
63 dirstate file; root is the root of the directory tracked by
64 the dirstate.
64 the dirstate.
65 '''
65 '''
66 self._opener = opener
66 self._opener = opener
67 self._validate = validate
67 self._validate = validate
68 self._root = root
68 self._root = root
69 self._sparsematchfn = sparsematchfn
69 self._sparsematchfn = sparsematchfn
70 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
70 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
71 # UNC path pointing to root share (issue4557)
71 # UNC path pointing to root share (issue4557)
72 self._rootdir = pathutil.normasprefix(root)
72 self._rootdir = pathutil.normasprefix(root)
73 self._dirty = False
73 self._dirty = False
74 self._lastnormaltime = 0
74 self._lastnormaltime = 0
75 self._ui = ui
75 self._ui = ui
76 self._filecache = {}
76 self._filecache = {}
77 self._parentwriters = 0
77 self._parentwriters = 0
78 self._filename = 'dirstate'
78 self._filename = 'dirstate'
79 self._pendingfilename = '%s.pending' % self._filename
79 self._pendingfilename = '%s.pending' % self._filename
80 self._plchangecallbacks = {}
80 self._plchangecallbacks = {}
81 self._origpl = None
81 self._origpl = None
82 self._updatedfiles = set()
82 self._updatedfiles = set()
83
83
84 @contextlib.contextmanager
84 @contextlib.contextmanager
85 def parentchange(self):
85 def parentchange(self):
86 '''Context manager for handling dirstate parents.
86 '''Context manager for handling dirstate parents.
87
87
88 If an exception occurs in the scope of the context manager,
88 If an exception occurs in the scope of the context manager,
89 the incoherent dirstate won't be written when wlock is
89 the incoherent dirstate won't be written when wlock is
90 released.
90 released.
91 '''
91 '''
92 self._parentwriters += 1
92 self._parentwriters += 1
93 yield
93 yield
94 # Typically we want the "undo" step of a context manager in a
94 # Typically we want the "undo" step of a context manager in a
95 # finally block so it happens even when an exception
95 # finally block so it happens even when an exception
96 # occurs. In this case, however, we only want to decrement
96 # occurs. In this case, however, we only want to decrement
97 # parentwriters if the code in the with statement exits
97 # parentwriters if the code in the with statement exits
98 # normally, so we don't have a try/finally here on purpose.
98 # normally, so we don't have a try/finally here on purpose.
99 self._parentwriters -= 1
99 self._parentwriters -= 1
100
100
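For orientation, a minimal usage sketch of the context manager above; ds stands for a dirstate instance and p1 for a binary changeset id, both placeholders rather than part of this change:

    # setparents() refuses to run unless a parent change is in progress,
    # so callers wrap it in parentchange().
    with ds.parentchange():
        ds.setparents(p1)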
101 def beginparentchange(self):
101 def beginparentchange(self):
102 '''Marks the beginning of a set of changes that involve changing
102 '''Marks the beginning of a set of changes that involve changing
103 the dirstate parents. If there is an exception during this time,
103 the dirstate parents. If there is an exception during this time,
104 the dirstate will not be written when the wlock is released. This
104 the dirstate will not be written when the wlock is released. This
105 prevents writing an incoherent dirstate where the parent doesn't
105 prevents writing an incoherent dirstate where the parent doesn't
106 match the contents.
106 match the contents.
107 '''
107 '''
108 self._ui.deprecwarn('beginparentchange is obsoleted by the '
108 self._ui.deprecwarn('beginparentchange is obsoleted by the '
109 'parentchange context manager.', '4.3')
109 'parentchange context manager.', '4.3')
110 self._parentwriters += 1
110 self._parentwriters += 1
111
111
112 def endparentchange(self):
112 def endparentchange(self):
113 '''Marks the end of a set of changes that involve changing the
113 '''Marks the end of a set of changes that involve changing the
114 dirstate parents. Once all parent changes have been marked done,
114 dirstate parents. Once all parent changes have been marked done,
115 the wlock will be free to write the dirstate on release.
115 the wlock will be free to write the dirstate on release.
116 '''
116 '''
117 self._ui.deprecwarn('endparentchange is obsoleted by the '
117 self._ui.deprecwarn('endparentchange is obsoleted by the '
118 'parentchange context manager.', '4.3')
118 'parentchange context manager.', '4.3')
119 if self._parentwriters > 0:
119 if self._parentwriters > 0:
120 self._parentwriters -= 1
120 self._parentwriters -= 1
121
121
122 def pendingparentchange(self):
122 def pendingparentchange(self):
123 '''Returns true if the dirstate is in the middle of a set of changes
123 '''Returns true if the dirstate is in the middle of a set of changes
124 that modify the dirstate parent.
124 that modify the dirstate parent.
125 '''
125 '''
126 return self._parentwriters > 0
126 return self._parentwriters > 0
127
127
128 @propertycache
128 @propertycache
129 def _map(self):
129 def _map(self):
130 """Return the dirstate contents (see documentation for dirstatemap)."""
130 """Return the dirstate contents (see documentation for dirstatemap)."""
131 self._map = dirstatemap(self._ui, self._opener, self._root)
131 self._map = dirstatemap(self._ui, self._opener, self._root)
132 return self._map
132 return self._map
133
133
134 @property
134 @property
135 def _sparsematcher(self):
135 def _sparsematcher(self):
136 """The matcher for the sparse checkout.
136 """The matcher for the sparse checkout.
137
137
138 The working directory may not include every file from a manifest. The
138 The working directory may not include every file from a manifest. The
139 matcher obtained by this property will match a path if it is to be
139 matcher obtained by this property will match a path if it is to be
140 included in the working directory.
140 included in the working directory.
141 """
141 """
142 # TODO there is potential to cache this property. For now, the matcher
142 # TODO there is potential to cache this property. For now, the matcher
143 # is resolved on every access. (But the called function does use a
143 # is resolved on every access. (But the called function does use a
144 # cache to keep the lookup fast.)
144 # cache to keep the lookup fast.)
145 return self._sparsematchfn()
145 return self._sparsematchfn()
146
146
147 @repocache('branch')
147 @repocache('branch')
148 def _branch(self):
148 def _branch(self):
149 try:
149 try:
150 return self._opener.read("branch").strip() or "default"
150 return self._opener.read("branch").strip() or "default"
151 except IOError as inst:
151 except IOError as inst:
152 if inst.errno != errno.ENOENT:
152 if inst.errno != errno.ENOENT:
153 raise
153 raise
154 return "default"
154 return "default"
155
155
156 @property
156 @property
157 def _pl(self):
157 def _pl(self):
158 return self._map.parents()
158 return self._map.parents()
159
159
160 def dirs(self):
160 def dirs(self):
161 return self._map.dirs
161 return self._map.dirs
162
162
163 @rootcache('.hgignore')
163 @rootcache('.hgignore')
164 def _ignore(self):
164 def _ignore(self):
165 files = self._ignorefiles()
165 files = self._ignorefiles()
166 if not files:
166 if not files:
167 return matchmod.never(self._root, '')
167 return matchmod.never(self._root, '')
168
168
169 pats = ['include:%s' % f for f in files]
169 pats = ['include:%s' % f for f in files]
170 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
170 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
171
171
172 @propertycache
172 @propertycache
173 def _slash(self):
173 def _slash(self):
174 return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'
174 return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'
175
175
176 @propertycache
176 @propertycache
177 def _checklink(self):
177 def _checklink(self):
178 return util.checklink(self._root)
178 return util.checklink(self._root)
179
179
180 @propertycache
180 @propertycache
181 def _checkexec(self):
181 def _checkexec(self):
182 return util.checkexec(self._root)
182 return util.checkexec(self._root)
183
183
184 @propertycache
184 @propertycache
185 def _checkcase(self):
185 def _checkcase(self):
186 return not util.fscasesensitive(self._join('.hg'))
186 return not util.fscasesensitive(self._join('.hg'))
187
187
188 def _join(self, f):
188 def _join(self, f):
189 # much faster than os.path.join()
189 # much faster than os.path.join()
190 # it's safe because f is always a relative path
190 # it's safe because f is always a relative path
191 return self._rootdir + f
191 return self._rootdir + f
192
192
193 def flagfunc(self, buildfallback):
193 def flagfunc(self, buildfallback):
194 if self._checklink and self._checkexec:
194 if self._checklink and self._checkexec:
195 def f(x):
195 def f(x):
196 try:
196 try:
197 st = os.lstat(self._join(x))
197 st = os.lstat(self._join(x))
198 if util.statislink(st):
198 if util.statislink(st):
199 return 'l'
199 return 'l'
200 if util.statisexec(st):
200 if util.statisexec(st):
201 return 'x'
201 return 'x'
202 except OSError:
202 except OSError:
203 pass
203 pass
204 return ''
204 return ''
205 return f
205 return f
206
206
207 fallback = buildfallback()
207 fallback = buildfallback()
208 if self._checklink:
208 if self._checklink:
209 def f(x):
209 def f(x):
210 if os.path.islink(self._join(x)):
210 if os.path.islink(self._join(x)):
211 return 'l'
211 return 'l'
212 if 'x' in fallback(x):
212 if 'x' in fallback(x):
213 return 'x'
213 return 'x'
214 return ''
214 return ''
215 return f
215 return f
216 if self._checkexec:
216 if self._checkexec:
217 def f(x):
217 def f(x):
218 if 'l' in fallback(x):
218 if 'l' in fallback(x):
219 return 'l'
219 return 'l'
220 if util.isexec(self._join(x)):
220 if util.isexec(self._join(x)):
221 return 'x'
221 return 'x'
222 return ''
222 return ''
223 return f
223 return f
224 else:
224 else:
225 return fallback
225 return fallback
226
226
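A rough sketch of how flagfunc() is consumed; buildfallback is a placeholder callable that returns a manifest-style flag lookup and is only consulted when the filesystem cannot answer directly:

    # fallback that reports no flags at all (illustrative only)
    flags = ds.flagfunc(lambda: (lambda path: ''))
    flags('some/file')   # 'l' for a symlink, 'x' for an executable, '' otherwise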
227 @propertycache
227 @propertycache
228 def _cwd(self):
228 def _cwd(self):
229 # internal config: ui.forcecwd
229 # internal config: ui.forcecwd
230 forcecwd = self._ui.config('ui', 'forcecwd')
230 forcecwd = self._ui.config('ui', 'forcecwd')
231 if forcecwd:
231 if forcecwd:
232 return forcecwd
232 return forcecwd
233 return pycompat.getcwd()
233 return pycompat.getcwd()
234
234
235 def getcwd(self):
235 def getcwd(self):
236 '''Return the path from which a canonical path is calculated.
236 '''Return the path from which a canonical path is calculated.
237
237
238 This path should be used to resolve file patterns or to convert
238 This path should be used to resolve file patterns or to convert
239 canonical paths back to file paths for display. It shouldn't be
239 canonical paths back to file paths for display. It shouldn't be
240 used to get real file paths. Use vfs functions instead.
240 used to get real file paths. Use vfs functions instead.
241 '''
241 '''
242 cwd = self._cwd
242 cwd = self._cwd
243 if cwd == self._root:
243 if cwd == self._root:
244 return ''
244 return ''
245 # self._root ends with a path separator if self._root is '/' or 'C:\'
245 # self._root ends with a path separator if self._root is '/' or 'C:\'
246 rootsep = self._root
246 rootsep = self._root
247 if not util.endswithsep(rootsep):
247 if not util.endswithsep(rootsep):
248 rootsep += pycompat.ossep
248 rootsep += pycompat.ossep
249 if cwd.startswith(rootsep):
249 if cwd.startswith(rootsep):
250 return cwd[len(rootsep):]
250 return cwd[len(rootsep):]
251 else:
251 else:
252 # we're outside the repo. return an absolute path.
252 # we're outside the repo. return an absolute path.
253 return cwd
253 return cwd
254
254
255 def pathto(self, f, cwd=None):
255 def pathto(self, f, cwd=None):
256 if cwd is None:
256 if cwd is None:
257 cwd = self.getcwd()
257 cwd = self.getcwd()
258 path = util.pathto(self._root, cwd, f)
258 path = util.pathto(self._root, cwd, f)
259 if self._slash:
259 if self._slash:
260 return util.pconvert(path)
260 return util.pconvert(path)
261 return path
261 return path
262
262
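A small illustration of pathto(); the path is a placeholder:

    # With cwd inside the repository this yields a path relative to that cwd,
    # converted to '/' separators when the ui.slash option applies.
    display = ds.pathto('lib/util.py')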
263 def __getitem__(self, key):
263 def __getitem__(self, key):
264 '''Return the current state of key (a filename) in the dirstate.
264 '''Return the current state of key (a filename) in the dirstate.
265
265
266 States are:
266 States are:
267 n normal
267 n normal
268 m needs merging
268 m needs merging
269 r marked for removal
269 r marked for removal
270 a marked for addition
270 a marked for addition
271 ? not tracked
271 ? not tracked
272 '''
272 '''
273 return self._map.get(key, ("?",))[0]
273 return self._map.get(key, ("?",))[0]
274
274
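Sketch of the state lookup above; the filename is a placeholder:

    state = ds['subdir/file.txt']   # one of 'n', 'm', 'r', 'a' or '?'
    if state == '?':
        pass  # the file is not tracked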
275 def __contains__(self, key):
275 def __contains__(self, key):
276 return key in self._map
276 return key in self._map
277
277
278 def __iter__(self):
278 def __iter__(self):
279 return iter(sorted(self._map))
279 return iter(sorted(self._map))
280
280
281 def items(self):
281 def items(self):
282 return self._map.iteritems()
282 return self._map.iteritems()
283
283
284 iteritems = items
284 iteritems = items
285
285
286 def parents(self):
286 def parents(self):
287 return [self._validate(p) for p in self._pl]
287 return [self._validate(p) for p in self._pl]
288
288
289 def p1(self):
289 def p1(self):
290 return self._validate(self._pl[0])
290 return self._validate(self._pl[0])
291
291
292 def p2(self):
292 def p2(self):
293 return self._validate(self._pl[1])
293 return self._validate(self._pl[1])
294
294
295 def branch(self):
295 def branch(self):
296 return encoding.tolocal(self._branch)
296 return encoding.tolocal(self._branch)
297
297
298 def setparents(self, p1, p2=nullid):
298 def setparents(self, p1, p2=nullid):
299 """Set dirstate parents to p1 and p2.
299 """Set dirstate parents to p1 and p2.
300
300
301 When moving from two parents to one, 'm' merged entries a
301 When moving from two parents to one, 'm' merged entries a
302 adjusted to normal and previous copy records discarded and
302 adjusted to normal and previous copy records discarded and
303 returned by the call.
303 returned by the call.
304
304
305 See localrepo.setparents()
305 See localrepo.setparents()
306 """
306 """
307 if self._parentwriters == 0:
307 if self._parentwriters == 0:
308 raise ValueError("cannot set dirstate parent without "
308 raise ValueError("cannot set dirstate parent without "
309 "calling dirstate.beginparentchange")
309 "calling dirstate.beginparentchange")
310
310
311 self._dirty = True
311 self._dirty = True
312 oldp2 = self._pl[1]
312 oldp2 = self._pl[1]
313 if self._origpl is None:
313 if self._origpl is None:
314 self._origpl = self._pl
314 self._origpl = self._pl
315 self._map.setparents(p1, p2)
315 self._map.setparents(p1, p2)
316 copies = {}
316 copies = {}
317 if oldp2 != nullid and p2 == nullid:
317 if oldp2 != nullid and p2 == nullid:
318 candidatefiles = self._map.nonnormalset.union(
318 candidatefiles = self._map.nonnormalset.union(
319 self._map.otherparentset)
319 self._map.otherparentset)
320 for f in candidatefiles:
320 for f in candidatefiles:
321 s = self._map.get(f)
321 s = self._map.get(f)
322 if s is None:
322 if s is None:
323 continue
323 continue
324
324
325 # Discard 'm' markers when moving away from a merge state
325 # Discard 'm' markers when moving away from a merge state
326 if s[0] == 'm':
326 if s[0] == 'm':
327 source = self._map.copymap.get(f)
327 source = self._map.copymap.get(f)
328 if source:
328 if source:
329 copies[f] = source
329 copies[f] = source
330 self.normallookup(f)
330 self.normallookup(f)
331 # Also fix up otherparent markers
331 # Also fix up otherparent markers
332 elif s[0] == 'n' and s[2] == -2:
332 elif s[0] == 'n' and s[2] == -2:
333 source = self._map.copymap.get(f)
333 source = self._map.copymap.get(f)
334 if source:
334 if source:
335 copies[f] = source
335 copies[f] = source
336 self.add(f)
336 self.add(f)
337 return copies
337 return copies
338
338
339 def setbranch(self, branch):
339 def setbranch(self, branch):
340 self._branch = encoding.fromlocal(branch)
340 self._branch = encoding.fromlocal(branch)
341 f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
341 f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
342 try:
342 try:
343 f.write(self._branch + '\n')
343 f.write(self._branch + '\n')
344 f.close()
344 f.close()
345
345
346 # make sure filecache has the correct stat info for _branch after
346 # make sure filecache has the correct stat info for _branch after
347 # replacing the underlying file
347 # replacing the underlying file
348 ce = self._filecache['_branch']
348 ce = self._filecache['_branch']
349 if ce:
349 if ce:
350 ce.refresh()
350 ce.refresh()
351 except: # re-raises
351 except: # re-raises
352 f.discard()
352 f.discard()
353 raise
353 raise
354
354
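A minimal sketch of the branch round trip defined above; the branch name is arbitrary:

    ds.setbranch('stable')          # persisted to .hg/branch with a trailing newline
    assert ds.branch() == 'stable'  # read back through the local encoding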
355 def invalidate(self):
355 def invalidate(self):
356 '''Causes the next access to reread the dirstate.
356 '''Causes the next access to reread the dirstate.
357
357
358 This is different from localrepo.invalidatedirstate() because it always
358 This is different from localrepo.invalidatedirstate() because it always
359 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
359 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
360 check whether the dirstate has changed before rereading it.'''
360 check whether the dirstate has changed before rereading it.'''
361
361
362 for a in ("_map", "_branch", "_ignore"):
362 for a in ("_map", "_branch", "_ignore"):
363 if a in self.__dict__:
363 if a in self.__dict__:
364 delattr(self, a)
364 delattr(self, a)
365 self._lastnormaltime = 0
365 self._lastnormaltime = 0
366 self._dirty = False
366 self._dirty = False
367 self._updatedfiles.clear()
367 self._updatedfiles.clear()
368 self._parentwriters = 0
368 self._parentwriters = 0
369 self._origpl = None
369 self._origpl = None
370
370
371 def copy(self, source, dest):
371 def copy(self, source, dest):
372 """Mark dest as a copy of source. Unmark dest if source is None."""
372 """Mark dest as a copy of source. Unmark dest if source is None."""
373 if source == dest:
373 if source == dest:
374 return
374 return
375 self._dirty = True
375 self._dirty = True
376 if source is not None:
376 if source is not None:
377 self._map.copymap[dest] = source
377 self._map.copymap[dest] = source
378 self._updatedfiles.add(source)
378 self._updatedfiles.add(source)
379 self._updatedfiles.add(dest)
379 self._updatedfiles.add(dest)
380 elif self._map.copymap.pop(dest, None):
380 elif self._map.copymap.pop(dest, None):
381 self._updatedfiles.add(dest)
381 self._updatedfiles.add(dest)
382
382
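Sketch of how the copy bookkeeping above is typically exercised; the file names are placeholders:

    ds.copy('old.txt', 'new.txt')            # record new.txt as a copy of old.txt
    assert ds.copied('new.txt') == 'old.txt'
    ds.copy(None, 'new.txt')                 # drop the copy record again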
383 def copied(self, file):
383 def copied(self, file):
384 return self._map.copymap.get(file, None)
384 return self._map.copymap.get(file, None)
385
385
386 def copies(self):
386 def copies(self):
387 return self._map.copymap
387 return self._map.copymap
388
388
- 389 def _droppath(self, f):
- 390 self._updatedfiles.add(f)
- 391
392 def _addpath(self, f, state, mode, size, mtime):
389 def _addpath(self, f, state, mode, size, mtime):
393 oldstate = self[f]
390 oldstate = self[f]
394 if state == 'a' or oldstate == 'r':
391 if state == 'a' or oldstate == 'r':
395 scmutil.checkfilename(f)
392 scmutil.checkfilename(f)
396 if f in self._map.dirs:
393 if f in self._map.dirs:
397 raise error.Abort(_('directory %r already in dirstate') % f)
394 raise error.Abort(_('directory %r already in dirstate') % f)
398 # shadows
395 # shadows
399 for d in util.finddirs(f):
396 for d in util.finddirs(f):
400 if d in self._map.dirs:
397 if d in self._map.dirs:
401 break
398 break
402 entry = self._map.get(d)
399 entry = self._map.get(d)
403 if entry is not None and entry[0] != 'r':
400 if entry is not None and entry[0] != 'r':
404 raise error.Abort(
401 raise error.Abort(
405 _('file %r in dirstate clashes with %r') % (d, f))
402 _('file %r in dirstate clashes with %r') % (d, f))
406 self._dirty = True
403 self._dirty = True
407 self._updatedfiles.add(f)
404 self._updatedfiles.add(f)
408 self._map.addfile(f, oldstate, state, mode, size, mtime)
405 self._map.addfile(f, oldstate, state, mode, size, mtime)
409
406
410 def normal(self, f):
407 def normal(self, f):
411 '''Mark a file normal and clean.'''
408 '''Mark a file normal and clean.'''
412 s = os.lstat(self._join(f))
409 s = os.lstat(self._join(f))
413 mtime = s.st_mtime
410 mtime = s.st_mtime
414 self._addpath(f, 'n', s.st_mode,
411 self._addpath(f, 'n', s.st_mode,
415 s.st_size & _rangemask, mtime & _rangemask)
412 s.st_size & _rangemask, mtime & _rangemask)
416 self._map.copymap.pop(f, None)
413 self._map.copymap.pop(f, None)
417 if f in self._map.nonnormalset:
414 if f in self._map.nonnormalset:
418 self._map.nonnormalset.remove(f)
415 self._map.nonnormalset.remove(f)
419 if mtime > self._lastnormaltime:
416 if mtime > self._lastnormaltime:
420 # Remember the most recent modification timeslot for status(),
417 # Remember the most recent modification timeslot for status(),
421 # to make sure we won't miss future size-preserving file content
418 # to make sure we won't miss future size-preserving file content
422 # modifications that happen within the same timeslot.
419 # modifications that happen within the same timeslot.
423 self._lastnormaltime = mtime
420 self._lastnormaltime = mtime
424
421
425 def normallookup(self, f):
422 def normallookup(self, f):
426 '''Mark a file normal, but possibly dirty.'''
423 '''Mark a file normal, but possibly dirty.'''
427 if self._pl[1] != nullid:
424 if self._pl[1] != nullid:
428 # if there is a merge going on and the file was either
425 # if there is a merge going on and the file was either
429 # in state 'm' (-1) or coming from other parent (-2) before
426 # in state 'm' (-1) or coming from other parent (-2) before
430 # being removed, restore that state.
427 # being removed, restore that state.
431 entry = self._map.get(f)
428 entry = self._map.get(f)
432 if entry is not None:
429 if entry is not None:
433 if entry[0] == 'r' and entry[2] in (-1, -2):
430 if entry[0] == 'r' and entry[2] in (-1, -2):
434 source = self._map.copymap.get(f)
431 source = self._map.copymap.get(f)
435 if entry[2] == -1:
432 if entry[2] == -1:
436 self.merge(f)
433 self.merge(f)
437 elif entry[2] == -2:
434 elif entry[2] == -2:
438 self.otherparent(f)
435 self.otherparent(f)
439 if source:
436 if source:
440 self.copy(source, f)
437 self.copy(source, f)
441 return
438 return
442 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
439 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
443 return
440 return
444 self._addpath(f, 'n', 0, -1, -1)
441 self._addpath(f, 'n', 0, -1, -1)
445 self._map.copymap.pop(f, None)
442 self._map.copymap.pop(f, None)
446
443
447 def otherparent(self, f):
444 def otherparent(self, f):
448 '''Mark as coming from the other parent, always dirty.'''
445 '''Mark as coming from the other parent, always dirty.'''
449 if self._pl[1] == nullid:
446 if self._pl[1] == nullid:
450 raise error.Abort(_("setting %r to other parent "
447 raise error.Abort(_("setting %r to other parent "
451 "only allowed in merges") % f)
448 "only allowed in merges") % f)
452 if f in self and self[f] == 'n':
449 if f in self and self[f] == 'n':
453 # merge-like
450 # merge-like
454 self._addpath(f, 'm', 0, -2, -1)
451 self._addpath(f, 'm', 0, -2, -1)
455 else:
452 else:
456 # add-like
453 # add-like
457 self._addpath(f, 'n', 0, -2, -1)
454 self._addpath(f, 'n', 0, -2, -1)
458 self._map.copymap.pop(f, None)
455 self._map.copymap.pop(f, None)
459
456
460 def add(self, f):
457 def add(self, f):
461 '''Mark a file added.'''
458 '''Mark a file added.'''
462 self._addpath(f, 'a', 0, -1, -1)
459 self._addpath(f, 'a', 0, -1, -1)
463 self._map.copymap.pop(f, None)
460 self._map.copymap.pop(f, None)
464
461
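Taken together with the state codes documented earlier, a small sketch of the add/remove markers; file names are placeholders:

    ds.add('newfile.txt')      # ds['newfile.txt'] becomes 'a'
    ds.remove('oldfile.txt')   # ds['oldfile.txt'] becomes 'r'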
465 def remove(self, f):
462 def remove(self, f):
466 '''Mark a file removed.'''
463 '''Mark a file removed.'''
467 self._dirty = True
464 self._dirty = True
- 468 self._droppath(f)
469 oldstate = self[f]
465 oldstate = self[f]
470 size = 0
466 size = 0
471 if self._pl[1] != nullid:
467 if self._pl[1] != nullid:
472 entry = self._map.get(f)
468 entry = self._map.get(f)
473 if entry is not None:
469 if entry is not None:
474 # backup the previous state
470 # backup the previous state
475 if entry[0] == 'm': # merge
471 if entry[0] == 'm': # merge
476 size = -1
472 size = -1
477 elif entry[0] == 'n' and entry[2] == -2: # other parent
473 elif entry[0] == 'n' and entry[2] == -2: # other parent
478 size = -2
474 size = -2
479 self._map.otherparentset.add(f)
475 self._map.otherparentset.add(f)
+ 476 self._updatedfiles.add(f)
480 self._map.removefile(f, oldstate, size)
477 self._map.removefile(f, oldstate, size)
481 if size == 0:
478 if size == 0:
482 self._map.copymap.pop(f, None)
479 self._map.copymap.pop(f, None)
483
480
484 def merge(self, f):
481 def merge(self, f):
485 '''Mark a file merged.'''
482 '''Mark a file merged.'''
486 if self._pl[1] == nullid:
483 if self._pl[1] == nullid:
487 return self.normallookup(f)
484 return self.normallookup(f)
488 return self.otherparent(f)
485 return self.otherparent(f)
489
486
490 def drop(self, f):
487 def drop(self, f):
491 '''Drop a file from the dirstate'''
488 '''Drop a file from the dirstate'''
492 oldstate = self[f]
489 oldstate = self[f]
493 if self._map.dropfile(f, oldstate):
490 if self._map.dropfile(f, oldstate):
494 self._dirty = True
491 self._dirty = True
- 495 self._droppath(f)
+ 492 self._updatedfiles.add(f)
496 self._map.copymap.pop(f, None)
493 self._map.copymap.pop(f, None)
497
494
498 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
495 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
499 if exists is None:
496 if exists is None:
500 exists = os.path.lexists(os.path.join(self._root, path))
497 exists = os.path.lexists(os.path.join(self._root, path))
501 if not exists:
498 if not exists:
502 # Maybe a path component exists
499 # Maybe a path component exists
503 if not ignoremissing and '/' in path:
500 if not ignoremissing and '/' in path:
504 d, f = path.rsplit('/', 1)
501 d, f = path.rsplit('/', 1)
505 d = self._normalize(d, False, ignoremissing, None)
502 d = self._normalize(d, False, ignoremissing, None)
506 folded = d + "/" + f
503 folded = d + "/" + f
507 else:
504 else:
508 # No path components, preserve original case
505 # No path components, preserve original case
509 folded = path
506 folded = path
510 else:
507 else:
511 # recursively normalize leading directory components
508 # recursively normalize leading directory components
512 # against dirstate
509 # against dirstate
513 if '/' in normed:
510 if '/' in normed:
514 d, f = normed.rsplit('/', 1)
511 d, f = normed.rsplit('/', 1)
515 d = self._normalize(d, False, ignoremissing, True)
512 d = self._normalize(d, False, ignoremissing, True)
516 r = self._root + "/" + d
513 r = self._root + "/" + d
517 folded = d + "/" + util.fspath(f, r)
514 folded = d + "/" + util.fspath(f, r)
518 else:
515 else:
519 folded = util.fspath(normed, self._root)
516 folded = util.fspath(normed, self._root)
520 storemap[normed] = folded
517 storemap[normed] = folded
521
518
522 return folded
519 return folded
523
520
524 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
521 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
525 normed = util.normcase(path)
522 normed = util.normcase(path)
526 folded = self._map.filefoldmap.get(normed, None)
523 folded = self._map.filefoldmap.get(normed, None)
527 if folded is None:
524 if folded is None:
528 if isknown:
525 if isknown:
529 folded = path
526 folded = path
530 else:
527 else:
531 folded = self._discoverpath(path, normed, ignoremissing, exists,
528 folded = self._discoverpath(path, normed, ignoremissing, exists,
532 self._map.filefoldmap)
529 self._map.filefoldmap)
533 return folded
530 return folded
534
531
535 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
532 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
536 normed = util.normcase(path)
533 normed = util.normcase(path)
537 folded = self._map.filefoldmap.get(normed, None)
534 folded = self._map.filefoldmap.get(normed, None)
538 if folded is None:
535 if folded is None:
539 folded = self._map.dirfoldmap.get(normed, None)
536 folded = self._map.dirfoldmap.get(normed, None)
540 if folded is None:
537 if folded is None:
541 if isknown:
538 if isknown:
542 folded = path
539 folded = path
543 else:
540 else:
544 # store discovered result in dirfoldmap so that future
541 # store discovered result in dirfoldmap so that future
545 # normalizefile calls don't start matching directories
542 # normalizefile calls don't start matching directories
546 folded = self._discoverpath(path, normed, ignoremissing, exists,
543 folded = self._discoverpath(path, normed, ignoremissing, exists,
547 self._map.dirfoldmap)
544 self._map.dirfoldmap)
548 return folded
545 return folded
549
546
550 def normalize(self, path, isknown=False, ignoremissing=False):
547 def normalize(self, path, isknown=False, ignoremissing=False):
551 '''
548 '''
552 normalize the case of a pathname when on a casefolding filesystem
549 normalize the case of a pathname when on a casefolding filesystem
553
550
554 isknown specifies whether the filename came from walking the
551 isknown specifies whether the filename came from walking the
555 disk, to avoid extra filesystem access.
552 disk, to avoid extra filesystem access.
556
553
557 If ignoremissing is True, missing path are returned
554 If ignoremissing is True, missing path are returned
558 unchanged. Otherwise, we try harder to normalize possibly
555 unchanged. Otherwise, we try harder to normalize possibly
559 existing path components.
556 existing path components.
560
557
561 The normalized case is determined based on the following precedence:
558 The normalized case is determined based on the following precedence:
562
559
563 - version of name already stored in the dirstate
560 - version of name already stored in the dirstate
564 - version of name stored on disk
561 - version of name stored on disk
565 - version provided via command arguments
562 - version provided via command arguments
566 '''
563 '''
567
564
568 if self._checkcase:
565 if self._checkcase:
569 return self._normalize(path, isknown, ignoremissing)
566 return self._normalize(path, isknown, ignoremissing)
570 return path
567 return path
571
568
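Illustration of the case-folding lookup above, assuming a case-insensitive filesystem and a tracked file named README.txt (both assumptions made for the example):

    # The spelling already stored in the dirstate wins.
    assert ds.normalize('readme.TXT') == 'README.txt'
    # On a case-sensitive filesystem the argument is returned unchanged.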
572 def clear(self):
569 def clear(self):
573 self._map.clear()
570 self._map.clear()
574 self._lastnormaltime = 0
571 self._lastnormaltime = 0
575 self._updatedfiles.clear()
572 self._updatedfiles.clear()
576 self._dirty = True
573 self._dirty = True
577
574
578 def rebuild(self, parent, allfiles, changedfiles=None):
575 def rebuild(self, parent, allfiles, changedfiles=None):
579 if changedfiles is None:
576 if changedfiles is None:
580 # Rebuild entire dirstate
577 # Rebuild entire dirstate
581 changedfiles = allfiles
578 changedfiles = allfiles
582 lastnormaltime = self._lastnormaltime
579 lastnormaltime = self._lastnormaltime
583 self.clear()
580 self.clear()
584 self._lastnormaltime = lastnormaltime
581 self._lastnormaltime = lastnormaltime
585
582
586 if self._origpl is None:
583 if self._origpl is None:
587 self._origpl = self._pl
584 self._origpl = self._pl
588 self._map.setparents(parent, nullid)
585 self._map.setparents(parent, nullid)
589 for f in changedfiles:
586 for f in changedfiles:
590 if f in allfiles:
587 if f in allfiles:
591 self.normallookup(f)
588 self.normallookup(f)
592 else:
589 else:
593 self.drop(f)
590 self.drop(f)
594
591
595 self._dirty = True
592 self._dirty = True
596
593
597 def identity(self):
594 def identity(self):
598 '''Return identity of dirstate itself to detect changing in storage
595 '''Return identity of dirstate itself to detect changing in storage
599
596
600 If identity of previous dirstate is equal to this, writing
597 If identity of previous dirstate is equal to this, writing
601 changes based on the former dirstate out can keep consistency.
598 changes based on the former dirstate out can keep consistency.
602 '''
599 '''
603 return self._map.identity
600 return self._map.identity
604
601
605 def write(self, tr):
602 def write(self, tr):
606 if not self._dirty:
603 if not self._dirty:
607 return
604 return
608
605
609 filename = self._filename
606 filename = self._filename
610 if tr:
607 if tr:
611 # 'dirstate.write()' is not only for writing in-memory
608 # 'dirstate.write()' is not only for writing in-memory
612 # changes out, but also for dropping ambiguous timestamp.
609 # changes out, but also for dropping ambiguous timestamp.
613 # delayed writing re-raise "ambiguous timestamp issue".
610 # delayed writing re-raise "ambiguous timestamp issue".
614 # See also the wiki page below for detail:
611 # See also the wiki page below for detail:
615 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
612 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
616
613
617 # emulate dropping timestamp in 'parsers.pack_dirstate'
614 # emulate dropping timestamp in 'parsers.pack_dirstate'
618 now = _getfsnow(self._opener)
615 now = _getfsnow(self._opener)
619 self._map.clearambiguoustimes(self._updatedfiles, now)
616 self._map.clearambiguoustimes(self._updatedfiles, now)
620
617
621 # emulate that all 'dirstate.normal' results are written out
618 # emulate that all 'dirstate.normal' results are written out
622 self._lastnormaltime = 0
619 self._lastnormaltime = 0
623 self._updatedfiles.clear()
620 self._updatedfiles.clear()
624
621
625 # delay writing in-memory changes out
622 # delay writing in-memory changes out
626 tr.addfilegenerator('dirstate', (self._filename,),
623 tr.addfilegenerator('dirstate', (self._filename,),
627 self._writedirstate, location='plain')
624 self._writedirstate, location='plain')
628 return
625 return
629
626
630 st = self._opener(filename, "w", atomictemp=True, checkambig=True)
627 st = self._opener(filename, "w", atomictemp=True, checkambig=True)
631 self._writedirstate(st)
628 self._writedirstate(st)
632
629
633 def addparentchangecallback(self, category, callback):
630 def addparentchangecallback(self, category, callback):
634 """add a callback to be called when the wd parents are changed
631 """add a callback to be called when the wd parents are changed
635
632
636 Callback will be called with the following arguments:
633 Callback will be called with the following arguments:
637 dirstate, (oldp1, oldp2), (newp1, newp2)
634 dirstate, (oldp1, oldp2), (newp1, newp2)
638
635
639 Category is a unique identifier to allow overwriting an old callback
636 Category is a unique identifier to allow overwriting an old callback
640 with a newer callback.
637 with a newer callback.
641 """
638 """
642 self._plchangecallbacks[category] = callback
639 self._plchangecallbacks[category] = callback
643
640
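A sketch of registering a callback with the hook above; the category name and function body are invented for illustration:

    def _logparents(dirstate, oldparents, newparents):
        # oldparents and newparents are (p1, p2) tuples of binary node ids
        pass

    ds.addparentchangecallback('myextension', _logparents)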
644 def _writedirstate(self, st):
641 def _writedirstate(self, st):
645 # notify callbacks about parents change
642 # notify callbacks about parents change
646 if self._origpl is not None and self._origpl != self._pl:
643 if self._origpl is not None and self._origpl != self._pl:
647 for c, callback in sorted(self._plchangecallbacks.iteritems()):
644 for c, callback in sorted(self._plchangecallbacks.iteritems()):
648 callback(self, self._origpl, self._pl)
645 callback(self, self._origpl, self._pl)
649 self._origpl = None
646 self._origpl = None
650 # use the modification time of the newly created temporary file as the
647 # use the modification time of the newly created temporary file as the
651 # filesystem's notion of 'now'
648 # filesystem's notion of 'now'
652 now = util.fstat(st).st_mtime & _rangemask
649 now = util.fstat(st).st_mtime & _rangemask
653
650
654 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
651 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
655 # timestamp of each entries in dirstate, because of 'now > mtime'
652 # timestamp of each entries in dirstate, because of 'now > mtime'
656 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite')
653 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite')
657 if delaywrite > 0:
654 if delaywrite > 0:
658 # do we have any files to delay for?
655 # do we have any files to delay for?
659 for f, e in self._map.iteritems():
656 for f, e in self._map.iteritems():
660 if e[0] == 'n' and e[3] == now:
657 if e[0] == 'n' and e[3] == now:
661 import time # to avoid useless import
658 import time # to avoid useless import
662 # rather than sleep n seconds, sleep until the next
659 # rather than sleep n seconds, sleep until the next
663 # multiple of n seconds
660 # multiple of n seconds
664 clock = time.time()
661 clock = time.time()
665 start = int(clock) - (int(clock) % delaywrite)
662 start = int(clock) - (int(clock) % delaywrite)
666 end = start + delaywrite
663 end = start + delaywrite
667 time.sleep(end - clock)
664 time.sleep(end - clock)
668 now = end # trust our estimate that the end is near now
665 now = end # trust our estimate that the end is near now
669 break
666 break
670
667
671 self._map.write(st, now)
668 self._map.write(st, now)
672 self._lastnormaltime = 0
669 self._lastnormaltime = 0
673 self._dirty = False
670 self._dirty = False
674
671
675 def _dirignore(self, f):
672 def _dirignore(self, f):
676 if f == '.':
673 if f == '.':
677 return False
674 return False
678 if self._ignore(f):
675 if self._ignore(f):
679 return True
676 return True
680 for p in util.finddirs(f):
677 for p in util.finddirs(f):
681 if self._ignore(p):
678 if self._ignore(p):
682 return True
679 return True
683 return False
680 return False
684
681
685 def _ignorefiles(self):
682 def _ignorefiles(self):
686 files = []
683 files = []
687 if os.path.exists(self._join('.hgignore')):
684 if os.path.exists(self._join('.hgignore')):
688 files.append(self._join('.hgignore'))
685 files.append(self._join('.hgignore'))
689 for name, path in self._ui.configitems("ui"):
686 for name, path in self._ui.configitems("ui"):
690 if name == 'ignore' or name.startswith('ignore.'):
687 if name == 'ignore' or name.startswith('ignore.'):
691 # we need to use os.path.join here rather than self._join
688 # we need to use os.path.join here rather than self._join
692 # because path is arbitrary and user-specified
689 # because path is arbitrary and user-specified
693 files.append(os.path.join(self._rootdir, util.expandpath(path)))
690 files.append(os.path.join(self._rootdir, util.expandpath(path)))
694 return files
691 return files
695
692
696 def _ignorefileandline(self, f):
693 def _ignorefileandline(self, f):
697 files = collections.deque(self._ignorefiles())
694 files = collections.deque(self._ignorefiles())
698 visited = set()
695 visited = set()
699 while files:
696 while files:
700 i = files.popleft()
697 i = files.popleft()
701 patterns = matchmod.readpatternfile(i, self._ui.warn,
698 patterns = matchmod.readpatternfile(i, self._ui.warn,
702 sourceinfo=True)
699 sourceinfo=True)
703 for pattern, lineno, line in patterns:
700 for pattern, lineno, line in patterns:
704 kind, p = matchmod._patsplit(pattern, 'glob')
701 kind, p = matchmod._patsplit(pattern, 'glob')
705 if kind == "subinclude":
702 if kind == "subinclude":
706 if p not in visited:
703 if p not in visited:
707 files.append(p)
704 files.append(p)
708 continue
705 continue
709 m = matchmod.match(self._root, '', [], [pattern],
706 m = matchmod.match(self._root, '', [], [pattern],
710 warn=self._ui.warn)
707 warn=self._ui.warn)
711 if m(f):
708 if m(f):
712 return (i, lineno, line)
709 return (i, lineno, line)
713 visited.add(i)
710 visited.add(i)
714 return (None, -1, "")
711 return (None, -1, "")
715
712
716 def _walkexplicit(self, match, subrepos):
713 def _walkexplicit(self, match, subrepos):
717 '''Get stat data about the files explicitly specified by match.
714 '''Get stat data about the files explicitly specified by match.
718
715
719 Return a triple (results, dirsfound, dirsnotfound).
716 Return a triple (results, dirsfound, dirsnotfound).
720 - results is a mapping from filename to stat result. It also contains
717 - results is a mapping from filename to stat result. It also contains
721 listings mapping subrepos and .hg to None.
718 listings mapping subrepos and .hg to None.
722 - dirsfound is a list of files found to be directories.
719 - dirsfound is a list of files found to be directories.
723 - dirsnotfound is a list of files that the dirstate thinks are
720 - dirsnotfound is a list of files that the dirstate thinks are
724 directories and that were not found.'''
721 directories and that were not found.'''
725
722
726 def badtype(mode):
723 def badtype(mode):
727 kind = _('unknown')
724 kind = _('unknown')
728 if stat.S_ISCHR(mode):
725 if stat.S_ISCHR(mode):
729 kind = _('character device')
726 kind = _('character device')
730 elif stat.S_ISBLK(mode):
727 elif stat.S_ISBLK(mode):
731 kind = _('block device')
728 kind = _('block device')
732 elif stat.S_ISFIFO(mode):
729 elif stat.S_ISFIFO(mode):
733 kind = _('fifo')
730 kind = _('fifo')
734 elif stat.S_ISSOCK(mode):
731 elif stat.S_ISSOCK(mode):
735 kind = _('socket')
732 kind = _('socket')
736 elif stat.S_ISDIR(mode):
733 elif stat.S_ISDIR(mode):
737 kind = _('directory')
734 kind = _('directory')
738 return _('unsupported file type (type is %s)') % kind
735 return _('unsupported file type (type is %s)') % kind
739
736
740 matchedir = match.explicitdir
737 matchedir = match.explicitdir
741 badfn = match.bad
738 badfn = match.bad
742 dmap = self._map
739 dmap = self._map
743 lstat = os.lstat
740 lstat = os.lstat
744 getkind = stat.S_IFMT
741 getkind = stat.S_IFMT
745 dirkind = stat.S_IFDIR
742 dirkind = stat.S_IFDIR
746 regkind = stat.S_IFREG
743 regkind = stat.S_IFREG
747 lnkkind = stat.S_IFLNK
744 lnkkind = stat.S_IFLNK
748 join = self._join
745 join = self._join
749 dirsfound = []
746 dirsfound = []
750 foundadd = dirsfound.append
747 foundadd = dirsfound.append
751 dirsnotfound = []
748 dirsnotfound = []
752 notfoundadd = dirsnotfound.append
749 notfoundadd = dirsnotfound.append
753
750
754 if not match.isexact() and self._checkcase:
751 if not match.isexact() and self._checkcase:
755 normalize = self._normalize
752 normalize = self._normalize
756 else:
753 else:
757 normalize = None
754 normalize = None
758
755
759 files = sorted(match.files())
756 files = sorted(match.files())
760 subrepos.sort()
757 subrepos.sort()
761 i, j = 0, 0
758 i, j = 0, 0
762 while i < len(files) and j < len(subrepos):
759 while i < len(files) and j < len(subrepos):
763 subpath = subrepos[j] + "/"
760 subpath = subrepos[j] + "/"
764 if files[i] < subpath:
761 if files[i] < subpath:
765 i += 1
762 i += 1
766 continue
763 continue
767 while i < len(files) and files[i].startswith(subpath):
764 while i < len(files) and files[i].startswith(subpath):
768 del files[i]
765 del files[i]
769 j += 1
766 j += 1
770
767
771 if not files or '.' in files:
768 if not files or '.' in files:
772 files = ['.']
769 files = ['.']
773 results = dict.fromkeys(subrepos)
770 results = dict.fromkeys(subrepos)
774 results['.hg'] = None
771 results['.hg'] = None
775
772
776 alldirs = None
773 alldirs = None
777 for ff in files:
774 for ff in files:
778 # constructing the foldmap is expensive, so don't do it for the
775 # constructing the foldmap is expensive, so don't do it for the
779 # common case where files is ['.']
776 # common case where files is ['.']
780 if normalize and ff != '.':
777 if normalize and ff != '.':
781 nf = normalize(ff, False, True)
778 nf = normalize(ff, False, True)
782 else:
779 else:
783 nf = ff
780 nf = ff
784 if nf in results:
781 if nf in results:
785 continue
782 continue
786
783
787 try:
784 try:
788 st = lstat(join(nf))
785 st = lstat(join(nf))
789 kind = getkind(st.st_mode)
786 kind = getkind(st.st_mode)
790 if kind == dirkind:
787 if kind == dirkind:
791 if nf in dmap:
788 if nf in dmap:
792 # file replaced by dir on disk but still in dirstate
789 # file replaced by dir on disk but still in dirstate
793 results[nf] = None
790 results[nf] = None
794 if matchedir:
791 if matchedir:
795 matchedir(nf)
792 matchedir(nf)
796 foundadd((nf, ff))
793 foundadd((nf, ff))
797 elif kind == regkind or kind == lnkkind:
794 elif kind == regkind or kind == lnkkind:
798 results[nf] = st
795 results[nf] = st
799 else:
796 else:
800 badfn(ff, badtype(kind))
797 badfn(ff, badtype(kind))
801 if nf in dmap:
798 if nf in dmap:
802 results[nf] = None
799 results[nf] = None
803 except OSError as inst: # nf not found on disk - it is dirstate only
800 except OSError as inst: # nf not found on disk - it is dirstate only
804 if nf in dmap: # does it exactly match a missing file?
801 if nf in dmap: # does it exactly match a missing file?
805 results[nf] = None
802 results[nf] = None
806 else: # does it match a missing directory?
803 else: # does it match a missing directory?
807 if alldirs is None:
804 if alldirs is None:
808 alldirs = util.dirs(dmap._map)
805 alldirs = util.dirs(dmap._map)
809 if nf in alldirs:
806 if nf in alldirs:
810 if matchedir:
807 if matchedir:
811 matchedir(nf)
808 matchedir(nf)
812 notfoundadd(nf)
809 notfoundadd(nf)
813 else:
810 else:
814 badfn(ff, encoding.strtolocal(inst.strerror))
811 badfn(ff, encoding.strtolocal(inst.strerror))
815
812
816 # Case insensitive filesystems cannot rely on lstat() failing to detect
813 # Case insensitive filesystems cannot rely on lstat() failing to detect
817 # a case-only rename. Prune the stat object for any file that does not
814 # a case-only rename. Prune the stat object for any file that does not
818 # match the case in the filesystem, if there are multiple files that
815 # match the case in the filesystem, if there are multiple files that
819 # normalize to the same path.
816 # normalize to the same path.
820 if match.isexact() and self._checkcase:
817 if match.isexact() and self._checkcase:
821 normed = {}
818 normed = {}
822
819
823 for f, st in results.iteritems():
820 for f, st in results.iteritems():
824 if st is None:
821 if st is None:
825 continue
822 continue
826
823
827 nc = util.normcase(f)
824 nc = util.normcase(f)
828 paths = normed.get(nc)
825 paths = normed.get(nc)
829
826
830 if paths is None:
827 if paths is None:
831 paths = set()
828 paths = set()
832 normed[nc] = paths
829 normed[nc] = paths
833
830
834 paths.add(f)
831 paths.add(f)
835
832
836 for norm, paths in normed.iteritems():
833 for norm, paths in normed.iteritems():
837 if len(paths) > 1:
834 if len(paths) > 1:
838 for path in paths:
835 for path in paths:
839 folded = self._discoverpath(path, norm, True, None,
836 folded = self._discoverpath(path, norm, True, None,
840 self._map.dirfoldmap)
837 self._map.dirfoldmap)
841 if path != folded:
838 if path != folded:
842 results[path] = None
839 results[path] = None
843
840
844 return results, dirsfound, dirsnotfound
841 return results, dirsfound, dirsnotfound
845
842
846 def walk(self, match, subrepos, unknown, ignored, full=True):
843 def walk(self, match, subrepos, unknown, ignored, full=True):
847 '''
844 '''
848 Walk recursively through the directory tree, finding all files
845 Walk recursively through the directory tree, finding all files
849 matched by match.
846 matched by match.
850
847
851 If full is False, maybe skip some known-clean files.
848 If full is False, maybe skip some known-clean files.
852
849
853 Return a dict mapping filename to stat-like object (either
850 Return a dict mapping filename to stat-like object (either
854 mercurial.osutil.stat instance or return value of os.stat()).
851 mercurial.osutil.stat instance or return value of os.stat()).
855
852
856 '''
853 '''
857 # full is a flag that extensions that hook into walk can use -- this
854 # full is a flag that extensions that hook into walk can use -- this
858 # implementation doesn't use it at all. This satisfies the contract
855 # implementation doesn't use it at all. This satisfies the contract
859 # because we only guarantee a "maybe".
856 # because we only guarantee a "maybe".
860
857
861 if ignored:
858 if ignored:
862 ignore = util.never
859 ignore = util.never
863 dirignore = util.never
860 dirignore = util.never
864 elif unknown:
861 elif unknown:
865 ignore = self._ignore
862 ignore = self._ignore
866 dirignore = self._dirignore
863 dirignore = self._dirignore
867 else:
864 else:
868 # if not unknown and not ignored, drop dir recursion and step 2
865 # if not unknown and not ignored, drop dir recursion and step 2
869 ignore = util.always
866 ignore = util.always
870 dirignore = util.always
867 dirignore = util.always
871
868
872 matchfn = match.matchfn
869 matchfn = match.matchfn
873 matchalways = match.always()
870 matchalways = match.always()
874 matchtdir = match.traversedir
871 matchtdir = match.traversedir
875 dmap = self._map
872 dmap = self._map
876 listdir = util.listdir
873 listdir = util.listdir
877 lstat = os.lstat
874 lstat = os.lstat
878 dirkind = stat.S_IFDIR
875 dirkind = stat.S_IFDIR
879 regkind = stat.S_IFREG
876 regkind = stat.S_IFREG
880 lnkkind = stat.S_IFLNK
877 lnkkind = stat.S_IFLNK
881 join = self._join
878 join = self._join
882
879
883 exact = skipstep3 = False
880 exact = skipstep3 = False
884 if match.isexact(): # match.exact
881 if match.isexact(): # match.exact
885 exact = True
882 exact = True
886 dirignore = util.always # skip step 2
883 dirignore = util.always # skip step 2
887 elif match.prefix(): # match.match, no patterns
884 elif match.prefix(): # match.match, no patterns
888 skipstep3 = True
885 skipstep3 = True
889
886
890 if not exact and self._checkcase:
887 if not exact and self._checkcase:
891 normalize = self._normalize
888 normalize = self._normalize
892 normalizefile = self._normalizefile
889 normalizefile = self._normalizefile
893 skipstep3 = False
890 skipstep3 = False
894 else:
891 else:
895 normalize = self._normalize
892 normalize = self._normalize
896 normalizefile = None
893 normalizefile = None
897
894
898 # step 1: find all explicit files
895 # step 1: find all explicit files
899 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
896 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
900
897
901 skipstep3 = skipstep3 and not (work or dirsnotfound)
898 skipstep3 = skipstep3 and not (work or dirsnotfound)
902 work = [d for d in work if not dirignore(d[0])]
899 work = [d for d in work if not dirignore(d[0])]
903
900
904 # step 2: visit subdirectories
901 # step 2: visit subdirectories
905 def traverse(work, alreadynormed):
902 def traverse(work, alreadynormed):
906 wadd = work.append
903 wadd = work.append
907 while work:
904 while work:
908 nd = work.pop()
905 nd = work.pop()
909 if not match.visitdir(nd):
906 if not match.visitdir(nd):
910 continue
907 continue
911 skip = None
908 skip = None
912 if nd == '.':
909 if nd == '.':
913 nd = ''
910 nd = ''
914 else:
911 else:
915 skip = '.hg'
912 skip = '.hg'
916 try:
913 try:
917 entries = listdir(join(nd), stat=True, skip=skip)
914 entries = listdir(join(nd), stat=True, skip=skip)
918 except OSError as inst:
915 except OSError as inst:
919 if inst.errno in (errno.EACCES, errno.ENOENT):
916 if inst.errno in (errno.EACCES, errno.ENOENT):
920 match.bad(self.pathto(nd),
917 match.bad(self.pathto(nd),
921 encoding.strtolocal(inst.strerror))
918 encoding.strtolocal(inst.strerror))
922 continue
919 continue
923 raise
920 raise
924 for f, kind, st in entries:
921 for f, kind, st in entries:
925 if normalizefile:
922 if normalizefile:
926 # even though f might be a directory, we're only
923 # even though f might be a directory, we're only
927 # interested in comparing it to files currently in the
924 # interested in comparing it to files currently in the
928 # dmap -- therefore normalizefile is enough
925 # dmap -- therefore normalizefile is enough
929 nf = normalizefile(nd and (nd + "/" + f) or f, True,
926 nf = normalizefile(nd and (nd + "/" + f) or f, True,
930 True)
927 True)
931 else:
928 else:
932 nf = nd and (nd + "/" + f) or f
929 nf = nd and (nd + "/" + f) or f
933 if nf not in results:
930 if nf not in results:
934 if kind == dirkind:
931 if kind == dirkind:
935 if not ignore(nf):
932 if not ignore(nf):
936 if matchtdir:
933 if matchtdir:
937 matchtdir(nf)
934 matchtdir(nf)
938 wadd(nf)
935 wadd(nf)
939 if nf in dmap and (matchalways or matchfn(nf)):
936 if nf in dmap and (matchalways or matchfn(nf)):
940 results[nf] = None
937 results[nf] = None
941 elif kind == regkind or kind == lnkkind:
938 elif kind == regkind or kind == lnkkind:
942 if nf in dmap:
939 if nf in dmap:
943 if matchalways or matchfn(nf):
940 if matchalways or matchfn(nf):
944 results[nf] = st
941 results[nf] = st
945 elif ((matchalways or matchfn(nf))
942 elif ((matchalways or matchfn(nf))
946 and not ignore(nf)):
943 and not ignore(nf)):
947 # unknown file -- normalize if necessary
944 # unknown file -- normalize if necessary
948 if not alreadynormed:
945 if not alreadynormed:
949 nf = normalize(nf, False, True)
946 nf = normalize(nf, False, True)
950 results[nf] = st
947 results[nf] = st
951 elif nf in dmap and (matchalways or matchfn(nf)):
948 elif nf in dmap and (matchalways or matchfn(nf)):
952 results[nf] = None
949 results[nf] = None
953
950
954 for nd, d in work:
951 for nd, d in work:
955 # alreadynormed means that processwork doesn't have to do any
952 # alreadynormed means that processwork doesn't have to do any
956 # expensive directory normalization
953 # expensive directory normalization
957 alreadynormed = not normalize or nd == d
954 alreadynormed = not normalize or nd == d
958 traverse([d], alreadynormed)
955 traverse([d], alreadynormed)
959
956
960 for s in subrepos:
957 for s in subrepos:
961 del results[s]
958 del results[s]
962 del results['.hg']
959 del results['.hg']
963
960
964 # step 3: visit remaining files from dmap
961 # step 3: visit remaining files from dmap
965 if not skipstep3 and not exact:
962 if not skipstep3 and not exact:
966 # If a dmap file is not in results yet, it was either
963 # If a dmap file is not in results yet, it was either
967 # a) not matching matchfn, b) ignored, c) missing, or d) under a
964 # a) not matching matchfn, b) ignored, c) missing, or d) under a
968 # symlink directory.
965 # symlink directory.
969 if not results and matchalways:
966 if not results and matchalways:
970 visit = [f for f in dmap]
967 visit = [f for f in dmap]
971 else:
968 else:
972 visit = [f for f in dmap if f not in results and matchfn(f)]
969 visit = [f for f in dmap if f not in results and matchfn(f)]
973 visit.sort()
970 visit.sort()
974
971
975 if unknown:
972 if unknown:
976 # unknown == True means we walked all dirs under the roots
973 # unknown == True means we walked all dirs under the roots
977 # that weren't ignored, and everything that matched was stat'ed
974 # that weren't ignored, and everything that matched was stat'ed
978 # and is already in results.
975 # and is already in results.
979 # The rest must thus be ignored or under a symlink.
976 # The rest must thus be ignored or under a symlink.
980 audit_path = pathutil.pathauditor(self._root, cached=True)
977 audit_path = pathutil.pathauditor(self._root, cached=True)
981
978
982 for nf in iter(visit):
979 for nf in iter(visit):
983 # If a stat for the same file was already added with a
980 # If a stat for the same file was already added with a
984 # different case, don't add one for this, since that would
981 # different case, don't add one for this, since that would
985 # make it appear as if the file exists under both names
982 # make it appear as if the file exists under both names
986 # on disk.
983 # on disk.
987 if (normalizefile and
984 if (normalizefile and
988 normalizefile(nf, True, True) in results):
985 normalizefile(nf, True, True) in results):
989 results[nf] = None
986 results[nf] = None
990 # Report ignored items in the dmap as long as they are not
987 # Report ignored items in the dmap as long as they are not
991 # under a symlink directory.
988 # under a symlink directory.
992 elif audit_path.check(nf):
989 elif audit_path.check(nf):
993 try:
990 try:
994 results[nf] = lstat(join(nf))
991 results[nf] = lstat(join(nf))
995 # file was just ignored, no links, and exists
992 # file was just ignored, no links, and exists
996 except OSError:
993 except OSError:
997 # file doesn't exist
994 # file doesn't exist
998 results[nf] = None
995 results[nf] = None
999 else:
996 else:
1000 # It's either missing or under a symlink directory
997 # It's either missing or under a symlink directory
1001 # which we report as missing in this case
998 # which we report as missing in this case
1002 results[nf] = None
999 results[nf] = None
1003 else:
1000 else:
1004 # We may not have walked the full directory tree above,
1001 # We may not have walked the full directory tree above,
1005 # so stat and check everything we missed.
1002 # so stat and check everything we missed.
1006 iv = iter(visit)
1003 iv = iter(visit)
1007 for st in util.statfiles([join(i) for i in visit]):
1004 for st in util.statfiles([join(i) for i in visit]):
1008 results[next(iv)] = st
1005 results[next(iv)] = st
1009 return results
1006 return results
1010
1007
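The walk() code above returns a dict that maps repo-relative file names to lstat results, with None standing in for files that are wanted (tracked or explicitly requested) but not usable on disk. A minimal sketch, not part of dirstate.py, of how a caller might split such a mapping; it assumes only a plain dict of that shape:

def split_walk_results(results):
    # results: repo-relative path -> os.stat_result or None, mirroring the
    # mapping built by walk() above (illustrative helper only).
    present = {f: st for f, st in results.items() if st is not None}
    absent = sorted(f for f, st in results.items() if st is None)
    return present, absent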
1011 def status(self, match, subrepos, ignored, clean, unknown):
1008 def status(self, match, subrepos, ignored, clean, unknown):
1012 '''Determine the status of the working copy relative to the
1009 '''Determine the status of the working copy relative to the
1013 dirstate and return a pair of (unsure, status), where status is of type
1010 dirstate and return a pair of (unsure, status), where status is of type
1014 scmutil.status and:
1011 scmutil.status and:
1015
1012
1016 unsure:
1013 unsure:
1017 files that might have been modified since the dirstate was
1014 files that might have been modified since the dirstate was
1018 written, but need to be read to be sure (size is the same
1015 written, but need to be read to be sure (size is the same
1019 but mtime differs)
1016 but mtime differs)
1020 status.modified:
1017 status.modified:
1021 files that have definitely been modified since the dirstate
1018 files that have definitely been modified since the dirstate
1022 was written (different size or mode)
1019 was written (different size or mode)
1023 status.clean:
1020 status.clean:
1024 files that have definitely not been modified since the
1021 files that have definitely not been modified since the
1025 dirstate was written
1022 dirstate was written
1026 '''
1023 '''
1027 listignored, listclean, listunknown = ignored, clean, unknown
1024 listignored, listclean, listunknown = ignored, clean, unknown
1028 lookup, modified, added, unknown, ignored = [], [], [], [], []
1025 lookup, modified, added, unknown, ignored = [], [], [], [], []
1029 removed, deleted, clean = [], [], []
1026 removed, deleted, clean = [], [], []
1030
1027
1031 dmap = self._map
1028 dmap = self._map
1032 dmap.preload()
1029 dmap.preload()
1033 dcontains = dmap.__contains__
1030 dcontains = dmap.__contains__
1034 dget = dmap.__getitem__
1031 dget = dmap.__getitem__
1035 ladd = lookup.append # aka "unsure"
1032 ladd = lookup.append # aka "unsure"
1036 madd = modified.append
1033 madd = modified.append
1037 aadd = added.append
1034 aadd = added.append
1038 uadd = unknown.append
1035 uadd = unknown.append
1039 iadd = ignored.append
1036 iadd = ignored.append
1040 radd = removed.append
1037 radd = removed.append
1041 dadd = deleted.append
1038 dadd = deleted.append
1042 cadd = clean.append
1039 cadd = clean.append
1043 mexact = match.exact
1040 mexact = match.exact
1044 dirignore = self._dirignore
1041 dirignore = self._dirignore
1045 checkexec = self._checkexec
1042 checkexec = self._checkexec
1046 copymap = self._map.copymap
1043 copymap = self._map.copymap
1047 lastnormaltime = self._lastnormaltime
1044 lastnormaltime = self._lastnormaltime
1048
1045
1049 # We need to do full walks when either
1046 # We need to do full walks when either
1050 # - we're listing all clean files, or
1047 # - we're listing all clean files, or
1051 # - match.traversedir does something, because match.traversedir should
1048 # - match.traversedir does something, because match.traversedir should
1052 # be called for every dir in the working dir
1049 # be called for every dir in the working dir
1053 full = listclean or match.traversedir is not None
1050 full = listclean or match.traversedir is not None
1054 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1051 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1055 full=full).iteritems():
1052 full=full).iteritems():
1056 if not dcontains(fn):
1053 if not dcontains(fn):
1057 if (listignored or mexact(fn)) and dirignore(fn):
1054 if (listignored or mexact(fn)) and dirignore(fn):
1058 if listignored:
1055 if listignored:
1059 iadd(fn)
1056 iadd(fn)
1060 else:
1057 else:
1061 uadd(fn)
1058 uadd(fn)
1062 continue
1059 continue
1063
1060
1064 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1061 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1065 # written like that for performance reasons. dmap[fn] is not a
1062 # written like that for performance reasons. dmap[fn] is not a
1066 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1063 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1067 # opcode has fast paths when the value to be unpacked is a tuple or
1064 # opcode has fast paths when the value to be unpacked is a tuple or
1068 # a list, but falls back to creating a full-fledged iterator in
1065 # a list, but falls back to creating a full-fledged iterator in
1069 # general. That is much slower than simply accessing and storing the
1066 # general. That is much slower than simply accessing and storing the
1070 # tuple members one by one.
1067 # tuple members one by one.
1071 t = dget(fn)
1068 t = dget(fn)
1072 state = t[0]
1069 state = t[0]
1073 mode = t[1]
1070 mode = t[1]
1074 size = t[2]
1071 size = t[2]
1075 time = t[3]
1072 time = t[3]
1076
1073
1077 if not st and state in "nma":
1074 if not st and state in "nma":
1078 dadd(fn)
1075 dadd(fn)
1079 elif state == 'n':
1076 elif state == 'n':
1080 if (size >= 0 and
1077 if (size >= 0 and
1081 ((size != st.st_size and size != st.st_size & _rangemask)
1078 ((size != st.st_size and size != st.st_size & _rangemask)
1082 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1079 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1083 or size == -2 # other parent
1080 or size == -2 # other parent
1084 or fn in copymap):
1081 or fn in copymap):
1085 madd(fn)
1082 madd(fn)
1086 elif time != st.st_mtime and time != st.st_mtime & _rangemask:
1083 elif time != st.st_mtime and time != st.st_mtime & _rangemask:
1087 ladd(fn)
1084 ladd(fn)
1088 elif st.st_mtime == lastnormaltime:
1085 elif st.st_mtime == lastnormaltime:
1089 # fn may have just been marked as normal and it may have
1086 # fn may have just been marked as normal and it may have
1090 # changed in the same second without changing its size.
1087 # changed in the same second without changing its size.
1091 # This can happen if we quickly do multiple commits.
1088 # This can happen if we quickly do multiple commits.
1092 # Force lookup, so we don't miss such a racy file change.
1089 # Force lookup, so we don't miss such a racy file change.
1093 ladd(fn)
1090 ladd(fn)
1094 elif listclean:
1091 elif listclean:
1095 cadd(fn)
1092 cadd(fn)
1096 elif state == 'm':
1093 elif state == 'm':
1097 madd(fn)
1094 madd(fn)
1098 elif state == 'a':
1095 elif state == 'a':
1099 aadd(fn)
1096 aadd(fn)
1100 elif state == 'r':
1097 elif state == 'r':
1101 radd(fn)
1098 radd(fn)
1102
1099
1103 return (lookup, scmutil.status(modified, added, removed, deleted,
1100 return (lookup, scmutil.status(modified, added, removed, deleted,
1104 unknown, ignored, clean))
1101 unknown, ignored, clean))
1105
1102
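A hedged sketch, not part of dirstate.py, of how a caller might consume the (unsure, status) pair documented above; it assumes only that status exposes the modified/added/removed/deleted/clean lists named in the docstring, as scmutil.status does:

def summarize_status(unsure, status):
    # Files in `unsure` still need a content comparison before they can be
    # classified as clean or modified (illustrative helper only).
    return {
        'needs content check': sorted(unsure),
        'modified': sorted(status.modified),
        'added': sorted(status.added),
        'removed': sorted(status.removed),
        'deleted': sorted(status.deleted),
        'clean': sorted(status.clean),
    }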
1106 def matches(self, match):
1103 def matches(self, match):
1107 '''
1104 '''
1108 return files in the dirstate (in whatever state) filtered by match
1105 return files in the dirstate (in whatever state) filtered by match
1109 '''
1106 '''
1110 dmap = self._map
1107 dmap = self._map
1111 if match.always():
1108 if match.always():
1112 return dmap.keys()
1109 return dmap.keys()
1113 files = match.files()
1110 files = match.files()
1114 if match.isexact():
1111 if match.isexact():
1115 # fast path -- filter the other way around, since typically files is
1112 # fast path -- filter the other way around, since typically files is
1116 # much smaller than dmap
1113 # much smaller than dmap
1117 return [f for f in files if f in dmap]
1114 return [f for f in files if f in dmap]
1118 if match.prefix() and all(fn in dmap for fn in files):
1115 if match.prefix() and all(fn in dmap for fn in files):
1119 # fast path -- all the values are known to be files, so just return
1116 # fast path -- all the values are known to be files, so just return
1120 # that
1117 # that
1121 return list(files)
1118 return list(files)
1122 return [f for f in dmap if match(f)]
1119 return [f for f in dmap if match(f)]
1123
1120
1124 def _actualfilename(self, tr):
1121 def _actualfilename(self, tr):
1125 if tr:
1122 if tr:
1126 return self._pendingfilename
1123 return self._pendingfilename
1127 else:
1124 else:
1128 return self._filename
1125 return self._filename
1129
1126
1130 def savebackup(self, tr, backupname):
1127 def savebackup(self, tr, backupname):
1131 '''Save current dirstate into backup file'''
1128 '''Save current dirstate into backup file'''
1132 filename = self._actualfilename(tr)
1129 filename = self._actualfilename(tr)
1133 assert backupname != filename
1130 assert backupname != filename
1134
1131
1135 # use '_writedirstate' instead of 'write' to make sure changes are written
1132 # use '_writedirstate' instead of 'write' to make sure changes are written
1136 # out, because the latter skips writing while a transaction is running.
1133 # out, because the latter skips writing while a transaction is running.
1137 # The output file will be used to create a backup of the dirstate at this point.
1134 # The output file will be used to create a backup of the dirstate at this point.
1138 if self._dirty or not self._opener.exists(filename):
1135 if self._dirty or not self._opener.exists(filename):
1139 self._writedirstate(self._opener(filename, "w", atomictemp=True,
1136 self._writedirstate(self._opener(filename, "w", atomictemp=True,
1140 checkambig=True))
1137 checkambig=True))
1141
1138
1142 if tr:
1139 if tr:
1143 # ensure that subsequent tr.writepending returns True for
1140 # ensure that subsequent tr.writepending returns True for
1144 # changes written out above, even if dirstate is never
1141 # changes written out above, even if dirstate is never
1145 # changed after this
1142 # changed after this
1146 tr.addfilegenerator('dirstate', (self._filename,),
1143 tr.addfilegenerator('dirstate', (self._filename,),
1147 self._writedirstate, location='plain')
1144 self._writedirstate, location='plain')
1148
1145
1149 # ensure that the pending file written above is unlinked on
1146 # ensure that the pending file written above is unlinked on
1150 # failure, even if tr.writepending isn't invoked until the
1147 # failure, even if tr.writepending isn't invoked until the
1151 # end of this transaction
1148 # end of this transaction
1152 tr.registertmp(filename, location='plain')
1149 tr.registertmp(filename, location='plain')
1153
1150
1154 self._opener.tryunlink(backupname)
1151 self._opener.tryunlink(backupname)
1155 # hardlink backup is okay because _writedirstate is always called
1152 # hardlink backup is okay because _writedirstate is always called
1156 # with an "atomictemp=True" file.
1153 # with an "atomictemp=True" file.
1157 util.copyfile(self._opener.join(filename),
1154 util.copyfile(self._opener.join(filename),
1158 self._opener.join(backupname), hardlink=True)
1155 self._opener.join(backupname), hardlink=True)
1159
1156
1160 def restorebackup(self, tr, backupname):
1157 def restorebackup(self, tr, backupname):
1161 '''Restore dirstate by backup file'''
1158 '''Restore dirstate by backup file'''
1162 # this "invalidate()" prevents "wlock.release()" from writing
1159 # this "invalidate()" prevents "wlock.release()" from writing
1163 # dirstate changes out after restoring from the backup file
1160 # dirstate changes out after restoring from the backup file
1164 self.invalidate()
1161 self.invalidate()
1165 filename = self._actualfilename(tr)
1162 filename = self._actualfilename(tr)
1166 o = self._opener
1163 o = self._opener
1167 if util.samefile(o.join(backupname), o.join(filename)):
1164 if util.samefile(o.join(backupname), o.join(filename)):
1168 o.unlink(backupname)
1165 o.unlink(backupname)
1169 else:
1166 else:
1170 o.rename(backupname, filename, checkambig=True)
1167 o.rename(backupname, filename, checkambig=True)
1171
1168
1172 def clearbackup(self, tr, backupname):
1169 def clearbackup(self, tr, backupname):
1173 '''Clear backup file'''
1170 '''Clear backup file'''
1174 self._opener.unlink(backupname)
1171 self._opener.unlink(backupname)
1175
1172
1176 class dirstatemap(object):
1173 class dirstatemap(object):
1177 """Map encapsulating the dirstate's contents.
1174 """Map encapsulating the dirstate's contents.
1178
1175
1179 The dirstate contains the following state:
1176 The dirstate contains the following state:
1180
1177
1181 - `identity` is the identity of the dirstate file, which can be used to
1178 - `identity` is the identity of the dirstate file, which can be used to
1182 detect when changes have occurred to the dirstate file.
1179 detect when changes have occurred to the dirstate file.
1183
1180
1184 - `parents` is a pair containing the parents of the working copy. The
1181 - `parents` is a pair containing the parents of the working copy. The
1185 parents are updated by calling `setparents`.
1182 parents are updated by calling `setparents`.
1186
1183
1187 - the state map maps filenames to tuples of (state, mode, size, mtime),
1184 - the state map maps filenames to tuples of (state, mode, size, mtime),
1188 where state is a single character representing 'normal', 'added',
1185 where state is a single character representing 'normal', 'added',
1189 'removed', or 'merged'. It is read by treating the dirstate as a
1186 'removed', or 'merged'. It is read by treating the dirstate as a
1190 dict. File state is updated by calling the `addfile`, `removefile` and
1187 dict. File state is updated by calling the `addfile`, `removefile` and
1191 `dropfile` methods.
1188 `dropfile` methods.
1192
1189
1193 - `copymap` maps destination filenames to their source filename.
1190 - `copymap` maps destination filenames to their source filename.
1194
1191
1195 The dirstate also provides the following views onto the state:
1192 The dirstate also provides the following views onto the state:
1196
1193
1197 - `nonnormalset` is a set of the filenames that have state other
1194 - `nonnormalset` is a set of the filenames that have state other
1198 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1195 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1199
1196
1200 - `otherparentset` is a set of the filenames that are marked as coming
1197 - `otherparentset` is a set of the filenames that are marked as coming
1201 from the second parent when the dirstate is currently being merged.
1198 from the second parent when the dirstate is currently being merged.
1202
1199
1203 - `dirs` is a set-like object containing all the directories that contain
1200 - `dirs` is a set-like object containing all the directories that contain
1204 files in the dirstate, excluding any files that are marked as removed.
1201 files in the dirstate, excluding any files that are marked as removed.
1205
1202
1206 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1203 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1207 form that they appear as in the dirstate.
1204 form that they appear as in the dirstate.
1208
1205
1209 - `dirfoldmap` is a dict mapping normalized directory names to the
1206 - `dirfoldmap` is a dict mapping normalized directory names to the
1210 denormalized form that they appear as in the dirstate.
1207 denormalized form that they appear as in the dirstate.
1211 """
1208 """
1212
1209
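The state map described above stores bare (state, mode, size, mtime) tuples. The decoder below is an illustrative sketch, not part of dirstatemap; it relies only on conventions stated in this file: state 'n' with an mtime of -1 is the 'normallookup' case, and a size of -2 marks an entry taken from the other merge parent.

def describe_entry(entry):
    # entry: a (state, mode, size, mtime) tuple as documented above.
    state, _mode, size, mtime = entry
    names = {'n': 'normal', 'a': 'added', 'r': 'removed', 'm': 'merged'}
    flags = []
    if state == 'n' and mtime == -1:
        flags.append('needs lookup')        # "normallookup"
    if state == 'n' and size == -2:
        flags.append('from other parent')   # merge sentinel
    return names.get(state, 'unknown'), flags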
1213 def __init__(self, ui, opener, root):
1210 def __init__(self, ui, opener, root):
1214 self._ui = ui
1211 self._ui = ui
1215 self._opener = opener
1212 self._opener = opener
1216 self._root = root
1213 self._root = root
1217 self._filename = 'dirstate'
1214 self._filename = 'dirstate'
1218
1215
1219 self._parents = None
1216 self._parents = None
1220 self._dirtyparents = False
1217 self._dirtyparents = False
1221
1218
1222 # for consistent view between _pl() and _read() invocations
1219 # for consistent view between _pl() and _read() invocations
1223 self._pendingmode = None
1220 self._pendingmode = None
1224
1221
1225 @propertycache
1222 @propertycache
1226 def _map(self):
1223 def _map(self):
1227 self._map = {}
1224 self._map = {}
1228 self.read()
1225 self.read()
1229 return self._map
1226 return self._map
1230
1227
1231 @propertycache
1228 @propertycache
1232 def copymap(self):
1229 def copymap(self):
1233 self.copymap = {}
1230 self.copymap = {}
1234 self._map
1231 self._map
1235 return self.copymap
1232 return self.copymap
1236
1233
1237 def clear(self):
1234 def clear(self):
1238 self._map.clear()
1235 self._map.clear()
1239 self.copymap.clear()
1236 self.copymap.clear()
1240 self.setparents(nullid, nullid)
1237 self.setparents(nullid, nullid)
1241 util.clearcachedproperty(self, "dirs")
1238 util.clearcachedproperty(self, "dirs")
1242 util.clearcachedproperty(self, "filefoldmap")
1239 util.clearcachedproperty(self, "filefoldmap")
1243 util.clearcachedproperty(self, "dirfoldmap")
1240 util.clearcachedproperty(self, "dirfoldmap")
1244 util.clearcachedproperty(self, "nonnormalset")
1241 util.clearcachedproperty(self, "nonnormalset")
1245 util.clearcachedproperty(self, "otherparentset")
1242 util.clearcachedproperty(self, "otherparentset")
1246
1243
1247 def iteritems(self):
1244 def iteritems(self):
1248 return self._map.iteritems()
1245 return self._map.iteritems()
1249
1246
1250 def __len__(self):
1247 def __len__(self):
1251 return len(self._map)
1248 return len(self._map)
1252
1249
1253 def __iter__(self):
1250 def __iter__(self):
1254 return iter(self._map)
1251 return iter(self._map)
1255
1252
1256 def get(self, key, default=None):
1253 def get(self, key, default=None):
1257 return self._map.get(key, default)
1254 return self._map.get(key, default)
1258
1255
1259 def __contains__(self, key):
1256 def __contains__(self, key):
1260 return key in self._map
1257 return key in self._map
1261
1258
1262 def __getitem__(self, key):
1259 def __getitem__(self, key):
1263 return self._map[key]
1260 return self._map[key]
1264
1261
1265 def keys(self):
1262 def keys(self):
1266 return self._map.keys()
1263 return self._map.keys()
1267
1264
1268 def preload(self):
1265 def preload(self):
1269 """Loads the underlying data, if it's not already loaded"""
1266 """Loads the underlying data, if it's not already loaded"""
1270 self._map
1267 self._map
1271
1268
1272 def addfile(self, f, oldstate, state, mode, size, mtime):
1269 def addfile(self, f, oldstate, state, mode, size, mtime):
1273 """Add a tracked file to the dirstate."""
1270 """Add a tracked file to the dirstate."""
1274 if oldstate in "?r" and "dirs" in self.__dict__:
1271 if oldstate in "?r" and "dirs" in self.__dict__:
1275 self.dirs.addpath(f)
1272 self.dirs.addpath(f)
1276 self._map[f] = dirstatetuple(state, mode, size, mtime)
1273 self._map[f] = dirstatetuple(state, mode, size, mtime)
1277 if state != 'n' or mtime == -1:
1274 if state != 'n' or mtime == -1:
1278 self.nonnormalset.add(f)
1275 self.nonnormalset.add(f)
1279 if size == -2:
1276 if size == -2:
1280 self.otherparentset.add(f)
1277 self.otherparentset.add(f)
1281
1278
1282 def removefile(self, f, oldstate, size):
1279 def removefile(self, f, oldstate, size):
1283 """
1280 """
1284 Mark a file as removed in the dirstate.
1281 Mark a file as removed in the dirstate.
1285
1282
1286 The `size` parameter is used to store sentinel values that indicate
1283 The `size` parameter is used to store sentinel values that indicate
1287 the file's previous state. In the future, we should refactor this
1284 the file's previous state. In the future, we should refactor this
1288 to be more explicit about what that state is.
1285 to be more explicit about what that state is.
1289 """
1286 """
1290 if oldstate not in "?r" and "dirs" in self.__dict__:
1287 if oldstate not in "?r" and "dirs" in self.__dict__:
1291 self.dirs.delpath(f)
1288 self.dirs.delpath(f)
1292 if "filefoldmap" in self.__dict__:
1289 if "filefoldmap" in self.__dict__:
1293 normed = util.normcase(f)
1290 normed = util.normcase(f)
1294 self.filefoldmap.pop(normed, None)
1291 self.filefoldmap.pop(normed, None)
1295 self._map[f] = dirstatetuple('r', 0, size, 0)
1292 self._map[f] = dirstatetuple('r', 0, size, 0)
1296 self.nonnormalset.add(f)
1293 self.nonnormalset.add(f)
1297
1294
1298 def dropfile(self, f, oldstate):
1295 def dropfile(self, f, oldstate):
1299 """
1296 """
1300 Remove a file from the dirstate. Returns True if the file was
1297 Remove a file from the dirstate. Returns True if the file was
1301 previously recorded.
1298 previously recorded.
1302 """
1299 """
1303 exists = self._map.pop(f, None) is not None
1300 exists = self._map.pop(f, None) is not None
1304 if exists:
1301 if exists:
1305 if oldstate != "r" and "dirs" in self.__dict__:
1302 if oldstate != "r" and "dirs" in self.__dict__:
1306 self.dirs.delpath(f)
1303 self.dirs.delpath(f)
1307 if "filefoldmap" in self.__dict__:
1304 if "filefoldmap" in self.__dict__:
1308 normed = util.normcase(f)
1305 normed = util.normcase(f)
1309 self.filefoldmap.pop(normed, None)
1306 self.filefoldmap.pop(normed, None)
1310 self.nonnormalset.discard(f)
1307 self.nonnormalset.discard(f)
1311 return exists
1308 return exists
1312
1309
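A sketch of the addfile/removefile/dropfile API shown above, assuming dmap is an existing dirstatemap instance; the mode/size/mtime sentinels used here for an added file ('a', 0, -1, -1) follow conventions used elsewhere in dirstate.py but are illustrative rather than authoritative.

def track_then_forget(dmap, path):
    # Illustrative call sequence only, not part of dirstatemap.
    dmap.addfile(path, '?', 'a', 0, -1, -1)   # previously untracked -> added
    dmap.removefile(path, 'a', 0)             # added -> marked as removed
    return dmap.dropfile(path, 'r')           # forget it; True if it was present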
1313 def clearambiguoustimes(self, files, now):
1310 def clearambiguoustimes(self, files, now):
1314 for f in files:
1311 for f in files:
1315 e = self.get(f)
1312 e = self.get(f)
1316 if e is not None and e[0] == 'n' and e[3] == now:
1313 if e is not None and e[0] == 'n' and e[3] == now:
1317 self._map[f] = dirstatetuple(e[0], e[1], e[2], -1)
1314 self._map[f] = dirstatetuple(e[0], e[1], e[2], -1)
1318 self.nonnormalset.add(f)
1315 self.nonnormalset.add(f)
1319
1316
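clearambiguoustimes() above encodes a race-avoidance rule: a 'normal' entry whose mtime equals the timestamp being written out cannot be trusted, because the file could still change within that same second, so the mtime is reset to -1 to force a later content check. A standalone sketch of the same rule over a plain dict, not part of dirstatemap:

def clear_ambiguous_times(entries, now):
    # entries: path -> (state, mode, size, mtime); resets ambiguous mtimes
    # to -1 and returns the affected paths (illustrative helper only).
    ambiguous = []
    for path, (state, mode, size, mtime) in entries.items():
        if state == 'n' and mtime == now:
            entries[path] = (state, mode, size, -1)
            ambiguous.append(path)
    return ambiguous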
1320 def nonnormalentries(self):
1317 def nonnormalentries(self):
1321 '''Compute the nonnormal dirstate entries from the dmap'''
1318 '''Compute the nonnormal dirstate entries from the dmap'''
1322 try:
1319 try:
1323 return parsers.nonnormalotherparententries(self._map)
1320 return parsers.nonnormalotherparententries(self._map)
1324 except AttributeError:
1321 except AttributeError:
1325 nonnorm = set()
1322 nonnorm = set()
1326 otherparent = set()
1323 otherparent = set()
1327 for fname, e in self._map.iteritems():
1324 for fname, e in self._map.iteritems():
1328 if e[0] != 'n' or e[3] == -1:
1325 if e[0] != 'n' or e[3] == -1:
1329 nonnorm.add(fname)
1326 nonnorm.add(fname)
1330 if e[0] == 'n' and e[2] == -2:
1327 if e[0] == 'n' and e[2] == -2:
1331 otherparent.add(fname)
1328 otherparent.add(fname)
1332 return nonnorm, otherparent
1329 return nonnorm, otherparent
1333
1330
1334 @propertycache
1331 @propertycache
1335 def filefoldmap(self):
1332 def filefoldmap(self):
1336 """Returns a dictionary mapping normalized case paths to their
1333 """Returns a dictionary mapping normalized case paths to their
1337 non-normalized versions.
1334 non-normalized versions.
1338 """
1335 """
1339 try:
1336 try:
1340 makefilefoldmap = parsers.make_file_foldmap
1337 makefilefoldmap = parsers.make_file_foldmap
1341 except AttributeError:
1338 except AttributeError:
1342 pass
1339 pass
1343 else:
1340 else:
1344 return makefilefoldmap(self._map, util.normcasespec,
1341 return makefilefoldmap(self._map, util.normcasespec,
1345 util.normcasefallback)
1342 util.normcasefallback)
1346
1343
1347 f = {}
1344 f = {}
1348 normcase = util.normcase
1345 normcase = util.normcase
1349 for name, s in self._map.iteritems():
1346 for name, s in self._map.iteritems():
1350 if s[0] != 'r':
1347 if s[0] != 'r':
1351 f[normcase(name)] = name
1348 f[normcase(name)] = name
1352 f['.'] = '.' # prevents useless util.fspath() invocation
1349 f['.'] = '.' # prevents useless util.fspath() invocation
1353 return f
1350 return f
1354
1351
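A minimal sketch, not part of dirstatemap, of how a fold map like the one built above is used for case-insensitive lookups; it folds with str.lower() for brevity, whereas the code above folds with util.normcase.

def lookup_tracked(filefoldmap, user_path):
    # Map a possibly differently-cased path to the case recorded in the
    # dirstate, or None if the file is not tracked (illustrative only).
    return filefoldmap.get(user_path.lower())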
1355 @propertycache
1352 @propertycache
1356 def dirs(self):
1353 def dirs(self):
1357 """Returns a set-like object containing all the directories in the
1354 """Returns a set-like object containing all the directories in the
1358 current dirstate.
1355 current dirstate.
1359 """
1356 """
1360 return util.dirs(self._map, 'r')
1357 return util.dirs(self._map, 'r')
1361
1358
1362 def _opendirstatefile(self):
1359 def _opendirstatefile(self):
1363 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1360 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1364 if self._pendingmode is not None and self._pendingmode != mode:
1361 if self._pendingmode is not None and self._pendingmode != mode:
1365 fp.close()
1362 fp.close()
1366 raise error.Abort(_('working directory state may be '
1363 raise error.Abort(_('working directory state may be '
1367 'changed in parallel'))
1364 'changed in parallel'))
1368 self._pendingmode = mode
1365 self._pendingmode = mode
1369 return fp
1366 return fp
1370
1367
1371 def parents(self):
1368 def parents(self):
1372 if not self._parents:
1369 if not self._parents:
1373 try:
1370 try:
1374 fp = self._opendirstatefile()
1371 fp = self._opendirstatefile()
1375 st = fp.read(40)
1372 st = fp.read(40)
1376 fp.close()
1373 fp.close()
1377 except IOError as err:
1374 except IOError as err:
1378 if err.errno != errno.ENOENT:
1375 if err.errno != errno.ENOENT:
1379 raise
1376 raise
1380 # File doesn't exist, so the current state is empty
1377 # File doesn't exist, so the current state is empty
1381 st = ''
1378 st = ''
1382
1379
1383 l = len(st)
1380 l = len(st)
1384 if l == 40:
1381 if l == 40:
1385 self._parents = st[:20], st[20:40]
1382 self._parents = st[:20], st[20:40]
1386 elif l == 0:
1383 elif l == 0:
1387 self._parents = [nullid, nullid]
1384 self._parents = [nullid, nullid]
1388 else:
1385 else:
1389 raise error.Abort(_('working directory state appears '
1386 raise error.Abort(_('working directory state appears '
1390 'damaged!'))
1387 'damaged!'))
1391
1388
1392 return self._parents
1389 return self._parents
1393
1390
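A sketch of the on-disk convention that parents() above relies on: the dirstate file begins with two 20-byte binary node ids, and an empty or missing file means both parents are null. This helper is illustrative and not part of dirstatemap.

NULLID = b'\0' * 20

def parse_dirstate_parents(header):
    # header: the leading bytes of a dirstate file (illustrative only).
    if not header:
        return NULLID, NULLID
    if len(header) < 40:
        raise ValueError('working directory state appears damaged')
    return header[:20], header[20:40]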
1394 def setparents(self, p1, p2):
1391 def setparents(self, p1, p2):
1395 self._parents = (p1, p2)
1392 self._parents = (p1, p2)
1396 self._dirtyparents = True
1393 self._dirtyparents = True
1397
1394
1398 def read(self):
1395 def read(self):
1399 # ignore HG_PENDING because identity is used only for writing
1396 # ignore HG_PENDING because identity is used only for writing
1400 self.identity = util.filestat.frompath(
1397 self.identity = util.filestat.frompath(
1401 self._opener.join(self._filename))
1398 self._opener.join(self._filename))
1402
1399
1403 try:
1400 try:
1404 fp = self._opendirstatefile()
1401 fp = self._opendirstatefile()
1405 try:
1402 try:
1406 st = fp.read()
1403 st = fp.read()
1407 finally:
1404 finally:
1408 fp.close()
1405 fp.close()
1409 except IOError as err:
1406 except IOError as err:
1410 if err.errno != errno.ENOENT:
1407 if err.errno != errno.ENOENT:
1411 raise
1408 raise
1412 return
1409 return
1413 if not st:
1410 if not st:
1414 return
1411 return
1415
1412
1416 if util.safehasattr(parsers, 'dict_new_presized'):
1413 if util.safehasattr(parsers, 'dict_new_presized'):
1417 # Make an estimate of the number of files in the dirstate based on
1414 # Make an estimate of the number of files in the dirstate based on
1418 # its size. From a linear regression on a set of real-world repos,
1415 # its size. From a linear regression on a set of real-world repos,
1419 # all over 10,000 files, the size of a dirstate entry is 85
1416 # all over 10,000 files, the size of a dirstate entry is 85
1420 # bytes. The cost of resizing is significantly higher than the cost
1417 # bytes. The cost of resizing is significantly higher than the cost
1421 # of filling in a larger presized dict, so subtract 20% from the
1418 # of filling in a larger presized dict, so subtract 20% from the
1422 # size.
1419 # size.
1423 #
1420 #
1424 # This heuristic is imperfect in many ways, so in a future dirstate
1421 # This heuristic is imperfect in many ways, so in a future dirstate
1425 # format update it makes sense to just record the number of entries
1422 # format update it makes sense to just record the number of entries
1426 # on write.
1423 # on write.
1427 self._map = parsers.dict_new_presized(len(st) / 71)
1424 self._map = parsers.dict_new_presized(len(st) / 71)
1428
1425
1429 # Python's garbage collector triggers a GC each time a certain number
1426 # Python's garbage collector triggers a GC each time a certain number
1430 # of container objects (the number being defined by
1427 # of container objects (the number being defined by
1431 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1428 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1432 # for each file in the dirstate. The C version then immediately marks
1429 # for each file in the dirstate. The C version then immediately marks
1433 # them as not to be tracked by the collector. However, this has no
1430 # them as not to be tracked by the collector. However, this has no
1434 # effect on when GCs are triggered, only on what objects the GC looks
1431 # effect on when GCs are triggered, only on what objects the GC looks
1435 # into. This means that O(number of files) GCs are unavoidable.
1432 # into. This means that O(number of files) GCs are unavoidable.
1436 # Depending on when in the process's lifetime the dirstate is parsed,
1433 # Depending on when in the process's lifetime the dirstate is parsed,
1437 # this can get very expensive. As a workaround, disable GC while
1434 # this can get very expensive. As a workaround, disable GC while
1438 # parsing the dirstate.
1435 # parsing the dirstate.
1439 #
1436 #
1440 # (we cannot decorate the function directly since it is in a C module)
1437 # (we cannot decorate the function directly since it is in a C module)
1441 parse_dirstate = util.nogc(parsers.parse_dirstate)
1438 parse_dirstate = util.nogc(parsers.parse_dirstate)
1442 p = parse_dirstate(self._map, self.copymap, st)
1439 p = parse_dirstate(self._map, self.copymap, st)
1443 if not self._dirtyparents:
1440 if not self._dirtyparents:
1444 self.setparents(*p)
1441 self.setparents(*p)
1445
1442
1446 # Avoid excess attribute lookups by fast pathing certain checks
1443 # Avoid excess attribute lookups by fast pathing certain checks
1447 self.__contains__ = self._map.__contains__
1444 self.__contains__ = self._map.__contains__
1448 self.__getitem__ = self._map.__getitem__
1445 self.__getitem__ = self._map.__getitem__
1449 self.get = self._map.get
1446 self.get = self._map.get
1450
1447
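read() above wraps the C parser with util.nogc to avoid triggering O(number of files) garbage-collection passes. The context manager below is a stand-in sketch of that idea, not util.nogc's actual implementation (which is a decorator in Mercurial's util module):

import gc
from contextlib import contextmanager

@contextmanager
def nogc():
    # Temporarily disable the cyclic garbage collector around an
    # allocation-heavy parse, then restore its previous state.
    was_enabled = gc.isenabled()
    gc.disable()
    try:
        yield
    finally:
        if was_enabled:
            gc.enable()

# Usage: with nogc(): parse(...)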
1451 def write(self, st, now):
1448 def write(self, st, now):
1452 st.write(parsers.pack_dirstate(self._map, self.copymap,
1449 st.write(parsers.pack_dirstate(self._map, self.copymap,
1453 self.parents(), now))
1450 self.parents(), now))
1454 st.close()
1451 st.close()
1455 self._dirtyparents = False
1452 self._dirtyparents = False
1456 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1453 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1457
1454
1458 @propertycache
1455 @propertycache
1459 def nonnormalset(self):
1456 def nonnormalset(self):
1460 nonnorm, otherparents = self.nonnormalentries()
1457 nonnorm, otherparents = self.nonnormalentries()
1461 self.otherparentset = otherparents
1458 self.otherparentset = otherparents
1462 return nonnorm
1459 return nonnorm
1463
1460
1464 @propertycache
1461 @propertycache
1465 def otherparentset(self):
1462 def otherparentset(self):
1466 nonnorm, otherparents = self.nonnormalentries()
1463 nonnorm, otherparents = self.nonnormalentries()
1467 self.nonnormalset = nonnorm
1464 self.nonnormalset = nonnorm
1468 return otherparents
1465 return otherparents
1469
1466
1470 @propertycache
1467 @propertycache
1471 def identity(self):
1468 def identity(self):
1472 self._map
1469 self._map
1473 return self.identity
1470 return self.identity
1474
1471
1475 @propertycache
1472 @propertycache
1476 def dirfoldmap(self):
1473 def dirfoldmap(self):
1477 f = {}
1474 f = {}
1478 normcase = util.normcase
1475 normcase = util.normcase
1479 for name in self.dirs:
1476 for name in self.dirs:
1480 f[normcase(name)] = name
1477 f[normcase(name)] = name
1481 return f
1478 return f