##// END OF EJS Templates
dirstate: drop deprecated methods (API)...
Matt Harbison -
r35969:265e91da default
parent child Browse files
Show More
@@ -1,1498 +1,1477 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import nullid
17 from .node import nullid
18 from . import (
18 from . import (
19 encoding,
19 encoding,
20 error,
20 error,
21 match as matchmod,
21 match as matchmod,
22 pathutil,
22 pathutil,
23 policy,
23 policy,
24 pycompat,
24 pycompat,
25 scmutil,
25 scmutil,
26 txnutil,
26 txnutil,
27 util,
27 util,
28 )
28 )
29
29
30 parsers = policy.importmod(r'parsers')
30 parsers = policy.importmod(r'parsers')
31
31
32 propertycache = util.propertycache
32 propertycache = util.propertycache
33 filecache = scmutil.filecache
33 filecache = scmutil.filecache
34 _rangemask = 0x7fffffff
34 _rangemask = 0x7fffffff
35
35
36 dirstatetuple = parsers.dirstatetuple
36 dirstatetuple = parsers.dirstatetuple
37
37
class repocache(filecache):
    """A filecache whose tracked files live inside the .hg/ directory."""
    def join(self, obj, fname):
        # Resolve fname through the repository's .hg opener.
        return obj._opener.join(fname)
42
42
class rootcache(filecache):
    """A filecache whose tracked files live in the repository root."""
    def join(self, obj, fname):
        # Resolve fname relative to the working directory root.
        return obj._join(fname)
47
47
def _getfsnow(vfs):
    '''Get "now" timestamp on filesystem

    A throwaway file is created via the vfs and its mtime read back, so
    the result reflects the clock and timestamp granularity of the
    filesystem backing the vfs rather than the system clock.
    '''
    fd, fname = vfs.mkstemp()
    try:
        return os.fstat(fd).st_mtime
    finally:
        # Always release the descriptor and remove the probe file.
        os.close(fd)
        vfs.unlink(fname)
56
56
57 class dirstate(object):
57 class dirstate(object):
58
58
59 def __init__(self, opener, ui, root, validate, sparsematchfn):
59 def __init__(self, opener, ui, root, validate, sparsematchfn):
60 '''Create a new dirstate object.
60 '''Create a new dirstate object.
61
61
62 opener is an open()-like callable that can be used to open the
62 opener is an open()-like callable that can be used to open the
63 dirstate file; root is the root of the directory tracked by
63 dirstate file; root is the root of the directory tracked by
64 the dirstate.
64 the dirstate.
65 '''
65 '''
66 self._opener = opener
66 self._opener = opener
67 self._validate = validate
67 self._validate = validate
68 self._root = root
68 self._root = root
69 self._sparsematchfn = sparsematchfn
69 self._sparsematchfn = sparsematchfn
70 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
70 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
71 # UNC path pointing to root share (issue4557)
71 # UNC path pointing to root share (issue4557)
72 self._rootdir = pathutil.normasprefix(root)
72 self._rootdir = pathutil.normasprefix(root)
73 self._dirty = False
73 self._dirty = False
74 self._lastnormaltime = 0
74 self._lastnormaltime = 0
75 self._ui = ui
75 self._ui = ui
76 self._filecache = {}
76 self._filecache = {}
77 self._parentwriters = 0
77 self._parentwriters = 0
78 self._filename = 'dirstate'
78 self._filename = 'dirstate'
79 self._pendingfilename = '%s.pending' % self._filename
79 self._pendingfilename = '%s.pending' % self._filename
80 self._plchangecallbacks = {}
80 self._plchangecallbacks = {}
81 self._origpl = None
81 self._origpl = None
82 self._updatedfiles = set()
82 self._updatedfiles = set()
83 self._mapcls = dirstatemap
83 self._mapcls = dirstatemap
84
84
85 @contextlib.contextmanager
85 @contextlib.contextmanager
86 def parentchange(self):
86 def parentchange(self):
87 '''Context manager for handling dirstate parents.
87 '''Context manager for handling dirstate parents.
88
88
89 If an exception occurs in the scope of the context manager,
89 If an exception occurs in the scope of the context manager,
90 the incoherent dirstate won't be written when wlock is
90 the incoherent dirstate won't be written when wlock is
91 released.
91 released.
92 '''
92 '''
93 self._parentwriters += 1
93 self._parentwriters += 1
94 yield
94 yield
95 # Typically we want the "undo" step of a context manager in a
95 # Typically we want the "undo" step of a context manager in a
96 # finally block so it happens even when an exception
96 # finally block so it happens even when an exception
97 # occurs. In this case, however, we only want to decrement
97 # occurs. In this case, however, we only want to decrement
98 # parentwriters if the code in the with statement exits
98 # parentwriters if the code in the with statement exits
99 # normally, so we don't have a try/finally here on purpose.
99 # normally, so we don't have a try/finally here on purpose.
100 self._parentwriters -= 1
100 self._parentwriters -= 1
101
101
102 def beginparentchange(self):
103 '''Marks the beginning of a set of changes that involve changing
104 the dirstate parents. If there is an exception during this time,
105 the dirstate will not be written when the wlock is released. This
106 prevents writing an incoherent dirstate where the parent doesn't
107 match the contents.
108 '''
109 self._ui.deprecwarn('beginparentchange is obsoleted by the '
110 'parentchange context manager.', '4.3')
111 self._parentwriters += 1
112
113 def endparentchange(self):
114 '''Marks the end of a set of changes that involve changing the
115 dirstate parents. Once all parent changes have been marked done,
116 the wlock will be free to write the dirstate on release.
117 '''
118 self._ui.deprecwarn('endparentchange is obsoleted by the '
119 'parentchange context manager.', '4.3')
120 if self._parentwriters > 0:
121 self._parentwriters -= 1
122
123 def pendingparentchange(self):
102 def pendingparentchange(self):
124 '''Returns true if the dirstate is in the middle of a set of changes
103 '''Returns true if the dirstate is in the middle of a set of changes
125 that modify the dirstate parent.
104 that modify the dirstate parent.
126 '''
105 '''
127 return self._parentwriters > 0
106 return self._parentwriters > 0
128
107
129 @propertycache
108 @propertycache
130 def _map(self):
109 def _map(self):
131 """Return the dirstate contents (see documentation for dirstatemap)."""
110 """Return the dirstate contents (see documentation for dirstatemap)."""
132 self._map = self._mapcls(self._ui, self._opener, self._root)
111 self._map = self._mapcls(self._ui, self._opener, self._root)
133 return self._map
112 return self._map
134
113
135 @property
114 @property
136 def _sparsematcher(self):
115 def _sparsematcher(self):
137 """The matcher for the sparse checkout.
116 """The matcher for the sparse checkout.
138
117
139 The working directory may not include every file from a manifest. The
118 The working directory may not include every file from a manifest. The
140 matcher obtained by this property will match a path if it is to be
119 matcher obtained by this property will match a path if it is to be
141 included in the working directory.
120 included in the working directory.
142 """
121 """
143 # TODO there is potential to cache this property. For now, the matcher
122 # TODO there is potential to cache this property. For now, the matcher
144 # is resolved on every access. (But the called function does use a
123 # is resolved on every access. (But the called function does use a
145 # cache to keep the lookup fast.)
124 # cache to keep the lookup fast.)
146 return self._sparsematchfn()
125 return self._sparsematchfn()
147
126
148 @repocache('branch')
127 @repocache('branch')
149 def _branch(self):
128 def _branch(self):
150 try:
129 try:
151 return self._opener.read("branch").strip() or "default"
130 return self._opener.read("branch").strip() or "default"
152 except IOError as inst:
131 except IOError as inst:
153 if inst.errno != errno.ENOENT:
132 if inst.errno != errno.ENOENT:
154 raise
133 raise
155 return "default"
134 return "default"
156
135
157 @property
136 @property
158 def _pl(self):
137 def _pl(self):
159 return self._map.parents()
138 return self._map.parents()
160
139
161 def hasdir(self, d):
140 def hasdir(self, d):
162 return self._map.hastrackeddir(d)
141 return self._map.hastrackeddir(d)
163
142
164 @rootcache('.hgignore')
143 @rootcache('.hgignore')
165 def _ignore(self):
144 def _ignore(self):
166 files = self._ignorefiles()
145 files = self._ignorefiles()
167 if not files:
146 if not files:
168 return matchmod.never(self._root, '')
147 return matchmod.never(self._root, '')
169
148
170 pats = ['include:%s' % f for f in files]
149 pats = ['include:%s' % f for f in files]
171 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
150 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
172
151
173 @propertycache
152 @propertycache
174 def _slash(self):
153 def _slash(self):
175 return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'
154 return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'
176
155
177 @propertycache
156 @propertycache
178 def _checklink(self):
157 def _checklink(self):
179 return util.checklink(self._root)
158 return util.checklink(self._root)
180
159
181 @propertycache
160 @propertycache
182 def _checkexec(self):
161 def _checkexec(self):
183 return util.checkexec(self._root)
162 return util.checkexec(self._root)
184
163
185 @propertycache
164 @propertycache
186 def _checkcase(self):
165 def _checkcase(self):
187 return not util.fscasesensitive(self._join('.hg'))
166 return not util.fscasesensitive(self._join('.hg'))
188
167
189 def _join(self, f):
168 def _join(self, f):
190 # much faster than os.path.join()
169 # much faster than os.path.join()
191 # it's safe because f is always a relative path
170 # it's safe because f is always a relative path
192 return self._rootdir + f
171 return self._rootdir + f
193
172
194 def flagfunc(self, buildfallback):
173 def flagfunc(self, buildfallback):
195 if self._checklink and self._checkexec:
174 if self._checklink and self._checkexec:
196 def f(x):
175 def f(x):
197 try:
176 try:
198 st = os.lstat(self._join(x))
177 st = os.lstat(self._join(x))
199 if util.statislink(st):
178 if util.statislink(st):
200 return 'l'
179 return 'l'
201 if util.statisexec(st):
180 if util.statisexec(st):
202 return 'x'
181 return 'x'
203 except OSError:
182 except OSError:
204 pass
183 pass
205 return ''
184 return ''
206 return f
185 return f
207
186
208 fallback = buildfallback()
187 fallback = buildfallback()
209 if self._checklink:
188 if self._checklink:
210 def f(x):
189 def f(x):
211 if os.path.islink(self._join(x)):
190 if os.path.islink(self._join(x)):
212 return 'l'
191 return 'l'
213 if 'x' in fallback(x):
192 if 'x' in fallback(x):
214 return 'x'
193 return 'x'
215 return ''
194 return ''
216 return f
195 return f
217 if self._checkexec:
196 if self._checkexec:
218 def f(x):
197 def f(x):
219 if 'l' in fallback(x):
198 if 'l' in fallback(x):
220 return 'l'
199 return 'l'
221 if util.isexec(self._join(x)):
200 if util.isexec(self._join(x)):
222 return 'x'
201 return 'x'
223 return ''
202 return ''
224 return f
203 return f
225 else:
204 else:
226 return fallback
205 return fallback
227
206
228 @propertycache
207 @propertycache
229 def _cwd(self):
208 def _cwd(self):
230 # internal config: ui.forcecwd
209 # internal config: ui.forcecwd
231 forcecwd = self._ui.config('ui', 'forcecwd')
210 forcecwd = self._ui.config('ui', 'forcecwd')
232 if forcecwd:
211 if forcecwd:
233 return forcecwd
212 return forcecwd
234 return pycompat.getcwd()
213 return pycompat.getcwd()
235
214
236 def getcwd(self):
215 def getcwd(self):
237 '''Return the path from which a canonical path is calculated.
216 '''Return the path from which a canonical path is calculated.
238
217
239 This path should be used to resolve file patterns or to convert
218 This path should be used to resolve file patterns or to convert
240 canonical paths back to file paths for display. It shouldn't be
219 canonical paths back to file paths for display. It shouldn't be
241 used to get real file paths. Use vfs functions instead.
220 used to get real file paths. Use vfs functions instead.
242 '''
221 '''
243 cwd = self._cwd
222 cwd = self._cwd
244 if cwd == self._root:
223 if cwd == self._root:
245 return ''
224 return ''
246 # self._root ends with a path separator if self._root is '/' or 'C:\'
225 # self._root ends with a path separator if self._root is '/' or 'C:\'
247 rootsep = self._root
226 rootsep = self._root
248 if not util.endswithsep(rootsep):
227 if not util.endswithsep(rootsep):
249 rootsep += pycompat.ossep
228 rootsep += pycompat.ossep
250 if cwd.startswith(rootsep):
229 if cwd.startswith(rootsep):
251 return cwd[len(rootsep):]
230 return cwd[len(rootsep):]
252 else:
231 else:
253 # we're outside the repo. return an absolute path.
232 # we're outside the repo. return an absolute path.
254 return cwd
233 return cwd
255
234
256 def pathto(self, f, cwd=None):
235 def pathto(self, f, cwd=None):
257 if cwd is None:
236 if cwd is None:
258 cwd = self.getcwd()
237 cwd = self.getcwd()
259 path = util.pathto(self._root, cwd, f)
238 path = util.pathto(self._root, cwd, f)
260 if self._slash:
239 if self._slash:
261 return util.pconvert(path)
240 return util.pconvert(path)
262 return path
241 return path
263
242
264 def __getitem__(self, key):
243 def __getitem__(self, key):
265 '''Return the current state of key (a filename) in the dirstate.
244 '''Return the current state of key (a filename) in the dirstate.
266
245
267 States are:
246 States are:
268 n normal
247 n normal
269 m needs merging
248 m needs merging
270 r marked for removal
249 r marked for removal
271 a marked for addition
250 a marked for addition
272 ? not tracked
251 ? not tracked
273 '''
252 '''
274 return self._map.get(key, ("?",))[0]
253 return self._map.get(key, ("?",))[0]
275
254
276 def __contains__(self, key):
255 def __contains__(self, key):
277 return key in self._map
256 return key in self._map
278
257
279 def __iter__(self):
258 def __iter__(self):
280 return iter(sorted(self._map))
259 return iter(sorted(self._map))
281
260
282 def items(self):
261 def items(self):
283 return self._map.iteritems()
262 return self._map.iteritems()
284
263
285 iteritems = items
264 iteritems = items
286
265
287 def parents(self):
266 def parents(self):
288 return [self._validate(p) for p in self._pl]
267 return [self._validate(p) for p in self._pl]
289
268
290 def p1(self):
269 def p1(self):
291 return self._validate(self._pl[0])
270 return self._validate(self._pl[0])
292
271
293 def p2(self):
272 def p2(self):
294 return self._validate(self._pl[1])
273 return self._validate(self._pl[1])
295
274
296 def branch(self):
275 def branch(self):
297 return encoding.tolocal(self._branch)
276 return encoding.tolocal(self._branch)
298
277
299 def setparents(self, p1, p2=nullid):
278 def setparents(self, p1, p2=nullid):
300 """Set dirstate parents to p1 and p2.
279 """Set dirstate parents to p1 and p2.
301
280
302 When moving from two parents to one, 'm' merged entries a
281 When moving from two parents to one, 'm' merged entries a
303 adjusted to normal and previous copy records discarded and
282 adjusted to normal and previous copy records discarded and
304 returned by the call.
283 returned by the call.
305
284
306 See localrepo.setparents()
285 See localrepo.setparents()
307 """
286 """
308 if self._parentwriters == 0:
287 if self._parentwriters == 0:
309 raise ValueError("cannot set dirstate parent without "
288 raise ValueError("cannot set dirstate parent without "
310 "calling dirstate.beginparentchange")
289 "calling dirstate.beginparentchange")
311
290
312 self._dirty = True
291 self._dirty = True
313 oldp2 = self._pl[1]
292 oldp2 = self._pl[1]
314 if self._origpl is None:
293 if self._origpl is None:
315 self._origpl = self._pl
294 self._origpl = self._pl
316 self._map.setparents(p1, p2)
295 self._map.setparents(p1, p2)
317 copies = {}
296 copies = {}
318 if oldp2 != nullid and p2 == nullid:
297 if oldp2 != nullid and p2 == nullid:
319 candidatefiles = self._map.nonnormalset.union(
298 candidatefiles = self._map.nonnormalset.union(
320 self._map.otherparentset)
299 self._map.otherparentset)
321 for f in candidatefiles:
300 for f in candidatefiles:
322 s = self._map.get(f)
301 s = self._map.get(f)
323 if s is None:
302 if s is None:
324 continue
303 continue
325
304
326 # Discard 'm' markers when moving away from a merge state
305 # Discard 'm' markers when moving away from a merge state
327 if s[0] == 'm':
306 if s[0] == 'm':
328 source = self._map.copymap.get(f)
307 source = self._map.copymap.get(f)
329 if source:
308 if source:
330 copies[f] = source
309 copies[f] = source
331 self.normallookup(f)
310 self.normallookup(f)
332 # Also fix up otherparent markers
311 # Also fix up otherparent markers
333 elif s[0] == 'n' and s[2] == -2:
312 elif s[0] == 'n' and s[2] == -2:
334 source = self._map.copymap.get(f)
313 source = self._map.copymap.get(f)
335 if source:
314 if source:
336 copies[f] = source
315 copies[f] = source
337 self.add(f)
316 self.add(f)
338 return copies
317 return copies
339
318
340 def setbranch(self, branch):
319 def setbranch(self, branch):
341 self._branch = encoding.fromlocal(branch)
320 self._branch = encoding.fromlocal(branch)
342 f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
321 f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
343 try:
322 try:
344 f.write(self._branch + '\n')
323 f.write(self._branch + '\n')
345 f.close()
324 f.close()
346
325
347 # make sure filecache has the correct stat info for _branch after
326 # make sure filecache has the correct stat info for _branch after
348 # replacing the underlying file
327 # replacing the underlying file
349 ce = self._filecache['_branch']
328 ce = self._filecache['_branch']
350 if ce:
329 if ce:
351 ce.refresh()
330 ce.refresh()
352 except: # re-raises
331 except: # re-raises
353 f.discard()
332 f.discard()
354 raise
333 raise
355
334
356 def invalidate(self):
335 def invalidate(self):
357 '''Causes the next access to reread the dirstate.
336 '''Causes the next access to reread the dirstate.
358
337
359 This is different from localrepo.invalidatedirstate() because it always
338 This is different from localrepo.invalidatedirstate() because it always
360 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
339 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
361 check whether the dirstate has changed before rereading it.'''
340 check whether the dirstate has changed before rereading it.'''
362
341
363 for a in (r"_map", r"_branch", r"_ignore"):
342 for a in (r"_map", r"_branch", r"_ignore"):
364 if a in self.__dict__:
343 if a in self.__dict__:
365 delattr(self, a)
344 delattr(self, a)
366 self._lastnormaltime = 0
345 self._lastnormaltime = 0
367 self._dirty = False
346 self._dirty = False
368 self._updatedfiles.clear()
347 self._updatedfiles.clear()
369 self._parentwriters = 0
348 self._parentwriters = 0
370 self._origpl = None
349 self._origpl = None
371
350
372 def copy(self, source, dest):
351 def copy(self, source, dest):
373 """Mark dest as a copy of source. Unmark dest if source is None."""
352 """Mark dest as a copy of source. Unmark dest if source is None."""
374 if source == dest:
353 if source == dest:
375 return
354 return
376 self._dirty = True
355 self._dirty = True
377 if source is not None:
356 if source is not None:
378 self._map.copymap[dest] = source
357 self._map.copymap[dest] = source
379 self._updatedfiles.add(source)
358 self._updatedfiles.add(source)
380 self._updatedfiles.add(dest)
359 self._updatedfiles.add(dest)
381 elif self._map.copymap.pop(dest, None):
360 elif self._map.copymap.pop(dest, None):
382 self._updatedfiles.add(dest)
361 self._updatedfiles.add(dest)
383
362
384 def copied(self, file):
363 def copied(self, file):
385 return self._map.copymap.get(file, None)
364 return self._map.copymap.get(file, None)
386
365
387 def copies(self):
366 def copies(self):
388 return self._map.copymap
367 return self._map.copymap
389
368
390 def _addpath(self, f, state, mode, size, mtime):
369 def _addpath(self, f, state, mode, size, mtime):
391 oldstate = self[f]
370 oldstate = self[f]
392 if state == 'a' or oldstate == 'r':
371 if state == 'a' or oldstate == 'r':
393 scmutil.checkfilename(f)
372 scmutil.checkfilename(f)
394 if self._map.hastrackeddir(f):
373 if self._map.hastrackeddir(f):
395 raise error.Abort(_('directory %r already in dirstate') % f)
374 raise error.Abort(_('directory %r already in dirstate') % f)
396 # shadows
375 # shadows
397 for d in util.finddirs(f):
376 for d in util.finddirs(f):
398 if self._map.hastrackeddir(d):
377 if self._map.hastrackeddir(d):
399 break
378 break
400 entry = self._map.get(d)
379 entry = self._map.get(d)
401 if entry is not None and entry[0] != 'r':
380 if entry is not None and entry[0] != 'r':
402 raise error.Abort(
381 raise error.Abort(
403 _('file %r in dirstate clashes with %r') % (d, f))
382 _('file %r in dirstate clashes with %r') % (d, f))
404 self._dirty = True
383 self._dirty = True
405 self._updatedfiles.add(f)
384 self._updatedfiles.add(f)
406 self._map.addfile(f, oldstate, state, mode, size, mtime)
385 self._map.addfile(f, oldstate, state, mode, size, mtime)
407
386
408 def normal(self, f):
387 def normal(self, f):
409 '''Mark a file normal and clean.'''
388 '''Mark a file normal and clean.'''
410 s = os.lstat(self._join(f))
389 s = os.lstat(self._join(f))
411 mtime = s.st_mtime
390 mtime = s.st_mtime
412 self._addpath(f, 'n', s.st_mode,
391 self._addpath(f, 'n', s.st_mode,
413 s.st_size & _rangemask, mtime & _rangemask)
392 s.st_size & _rangemask, mtime & _rangemask)
414 self._map.copymap.pop(f, None)
393 self._map.copymap.pop(f, None)
415 if f in self._map.nonnormalset:
394 if f in self._map.nonnormalset:
416 self._map.nonnormalset.remove(f)
395 self._map.nonnormalset.remove(f)
417 if mtime > self._lastnormaltime:
396 if mtime > self._lastnormaltime:
418 # Remember the most recent modification timeslot for status(),
397 # Remember the most recent modification timeslot for status(),
419 # to make sure we won't miss future size-preserving file content
398 # to make sure we won't miss future size-preserving file content
420 # modifications that happen within the same timeslot.
399 # modifications that happen within the same timeslot.
421 self._lastnormaltime = mtime
400 self._lastnormaltime = mtime
422
401
423 def normallookup(self, f):
402 def normallookup(self, f):
424 '''Mark a file normal, but possibly dirty.'''
403 '''Mark a file normal, but possibly dirty.'''
425 if self._pl[1] != nullid:
404 if self._pl[1] != nullid:
426 # if there is a merge going on and the file was either
405 # if there is a merge going on and the file was either
427 # in state 'm' (-1) or coming from other parent (-2) before
406 # in state 'm' (-1) or coming from other parent (-2) before
428 # being removed, restore that state.
407 # being removed, restore that state.
429 entry = self._map.get(f)
408 entry = self._map.get(f)
430 if entry is not None:
409 if entry is not None:
431 if entry[0] == 'r' and entry[2] in (-1, -2):
410 if entry[0] == 'r' and entry[2] in (-1, -2):
432 source = self._map.copymap.get(f)
411 source = self._map.copymap.get(f)
433 if entry[2] == -1:
412 if entry[2] == -1:
434 self.merge(f)
413 self.merge(f)
435 elif entry[2] == -2:
414 elif entry[2] == -2:
436 self.otherparent(f)
415 self.otherparent(f)
437 if source:
416 if source:
438 self.copy(source, f)
417 self.copy(source, f)
439 return
418 return
440 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
419 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
441 return
420 return
442 self._addpath(f, 'n', 0, -1, -1)
421 self._addpath(f, 'n', 0, -1, -1)
443 self._map.copymap.pop(f, None)
422 self._map.copymap.pop(f, None)
444
423
445 def otherparent(self, f):
424 def otherparent(self, f):
446 '''Mark as coming from the other parent, always dirty.'''
425 '''Mark as coming from the other parent, always dirty.'''
447 if self._pl[1] == nullid:
426 if self._pl[1] == nullid:
448 raise error.Abort(_("setting %r to other parent "
427 raise error.Abort(_("setting %r to other parent "
449 "only allowed in merges") % f)
428 "only allowed in merges") % f)
450 if f in self and self[f] == 'n':
429 if f in self and self[f] == 'n':
451 # merge-like
430 # merge-like
452 self._addpath(f, 'm', 0, -2, -1)
431 self._addpath(f, 'm', 0, -2, -1)
453 else:
432 else:
454 # add-like
433 # add-like
455 self._addpath(f, 'n', 0, -2, -1)
434 self._addpath(f, 'n', 0, -2, -1)
456 self._map.copymap.pop(f, None)
435 self._map.copymap.pop(f, None)
457
436
458 def add(self, f):
437 def add(self, f):
459 '''Mark a file added.'''
438 '''Mark a file added.'''
460 self._addpath(f, 'a', 0, -1, -1)
439 self._addpath(f, 'a', 0, -1, -1)
461 self._map.copymap.pop(f, None)
440 self._map.copymap.pop(f, None)
462
441
463 def remove(self, f):
442 def remove(self, f):
464 '''Mark a file removed.'''
443 '''Mark a file removed.'''
465 self._dirty = True
444 self._dirty = True
466 oldstate = self[f]
445 oldstate = self[f]
467 size = 0
446 size = 0
468 if self._pl[1] != nullid:
447 if self._pl[1] != nullid:
469 entry = self._map.get(f)
448 entry = self._map.get(f)
470 if entry is not None:
449 if entry is not None:
471 # backup the previous state
450 # backup the previous state
472 if entry[0] == 'm': # merge
451 if entry[0] == 'm': # merge
473 size = -1
452 size = -1
474 elif entry[0] == 'n' and entry[2] == -2: # other parent
453 elif entry[0] == 'n' and entry[2] == -2: # other parent
475 size = -2
454 size = -2
476 self._map.otherparentset.add(f)
455 self._map.otherparentset.add(f)
477 self._updatedfiles.add(f)
456 self._updatedfiles.add(f)
478 self._map.removefile(f, oldstate, size)
457 self._map.removefile(f, oldstate, size)
479 if size == 0:
458 if size == 0:
480 self._map.copymap.pop(f, None)
459 self._map.copymap.pop(f, None)
481
460
482 def merge(self, f):
461 def merge(self, f):
483 '''Mark a file merged.'''
462 '''Mark a file merged.'''
484 if self._pl[1] == nullid:
463 if self._pl[1] == nullid:
485 return self.normallookup(f)
464 return self.normallookup(f)
486 return self.otherparent(f)
465 return self.otherparent(f)
487
466
488 def drop(self, f):
467 def drop(self, f):
489 '''Drop a file from the dirstate'''
468 '''Drop a file from the dirstate'''
490 oldstate = self[f]
469 oldstate = self[f]
491 if self._map.dropfile(f, oldstate):
470 if self._map.dropfile(f, oldstate):
492 self._dirty = True
471 self._dirty = True
493 self._updatedfiles.add(f)
472 self._updatedfiles.add(f)
494 self._map.copymap.pop(f, None)
473 self._map.copymap.pop(f, None)
495
474
496 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
475 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
497 if exists is None:
476 if exists is None:
498 exists = os.path.lexists(os.path.join(self._root, path))
477 exists = os.path.lexists(os.path.join(self._root, path))
499 if not exists:
478 if not exists:
500 # Maybe a path component exists
479 # Maybe a path component exists
501 if not ignoremissing and '/' in path:
480 if not ignoremissing and '/' in path:
502 d, f = path.rsplit('/', 1)
481 d, f = path.rsplit('/', 1)
503 d = self._normalize(d, False, ignoremissing, None)
482 d = self._normalize(d, False, ignoremissing, None)
504 folded = d + "/" + f
483 folded = d + "/" + f
505 else:
484 else:
506 # No path components, preserve original case
485 # No path components, preserve original case
507 folded = path
486 folded = path
508 else:
487 else:
509 # recursively normalize leading directory components
488 # recursively normalize leading directory components
510 # against dirstate
489 # against dirstate
511 if '/' in normed:
490 if '/' in normed:
512 d, f = normed.rsplit('/', 1)
491 d, f = normed.rsplit('/', 1)
513 d = self._normalize(d, False, ignoremissing, True)
492 d = self._normalize(d, False, ignoremissing, True)
514 r = self._root + "/" + d
493 r = self._root + "/" + d
515 folded = d + "/" + util.fspath(f, r)
494 folded = d + "/" + util.fspath(f, r)
516 else:
495 else:
517 folded = util.fspath(normed, self._root)
496 folded = util.fspath(normed, self._root)
518 storemap[normed] = folded
497 storemap[normed] = folded
519
498
520 return folded
499 return folded
521
500
522 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
501 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
523 normed = util.normcase(path)
502 normed = util.normcase(path)
524 folded = self._map.filefoldmap.get(normed, None)
503 folded = self._map.filefoldmap.get(normed, None)
525 if folded is None:
504 if folded is None:
526 if isknown:
505 if isknown:
527 folded = path
506 folded = path
528 else:
507 else:
529 folded = self._discoverpath(path, normed, ignoremissing, exists,
508 folded = self._discoverpath(path, normed, ignoremissing, exists,
530 self._map.filefoldmap)
509 self._map.filefoldmap)
531 return folded
510 return folded
532
511
533 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
512 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
534 normed = util.normcase(path)
513 normed = util.normcase(path)
535 folded = self._map.filefoldmap.get(normed, None)
514 folded = self._map.filefoldmap.get(normed, None)
536 if folded is None:
515 if folded is None:
537 folded = self._map.dirfoldmap.get(normed, None)
516 folded = self._map.dirfoldmap.get(normed, None)
538 if folded is None:
517 if folded is None:
539 if isknown:
518 if isknown:
540 folded = path
519 folded = path
541 else:
520 else:
542 # store discovered result in dirfoldmap so that future
521 # store discovered result in dirfoldmap so that future
543 # normalizefile calls don't start matching directories
522 # normalizefile calls don't start matching directories
544 folded = self._discoverpath(path, normed, ignoremissing, exists,
523 folded = self._discoverpath(path, normed, ignoremissing, exists,
545 self._map.dirfoldmap)
524 self._map.dirfoldmap)
546 return folded
525 return folded
547
526
548 def normalize(self, path, isknown=False, ignoremissing=False):
527 def normalize(self, path, isknown=False, ignoremissing=False):
549 '''
528 '''
550 normalize the case of a pathname when on a casefolding filesystem
529 normalize the case of a pathname when on a casefolding filesystem
551
530
552 isknown specifies whether the filename came from walking the
531 isknown specifies whether the filename came from walking the
553 disk, to avoid extra filesystem access.
532 disk, to avoid extra filesystem access.
554
533
555 If ignoremissing is True, missing path are returned
534 If ignoremissing is True, missing path are returned
556 unchanged. Otherwise, we try harder to normalize possibly
535 unchanged. Otherwise, we try harder to normalize possibly
557 existing path components.
536 existing path components.
558
537
559 The normalized case is determined based on the following precedence:
538 The normalized case is determined based on the following precedence:
560
539
561 - version of name already stored in the dirstate
540 - version of name already stored in the dirstate
562 - version of name stored on disk
541 - version of name stored on disk
563 - version provided via command arguments
542 - version provided via command arguments
564 '''
543 '''
565
544
566 if self._checkcase:
545 if self._checkcase:
567 return self._normalize(path, isknown, ignoremissing)
546 return self._normalize(path, isknown, ignoremissing)
568 return path
547 return path
569
548
570 def clear(self):
549 def clear(self):
571 self._map.clear()
550 self._map.clear()
572 self._lastnormaltime = 0
551 self._lastnormaltime = 0
573 self._updatedfiles.clear()
552 self._updatedfiles.clear()
574 self._dirty = True
553 self._dirty = True
575
554
576 def rebuild(self, parent, allfiles, changedfiles=None):
555 def rebuild(self, parent, allfiles, changedfiles=None):
577 if changedfiles is None:
556 if changedfiles is None:
578 # Rebuild entire dirstate
557 # Rebuild entire dirstate
579 changedfiles = allfiles
558 changedfiles = allfiles
580 lastnormaltime = self._lastnormaltime
559 lastnormaltime = self._lastnormaltime
581 self.clear()
560 self.clear()
582 self._lastnormaltime = lastnormaltime
561 self._lastnormaltime = lastnormaltime
583
562
584 if self._origpl is None:
563 if self._origpl is None:
585 self._origpl = self._pl
564 self._origpl = self._pl
586 self._map.setparents(parent, nullid)
565 self._map.setparents(parent, nullid)
587 for f in changedfiles:
566 for f in changedfiles:
588 if f in allfiles:
567 if f in allfiles:
589 self.normallookup(f)
568 self.normallookup(f)
590 else:
569 else:
591 self.drop(f)
570 self.drop(f)
592
571
593 self._dirty = True
572 self._dirty = True
594
573
595 def identity(self):
574 def identity(self):
596 '''Return identity of dirstate itself to detect changing in storage
575 '''Return identity of dirstate itself to detect changing in storage
597
576
598 If identity of previous dirstate is equal to this, writing
577 If identity of previous dirstate is equal to this, writing
599 changes based on the former dirstate out can keep consistency.
578 changes based on the former dirstate out can keep consistency.
600 '''
579 '''
601 return self._map.identity
580 return self._map.identity
602
581
603 def write(self, tr):
582 def write(self, tr):
604 if not self._dirty:
583 if not self._dirty:
605 return
584 return
606
585
607 filename = self._filename
586 filename = self._filename
608 if tr:
587 if tr:
609 # 'dirstate.write()' is not only for writing in-memory
588 # 'dirstate.write()' is not only for writing in-memory
610 # changes out, but also for dropping ambiguous timestamp.
589 # changes out, but also for dropping ambiguous timestamp.
611 # delayed writing re-raise "ambiguous timestamp issue".
590 # delayed writing re-raise "ambiguous timestamp issue".
612 # See also the wiki page below for detail:
591 # See also the wiki page below for detail:
613 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
592 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
614
593
615 # emulate dropping timestamp in 'parsers.pack_dirstate'
594 # emulate dropping timestamp in 'parsers.pack_dirstate'
616 now = _getfsnow(self._opener)
595 now = _getfsnow(self._opener)
617 self._map.clearambiguoustimes(self._updatedfiles, now)
596 self._map.clearambiguoustimes(self._updatedfiles, now)
618
597
619 # emulate that all 'dirstate.normal' results are written out
598 # emulate that all 'dirstate.normal' results are written out
620 self._lastnormaltime = 0
599 self._lastnormaltime = 0
621 self._updatedfiles.clear()
600 self._updatedfiles.clear()
622
601
623 # delay writing in-memory changes out
602 # delay writing in-memory changes out
624 tr.addfilegenerator('dirstate', (self._filename,),
603 tr.addfilegenerator('dirstate', (self._filename,),
625 self._writedirstate, location='plain')
604 self._writedirstate, location='plain')
626 return
605 return
627
606
628 st = self._opener(filename, "w", atomictemp=True, checkambig=True)
607 st = self._opener(filename, "w", atomictemp=True, checkambig=True)
629 self._writedirstate(st)
608 self._writedirstate(st)
630
609
631 def addparentchangecallback(self, category, callback):
610 def addparentchangecallback(self, category, callback):
632 """add a callback to be called when the wd parents are changed
611 """add a callback to be called when the wd parents are changed
633
612
634 Callback will be called with the following arguments:
613 Callback will be called with the following arguments:
635 dirstate, (oldp1, oldp2), (newp1, newp2)
614 dirstate, (oldp1, oldp2), (newp1, newp2)
636
615
637 Category is a unique identifier to allow overwriting an old callback
616 Category is a unique identifier to allow overwriting an old callback
638 with a newer callback.
617 with a newer callback.
639 """
618 """
640 self._plchangecallbacks[category] = callback
619 self._plchangecallbacks[category] = callback
641
620
642 def _writedirstate(self, st):
621 def _writedirstate(self, st):
643 # notify callbacks about parents change
622 # notify callbacks about parents change
644 if self._origpl is not None and self._origpl != self._pl:
623 if self._origpl is not None and self._origpl != self._pl:
645 for c, callback in sorted(self._plchangecallbacks.iteritems()):
624 for c, callback in sorted(self._plchangecallbacks.iteritems()):
646 callback(self, self._origpl, self._pl)
625 callback(self, self._origpl, self._pl)
647 self._origpl = None
626 self._origpl = None
648 # use the modification time of the newly created temporary file as the
627 # use the modification time of the newly created temporary file as the
649 # filesystem's notion of 'now'
628 # filesystem's notion of 'now'
650 now = util.fstat(st).st_mtime & _rangemask
629 now = util.fstat(st).st_mtime & _rangemask
651
630
652 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
631 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
653 # timestamp of each entries in dirstate, because of 'now > mtime'
632 # timestamp of each entries in dirstate, because of 'now > mtime'
654 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite')
633 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite')
655 if delaywrite > 0:
634 if delaywrite > 0:
656 # do we have any files to delay for?
635 # do we have any files to delay for?
657 for f, e in self._map.iteritems():
636 for f, e in self._map.iteritems():
658 if e[0] == 'n' and e[3] == now:
637 if e[0] == 'n' and e[3] == now:
659 import time # to avoid useless import
638 import time # to avoid useless import
660 # rather than sleep n seconds, sleep until the next
639 # rather than sleep n seconds, sleep until the next
661 # multiple of n seconds
640 # multiple of n seconds
662 clock = time.time()
641 clock = time.time()
663 start = int(clock) - (int(clock) % delaywrite)
642 start = int(clock) - (int(clock) % delaywrite)
664 end = start + delaywrite
643 end = start + delaywrite
665 time.sleep(end - clock)
644 time.sleep(end - clock)
666 now = end # trust our estimate that the end is near now
645 now = end # trust our estimate that the end is near now
667 break
646 break
668
647
669 self._map.write(st, now)
648 self._map.write(st, now)
670 self._lastnormaltime = 0
649 self._lastnormaltime = 0
671 self._dirty = False
650 self._dirty = False
672
651
673 def _dirignore(self, f):
652 def _dirignore(self, f):
674 if f == '.':
653 if f == '.':
675 return False
654 return False
676 if self._ignore(f):
655 if self._ignore(f):
677 return True
656 return True
678 for p in util.finddirs(f):
657 for p in util.finddirs(f):
679 if self._ignore(p):
658 if self._ignore(p):
680 return True
659 return True
681 return False
660 return False
682
661
683 def _ignorefiles(self):
662 def _ignorefiles(self):
684 files = []
663 files = []
685 if os.path.exists(self._join('.hgignore')):
664 if os.path.exists(self._join('.hgignore')):
686 files.append(self._join('.hgignore'))
665 files.append(self._join('.hgignore'))
687 for name, path in self._ui.configitems("ui"):
666 for name, path in self._ui.configitems("ui"):
688 if name == 'ignore' or name.startswith('ignore.'):
667 if name == 'ignore' or name.startswith('ignore.'):
689 # we need to use os.path.join here rather than self._join
668 # we need to use os.path.join here rather than self._join
690 # because path is arbitrary and user-specified
669 # because path is arbitrary and user-specified
691 files.append(os.path.join(self._rootdir, util.expandpath(path)))
670 files.append(os.path.join(self._rootdir, util.expandpath(path)))
692 return files
671 return files
693
672
694 def _ignorefileandline(self, f):
673 def _ignorefileandline(self, f):
695 files = collections.deque(self._ignorefiles())
674 files = collections.deque(self._ignorefiles())
696 visited = set()
675 visited = set()
697 while files:
676 while files:
698 i = files.popleft()
677 i = files.popleft()
699 patterns = matchmod.readpatternfile(i, self._ui.warn,
678 patterns = matchmod.readpatternfile(i, self._ui.warn,
700 sourceinfo=True)
679 sourceinfo=True)
701 for pattern, lineno, line in patterns:
680 for pattern, lineno, line in patterns:
702 kind, p = matchmod._patsplit(pattern, 'glob')
681 kind, p = matchmod._patsplit(pattern, 'glob')
703 if kind == "subinclude":
682 if kind == "subinclude":
704 if p not in visited:
683 if p not in visited:
705 files.append(p)
684 files.append(p)
706 continue
685 continue
707 m = matchmod.match(self._root, '', [], [pattern],
686 m = matchmod.match(self._root, '', [], [pattern],
708 warn=self._ui.warn)
687 warn=self._ui.warn)
709 if m(f):
688 if m(f):
710 return (i, lineno, line)
689 return (i, lineno, line)
711 visited.add(i)
690 visited.add(i)
712 return (None, -1, "")
691 return (None, -1, "")
713
692
714 def _walkexplicit(self, match, subrepos):
693 def _walkexplicit(self, match, subrepos):
715 '''Get stat data about the files explicitly specified by match.
694 '''Get stat data about the files explicitly specified by match.
716
695
717 Return a triple (results, dirsfound, dirsnotfound).
696 Return a triple (results, dirsfound, dirsnotfound).
718 - results is a mapping from filename to stat result. It also contains
697 - results is a mapping from filename to stat result. It also contains
719 listings mapping subrepos and .hg to None.
698 listings mapping subrepos and .hg to None.
720 - dirsfound is a list of files found to be directories.
699 - dirsfound is a list of files found to be directories.
721 - dirsnotfound is a list of files that the dirstate thinks are
700 - dirsnotfound is a list of files that the dirstate thinks are
722 directories and that were not found.'''
701 directories and that were not found.'''
723
702
724 def badtype(mode):
703 def badtype(mode):
725 kind = _('unknown')
704 kind = _('unknown')
726 if stat.S_ISCHR(mode):
705 if stat.S_ISCHR(mode):
727 kind = _('character device')
706 kind = _('character device')
728 elif stat.S_ISBLK(mode):
707 elif stat.S_ISBLK(mode):
729 kind = _('block device')
708 kind = _('block device')
730 elif stat.S_ISFIFO(mode):
709 elif stat.S_ISFIFO(mode):
731 kind = _('fifo')
710 kind = _('fifo')
732 elif stat.S_ISSOCK(mode):
711 elif stat.S_ISSOCK(mode):
733 kind = _('socket')
712 kind = _('socket')
734 elif stat.S_ISDIR(mode):
713 elif stat.S_ISDIR(mode):
735 kind = _('directory')
714 kind = _('directory')
736 return _('unsupported file type (type is %s)') % kind
715 return _('unsupported file type (type is %s)') % kind
737
716
738 matchedir = match.explicitdir
717 matchedir = match.explicitdir
739 badfn = match.bad
718 badfn = match.bad
740 dmap = self._map
719 dmap = self._map
741 lstat = os.lstat
720 lstat = os.lstat
742 getkind = stat.S_IFMT
721 getkind = stat.S_IFMT
743 dirkind = stat.S_IFDIR
722 dirkind = stat.S_IFDIR
744 regkind = stat.S_IFREG
723 regkind = stat.S_IFREG
745 lnkkind = stat.S_IFLNK
724 lnkkind = stat.S_IFLNK
746 join = self._join
725 join = self._join
747 dirsfound = []
726 dirsfound = []
748 foundadd = dirsfound.append
727 foundadd = dirsfound.append
749 dirsnotfound = []
728 dirsnotfound = []
750 notfoundadd = dirsnotfound.append
729 notfoundadd = dirsnotfound.append
751
730
752 if not match.isexact() and self._checkcase:
731 if not match.isexact() and self._checkcase:
753 normalize = self._normalize
732 normalize = self._normalize
754 else:
733 else:
755 normalize = None
734 normalize = None
756
735
757 files = sorted(match.files())
736 files = sorted(match.files())
758 subrepos.sort()
737 subrepos.sort()
759 i, j = 0, 0
738 i, j = 0, 0
760 while i < len(files) and j < len(subrepos):
739 while i < len(files) and j < len(subrepos):
761 subpath = subrepos[j] + "/"
740 subpath = subrepos[j] + "/"
762 if files[i] < subpath:
741 if files[i] < subpath:
763 i += 1
742 i += 1
764 continue
743 continue
765 while i < len(files) and files[i].startswith(subpath):
744 while i < len(files) and files[i].startswith(subpath):
766 del files[i]
745 del files[i]
767 j += 1
746 j += 1
768
747
769 if not files or '.' in files:
748 if not files or '.' in files:
770 files = ['.']
749 files = ['.']
771 results = dict.fromkeys(subrepos)
750 results = dict.fromkeys(subrepos)
772 results['.hg'] = None
751 results['.hg'] = None
773
752
774 for ff in files:
753 for ff in files:
775 # constructing the foldmap is expensive, so don't do it for the
754 # constructing the foldmap is expensive, so don't do it for the
776 # common case where files is ['.']
755 # common case where files is ['.']
777 if normalize and ff != '.':
756 if normalize and ff != '.':
778 nf = normalize(ff, False, True)
757 nf = normalize(ff, False, True)
779 else:
758 else:
780 nf = ff
759 nf = ff
781 if nf in results:
760 if nf in results:
782 continue
761 continue
783
762
784 try:
763 try:
785 st = lstat(join(nf))
764 st = lstat(join(nf))
786 kind = getkind(st.st_mode)
765 kind = getkind(st.st_mode)
787 if kind == dirkind:
766 if kind == dirkind:
788 if nf in dmap:
767 if nf in dmap:
789 # file replaced by dir on disk but still in dirstate
768 # file replaced by dir on disk but still in dirstate
790 results[nf] = None
769 results[nf] = None
791 if matchedir:
770 if matchedir:
792 matchedir(nf)
771 matchedir(nf)
793 foundadd((nf, ff))
772 foundadd((nf, ff))
794 elif kind == regkind or kind == lnkkind:
773 elif kind == regkind or kind == lnkkind:
795 results[nf] = st
774 results[nf] = st
796 else:
775 else:
797 badfn(ff, badtype(kind))
776 badfn(ff, badtype(kind))
798 if nf in dmap:
777 if nf in dmap:
799 results[nf] = None
778 results[nf] = None
800 except OSError as inst: # nf not found on disk - it is dirstate only
779 except OSError as inst: # nf not found on disk - it is dirstate only
801 if nf in dmap: # does it exactly match a missing file?
780 if nf in dmap: # does it exactly match a missing file?
802 results[nf] = None
781 results[nf] = None
803 else: # does it match a missing directory?
782 else: # does it match a missing directory?
804 if self._map.hasdir(nf):
783 if self._map.hasdir(nf):
805 if matchedir:
784 if matchedir:
806 matchedir(nf)
785 matchedir(nf)
807 notfoundadd(nf)
786 notfoundadd(nf)
808 else:
787 else:
809 badfn(ff, encoding.strtolocal(inst.strerror))
788 badfn(ff, encoding.strtolocal(inst.strerror))
810
789
811 # Case insensitive filesystems cannot rely on lstat() failing to detect
790 # Case insensitive filesystems cannot rely on lstat() failing to detect
812 # a case-only rename. Prune the stat object for any file that does not
791 # a case-only rename. Prune the stat object for any file that does not
813 # match the case in the filesystem, if there are multiple files that
792 # match the case in the filesystem, if there are multiple files that
814 # normalize to the same path.
793 # normalize to the same path.
815 if match.isexact() and self._checkcase:
794 if match.isexact() and self._checkcase:
816 normed = {}
795 normed = {}
817
796
818 for f, st in results.iteritems():
797 for f, st in results.iteritems():
819 if st is None:
798 if st is None:
820 continue
799 continue
821
800
822 nc = util.normcase(f)
801 nc = util.normcase(f)
823 paths = normed.get(nc)
802 paths = normed.get(nc)
824
803
825 if paths is None:
804 if paths is None:
826 paths = set()
805 paths = set()
827 normed[nc] = paths
806 normed[nc] = paths
828
807
829 paths.add(f)
808 paths.add(f)
830
809
831 for norm, paths in normed.iteritems():
810 for norm, paths in normed.iteritems():
832 if len(paths) > 1:
811 if len(paths) > 1:
833 for path in paths:
812 for path in paths:
834 folded = self._discoverpath(path, norm, True, None,
813 folded = self._discoverpath(path, norm, True, None,
835 self._map.dirfoldmap)
814 self._map.dirfoldmap)
836 if path != folded:
815 if path != folded:
837 results[path] = None
816 results[path] = None
838
817
839 return results, dirsfound, dirsnotfound
818 return results, dirsfound, dirsnotfound
840
819
841 def walk(self, match, subrepos, unknown, ignored, full=True):
820 def walk(self, match, subrepos, unknown, ignored, full=True):
842 '''
821 '''
843 Walk recursively through the directory tree, finding all files
822 Walk recursively through the directory tree, finding all files
844 matched by match.
823 matched by match.
845
824
846 If full is False, maybe skip some known-clean files.
825 If full is False, maybe skip some known-clean files.
847
826
848 Return a dict mapping filename to stat-like object (either
827 Return a dict mapping filename to stat-like object (either
849 mercurial.osutil.stat instance or return value of os.stat()).
828 mercurial.osutil.stat instance or return value of os.stat()).
850
829
851 '''
830 '''
852 # full is a flag that extensions that hook into walk can use -- this
831 # full is a flag that extensions that hook into walk can use -- this
853 # implementation doesn't use it at all. This satisfies the contract
832 # implementation doesn't use it at all. This satisfies the contract
854 # because we only guarantee a "maybe".
833 # because we only guarantee a "maybe".
855
834
856 if ignored:
835 if ignored:
857 ignore = util.never
836 ignore = util.never
858 dirignore = util.never
837 dirignore = util.never
859 elif unknown:
838 elif unknown:
860 ignore = self._ignore
839 ignore = self._ignore
861 dirignore = self._dirignore
840 dirignore = self._dirignore
862 else:
841 else:
863 # if not unknown and not ignored, drop dir recursion and step 2
842 # if not unknown and not ignored, drop dir recursion and step 2
864 ignore = util.always
843 ignore = util.always
865 dirignore = util.always
844 dirignore = util.always
866
845
867 matchfn = match.matchfn
846 matchfn = match.matchfn
868 matchalways = match.always()
847 matchalways = match.always()
869 matchtdir = match.traversedir
848 matchtdir = match.traversedir
870 dmap = self._map
849 dmap = self._map
871 listdir = util.listdir
850 listdir = util.listdir
872 lstat = os.lstat
851 lstat = os.lstat
873 dirkind = stat.S_IFDIR
852 dirkind = stat.S_IFDIR
874 regkind = stat.S_IFREG
853 regkind = stat.S_IFREG
875 lnkkind = stat.S_IFLNK
854 lnkkind = stat.S_IFLNK
876 join = self._join
855 join = self._join
877
856
878 exact = skipstep3 = False
857 exact = skipstep3 = False
879 if match.isexact(): # match.exact
858 if match.isexact(): # match.exact
880 exact = True
859 exact = True
881 dirignore = util.always # skip step 2
860 dirignore = util.always # skip step 2
882 elif match.prefix(): # match.match, no patterns
861 elif match.prefix(): # match.match, no patterns
883 skipstep3 = True
862 skipstep3 = True
884
863
885 if not exact and self._checkcase:
864 if not exact and self._checkcase:
886 normalize = self._normalize
865 normalize = self._normalize
887 normalizefile = self._normalizefile
866 normalizefile = self._normalizefile
888 skipstep3 = False
867 skipstep3 = False
889 else:
868 else:
890 normalize = self._normalize
869 normalize = self._normalize
891 normalizefile = None
870 normalizefile = None
892
871
893 # step 1: find all explicit files
872 # step 1: find all explicit files
894 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
873 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
895
874
896 skipstep3 = skipstep3 and not (work or dirsnotfound)
875 skipstep3 = skipstep3 and not (work or dirsnotfound)
897 work = [d for d in work if not dirignore(d[0])]
876 work = [d for d in work if not dirignore(d[0])]
898
877
899 # step 2: visit subdirectories
878 # step 2: visit subdirectories
900 def traverse(work, alreadynormed):
879 def traverse(work, alreadynormed):
901 wadd = work.append
880 wadd = work.append
902 while work:
881 while work:
903 nd = work.pop()
882 nd = work.pop()
904 if not match.visitdir(nd):
883 if not match.visitdir(nd):
905 continue
884 continue
906 skip = None
885 skip = None
907 if nd == '.':
886 if nd == '.':
908 nd = ''
887 nd = ''
909 else:
888 else:
910 skip = '.hg'
889 skip = '.hg'
911 try:
890 try:
912 entries = listdir(join(nd), stat=True, skip=skip)
891 entries = listdir(join(nd), stat=True, skip=skip)
913 except OSError as inst:
892 except OSError as inst:
914 if inst.errno in (errno.EACCES, errno.ENOENT):
893 if inst.errno in (errno.EACCES, errno.ENOENT):
915 match.bad(self.pathto(nd),
894 match.bad(self.pathto(nd),
916 encoding.strtolocal(inst.strerror))
895 encoding.strtolocal(inst.strerror))
917 continue
896 continue
918 raise
897 raise
919 for f, kind, st in entries:
898 for f, kind, st in entries:
920 if normalizefile:
899 if normalizefile:
921 # even though f might be a directory, we're only
900 # even though f might be a directory, we're only
922 # interested in comparing it to files currently in the
901 # interested in comparing it to files currently in the
923 # dmap -- therefore normalizefile is enough
902 # dmap -- therefore normalizefile is enough
924 nf = normalizefile(nd and (nd + "/" + f) or f, True,
903 nf = normalizefile(nd and (nd + "/" + f) or f, True,
925 True)
904 True)
926 else:
905 else:
927 nf = nd and (nd + "/" + f) or f
906 nf = nd and (nd + "/" + f) or f
928 if nf not in results:
907 if nf not in results:
929 if kind == dirkind:
908 if kind == dirkind:
930 if not ignore(nf):
909 if not ignore(nf):
931 if matchtdir:
910 if matchtdir:
932 matchtdir(nf)
911 matchtdir(nf)
933 wadd(nf)
912 wadd(nf)
934 if nf in dmap and (matchalways or matchfn(nf)):
913 if nf in dmap and (matchalways or matchfn(nf)):
935 results[nf] = None
914 results[nf] = None
936 elif kind == regkind or kind == lnkkind:
915 elif kind == regkind or kind == lnkkind:
937 if nf in dmap:
916 if nf in dmap:
938 if matchalways or matchfn(nf):
917 if matchalways or matchfn(nf):
939 results[nf] = st
918 results[nf] = st
940 elif ((matchalways or matchfn(nf))
919 elif ((matchalways or matchfn(nf))
941 and not ignore(nf)):
920 and not ignore(nf)):
942 # unknown file -- normalize if necessary
921 # unknown file -- normalize if necessary
943 if not alreadynormed:
922 if not alreadynormed:
944 nf = normalize(nf, False, True)
923 nf = normalize(nf, False, True)
945 results[nf] = st
924 results[nf] = st
946 elif nf in dmap and (matchalways or matchfn(nf)):
925 elif nf in dmap and (matchalways or matchfn(nf)):
947 results[nf] = None
926 results[nf] = None
948
927
949 for nd, d in work:
928 for nd, d in work:
950 # alreadynormed means that processwork doesn't have to do any
929 # alreadynormed means that processwork doesn't have to do any
951 # expensive directory normalization
930 # expensive directory normalization
952 alreadynormed = not normalize or nd == d
931 alreadynormed = not normalize or nd == d
953 traverse([d], alreadynormed)
932 traverse([d], alreadynormed)
954
933
955 for s in subrepos:
934 for s in subrepos:
956 del results[s]
935 del results[s]
957 del results['.hg']
936 del results['.hg']
958
937
959 # step 3: visit remaining files from dmap
938 # step 3: visit remaining files from dmap
960 if not skipstep3 and not exact:
939 if not skipstep3 and not exact:
961 # If a dmap file is not in results yet, it was either
940 # If a dmap file is not in results yet, it was either
962 # a) not matching matchfn b) ignored, c) missing, or d) under a
941 # a) not matching matchfn b) ignored, c) missing, or d) under a
963 # symlink directory.
942 # symlink directory.
964 if not results and matchalways:
943 if not results and matchalways:
965 visit = [f for f in dmap]
944 visit = [f for f in dmap]
966 else:
945 else:
967 visit = [f for f in dmap if f not in results and matchfn(f)]
946 visit = [f for f in dmap if f not in results and matchfn(f)]
968 visit.sort()
947 visit.sort()
969
948
970 if unknown:
949 if unknown:
971 # unknown == True means we walked all dirs under the roots
950 # unknown == True means we walked all dirs under the roots
972 # that wasn't ignored, and everything that matched was stat'ed
951 # that wasn't ignored, and everything that matched was stat'ed
973 # and is already in results.
952 # and is already in results.
974 # The rest must thus be ignored or under a symlink.
953 # The rest must thus be ignored or under a symlink.
975 audit_path = pathutil.pathauditor(self._root, cached=True)
954 audit_path = pathutil.pathauditor(self._root, cached=True)
976
955
977 for nf in iter(visit):
956 for nf in iter(visit):
978 # If a stat for the same file was already added with a
957 # If a stat for the same file was already added with a
979 # different case, don't add one for this, since that would
958 # different case, don't add one for this, since that would
980 # make it appear as if the file exists under both names
959 # make it appear as if the file exists under both names
981 # on disk.
960 # on disk.
982 if (normalizefile and
961 if (normalizefile and
983 normalizefile(nf, True, True) in results):
962 normalizefile(nf, True, True) in results):
984 results[nf] = None
963 results[nf] = None
985 # Report ignored items in the dmap as long as they are not
964 # Report ignored items in the dmap as long as they are not
986 # under a symlink directory.
965 # under a symlink directory.
987 elif audit_path.check(nf):
966 elif audit_path.check(nf):
988 try:
967 try:
989 results[nf] = lstat(join(nf))
968 results[nf] = lstat(join(nf))
990 # file was just ignored, no links, and exists
969 # file was just ignored, no links, and exists
991 except OSError:
970 except OSError:
992 # file doesn't exist
971 # file doesn't exist
993 results[nf] = None
972 results[nf] = None
994 else:
973 else:
995 # It's either missing or under a symlink directory
974 # It's either missing or under a symlink directory
996 # which we in this case report as missing
975 # which we in this case report as missing
997 results[nf] = None
976 results[nf] = None
998 else:
977 else:
999 # We may not have walked the full directory tree above,
978 # We may not have walked the full directory tree above,
1000 # so stat and check everything we missed.
979 # so stat and check everything we missed.
1001 iv = iter(visit)
980 iv = iter(visit)
1002 for st in util.statfiles([join(i) for i in visit]):
981 for st in util.statfiles([join(i) for i in visit]):
1003 results[next(iv)] = st
982 results[next(iv)] = st
1004 return results
983 return results
1005
984
1006 def status(self, match, subrepos, ignored, clean, unknown):
985 def status(self, match, subrepos, ignored, clean, unknown):
1007 '''Determine the status of the working copy relative to the
986 '''Determine the status of the working copy relative to the
1008 dirstate and return a pair of (unsure, status), where status is of type
987 dirstate and return a pair of (unsure, status), where status is of type
1009 scmutil.status and:
988 scmutil.status and:
1010
989
1011 unsure:
990 unsure:
1012 files that might have been modified since the dirstate was
991 files that might have been modified since the dirstate was
1013 written, but need to be read to be sure (size is the same
992 written, but need to be read to be sure (size is the same
1014 but mtime differs)
993 but mtime differs)
1015 status.modified:
994 status.modified:
1016 files that have definitely been modified since the dirstate
995 files that have definitely been modified since the dirstate
1017 was written (different size or mode)
996 was written (different size or mode)
1018 status.clean:
997 status.clean:
1019 files that have definitely not been modified since the
998 files that have definitely not been modified since the
1020 dirstate was written
999 dirstate was written
1021 '''
1000 '''
1022 listignored, listclean, listunknown = ignored, clean, unknown
1001 listignored, listclean, listunknown = ignored, clean, unknown
1023 lookup, modified, added, unknown, ignored = [], [], [], [], []
1002 lookup, modified, added, unknown, ignored = [], [], [], [], []
1024 removed, deleted, clean = [], [], []
1003 removed, deleted, clean = [], [], []
1025
1004
1026 dmap = self._map
1005 dmap = self._map
1027 dmap.preload()
1006 dmap.preload()
1028 dcontains = dmap.__contains__
1007 dcontains = dmap.__contains__
1029 dget = dmap.__getitem__
1008 dget = dmap.__getitem__
1030 ladd = lookup.append # aka "unsure"
1009 ladd = lookup.append # aka "unsure"
1031 madd = modified.append
1010 madd = modified.append
1032 aadd = added.append
1011 aadd = added.append
1033 uadd = unknown.append
1012 uadd = unknown.append
1034 iadd = ignored.append
1013 iadd = ignored.append
1035 radd = removed.append
1014 radd = removed.append
1036 dadd = deleted.append
1015 dadd = deleted.append
1037 cadd = clean.append
1016 cadd = clean.append
1038 mexact = match.exact
1017 mexact = match.exact
1039 dirignore = self._dirignore
1018 dirignore = self._dirignore
1040 checkexec = self._checkexec
1019 checkexec = self._checkexec
1041 copymap = self._map.copymap
1020 copymap = self._map.copymap
1042 lastnormaltime = self._lastnormaltime
1021 lastnormaltime = self._lastnormaltime
1043
1022
1044 # We need to do full walks when either
1023 # We need to do full walks when either
1045 # - we're listing all clean files, or
1024 # - we're listing all clean files, or
1046 # - match.traversedir does something, because match.traversedir should
1025 # - match.traversedir does something, because match.traversedir should
1047 # be called for every dir in the working dir
1026 # be called for every dir in the working dir
1048 full = listclean or match.traversedir is not None
1027 full = listclean or match.traversedir is not None
1049 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1028 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1050 full=full).iteritems():
1029 full=full).iteritems():
1051 if not dcontains(fn):
1030 if not dcontains(fn):
1052 if (listignored or mexact(fn)) and dirignore(fn):
1031 if (listignored or mexact(fn)) and dirignore(fn):
1053 if listignored:
1032 if listignored:
1054 iadd(fn)
1033 iadd(fn)
1055 else:
1034 else:
1056 uadd(fn)
1035 uadd(fn)
1057 continue
1036 continue
1058
1037
1059 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1038 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1060 # written like that for performance reasons. dmap[fn] is not a
1039 # written like that for performance reasons. dmap[fn] is not a
1061 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1040 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1062 # opcode has fast paths when the value to be unpacked is a tuple or
1041 # opcode has fast paths when the value to be unpacked is a tuple or
1063 # a list, but falls back to creating a full-fledged iterator in
1042 # a list, but falls back to creating a full-fledged iterator in
1064 # general. That is much slower than simply accessing and storing the
1043 # general. That is much slower than simply accessing and storing the
1065 # tuple members one by one.
1044 # tuple members one by one.
1066 t = dget(fn)
1045 t = dget(fn)
1067 state = t[0]
1046 state = t[0]
1068 mode = t[1]
1047 mode = t[1]
1069 size = t[2]
1048 size = t[2]
1070 time = t[3]
1049 time = t[3]
1071
1050
1072 if not st and state in "nma":
1051 if not st and state in "nma":
1073 dadd(fn)
1052 dadd(fn)
1074 elif state == 'n':
1053 elif state == 'n':
1075 if (size >= 0 and
1054 if (size >= 0 and
1076 ((size != st.st_size and size != st.st_size & _rangemask)
1055 ((size != st.st_size and size != st.st_size & _rangemask)
1077 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1056 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1078 or size == -2 # other parent
1057 or size == -2 # other parent
1079 or fn in copymap):
1058 or fn in copymap):
1080 madd(fn)
1059 madd(fn)
1081 elif time != st.st_mtime and time != st.st_mtime & _rangemask:
1060 elif time != st.st_mtime and time != st.st_mtime & _rangemask:
1082 ladd(fn)
1061 ladd(fn)
1083 elif st.st_mtime == lastnormaltime:
1062 elif st.st_mtime == lastnormaltime:
1084 # fn may have just been marked as normal and it may have
1063 # fn may have just been marked as normal and it may have
1085 # changed in the same second without changing its size.
1064 # changed in the same second without changing its size.
1086 # This can happen if we quickly do multiple commits.
1065 # This can happen if we quickly do multiple commits.
1087 # Force lookup, so we don't miss such a racy file change.
1066 # Force lookup, so we don't miss such a racy file change.
1088 ladd(fn)
1067 ladd(fn)
1089 elif listclean:
1068 elif listclean:
1090 cadd(fn)
1069 cadd(fn)
1091 elif state == 'm':
1070 elif state == 'm':
1092 madd(fn)
1071 madd(fn)
1093 elif state == 'a':
1072 elif state == 'a':
1094 aadd(fn)
1073 aadd(fn)
1095 elif state == 'r':
1074 elif state == 'r':
1096 radd(fn)
1075 radd(fn)
1097
1076
1098 return (lookup, scmutil.status(modified, added, removed, deleted,
1077 return (lookup, scmutil.status(modified, added, removed, deleted,
1099 unknown, ignored, clean))
1078 unknown, ignored, clean))
1100
1079
1101 def matches(self, match):
1080 def matches(self, match):
1102 '''
1081 '''
1103 return files in the dirstate (in whatever state) filtered by match
1082 return files in the dirstate (in whatever state) filtered by match
1104 '''
1083 '''
1105 dmap = self._map
1084 dmap = self._map
1106 if match.always():
1085 if match.always():
1107 return dmap.keys()
1086 return dmap.keys()
1108 files = match.files()
1087 files = match.files()
1109 if match.isexact():
1088 if match.isexact():
1110 # fast path -- filter the other way around, since typically files is
1089 # fast path -- filter the other way around, since typically files is
1111 # much smaller than dmap
1090 # much smaller than dmap
1112 return [f for f in files if f in dmap]
1091 return [f for f in files if f in dmap]
1113 if match.prefix() and all(fn in dmap for fn in files):
1092 if match.prefix() and all(fn in dmap for fn in files):
1114 # fast path -- all the values are known to be files, so just return
1093 # fast path -- all the values are known to be files, so just return
1115 # that
1094 # that
1116 return list(files)
1095 return list(files)
1117 return [f for f in dmap if match(f)]
1096 return [f for f in dmap if match(f)]
1118
1097
1119 def _actualfilename(self, tr):
1098 def _actualfilename(self, tr):
1120 if tr:
1099 if tr:
1121 return self._pendingfilename
1100 return self._pendingfilename
1122 else:
1101 else:
1123 return self._filename
1102 return self._filename
1124
1103
1125 def savebackup(self, tr, backupname):
1104 def savebackup(self, tr, backupname):
1126 '''Save current dirstate into backup file'''
1105 '''Save current dirstate into backup file'''
1127 filename = self._actualfilename(tr)
1106 filename = self._actualfilename(tr)
1128 assert backupname != filename
1107 assert backupname != filename
1129
1108
1130 # use '_writedirstate' instead of 'write' to write changes certainly,
1109 # use '_writedirstate' instead of 'write' to write changes certainly,
1131 # because the latter omits writing out if transaction is running.
1110 # because the latter omits writing out if transaction is running.
1132 # output file will be used to create backup of dirstate at this point.
1111 # output file will be used to create backup of dirstate at this point.
1133 if self._dirty or not self._opener.exists(filename):
1112 if self._dirty or not self._opener.exists(filename):
1134 self._writedirstate(self._opener(filename, "w", atomictemp=True,
1113 self._writedirstate(self._opener(filename, "w", atomictemp=True,
1135 checkambig=True))
1114 checkambig=True))
1136
1115
1137 if tr:
1116 if tr:
1138 # ensure that subsequent tr.writepending returns True for
1117 # ensure that subsequent tr.writepending returns True for
1139 # changes written out above, even if dirstate is never
1118 # changes written out above, even if dirstate is never
1140 # changed after this
1119 # changed after this
1141 tr.addfilegenerator('dirstate', (self._filename,),
1120 tr.addfilegenerator('dirstate', (self._filename,),
1142 self._writedirstate, location='plain')
1121 self._writedirstate, location='plain')
1143
1122
1144 # ensure that pending file written above is unlinked at
1123 # ensure that pending file written above is unlinked at
1145 # failure, even if tr.writepending isn't invoked until the
1124 # failure, even if tr.writepending isn't invoked until the
1146 # end of this transaction
1125 # end of this transaction
1147 tr.registertmp(filename, location='plain')
1126 tr.registertmp(filename, location='plain')
1148
1127
1149 self._opener.tryunlink(backupname)
1128 self._opener.tryunlink(backupname)
1150 # hardlink backup is okay because _writedirstate is always called
1129 # hardlink backup is okay because _writedirstate is always called
1151 # with an "atomictemp=True" file.
1130 # with an "atomictemp=True" file.
1152 util.copyfile(self._opener.join(filename),
1131 util.copyfile(self._opener.join(filename),
1153 self._opener.join(backupname), hardlink=True)
1132 self._opener.join(backupname), hardlink=True)
1154
1133
1155 def restorebackup(self, tr, backupname):
1134 def restorebackup(self, tr, backupname):
1156 '''Restore dirstate by backup file'''
1135 '''Restore dirstate by backup file'''
1157 # this "invalidate()" prevents "wlock.release()" from writing
1136 # this "invalidate()" prevents "wlock.release()" from writing
1158 # changes of dirstate out after restoring from backup file
1137 # changes of dirstate out after restoring from backup file
1159 self.invalidate()
1138 self.invalidate()
1160 filename = self._actualfilename(tr)
1139 filename = self._actualfilename(tr)
1161 o = self._opener
1140 o = self._opener
1162 if util.samefile(o.join(backupname), o.join(filename)):
1141 if util.samefile(o.join(backupname), o.join(filename)):
1163 o.unlink(backupname)
1142 o.unlink(backupname)
1164 else:
1143 else:
1165 o.rename(backupname, filename, checkambig=True)
1144 o.rename(backupname, filename, checkambig=True)
1166
1145
1167 def clearbackup(self, tr, backupname):
1146 def clearbackup(self, tr, backupname):
1168 '''Clear backup file'''
1147 '''Clear backup file'''
1169 self._opener.unlink(backupname)
1148 self._opener.unlink(backupname)
1170
1149
1171 class dirstatemap(object):
1150 class dirstatemap(object):
1172 """Map encapsulating the dirstate's contents.
1151 """Map encapsulating the dirstate's contents.
1173
1152
1174 The dirstate contains the following state:
1153 The dirstate contains the following state:
1175
1154
1176 - `identity` is the identity of the dirstate file, which can be used to
1155 - `identity` is the identity of the dirstate file, which can be used to
1177 detect when changes have occurred to the dirstate file.
1156 detect when changes have occurred to the dirstate file.
1178
1157
1179 - `parents` is a pair containing the parents of the working copy. The
1158 - `parents` is a pair containing the parents of the working copy. The
1180 parents are updated by calling `setparents`.
1159 parents are updated by calling `setparents`.
1181
1160
1182 - the state map maps filenames to tuples of (state, mode, size, mtime),
1161 - the state map maps filenames to tuples of (state, mode, size, mtime),
1183 where state is a single character representing 'normal', 'added',
1162 where state is a single character representing 'normal', 'added',
1184 'removed', or 'merged'. It is read by treating the dirstate as a
1163 'removed', or 'merged'. It is read by treating the dirstate as a
1185 dict. File state is updated by calling the `addfile`, `removefile` and
1164 dict. File state is updated by calling the `addfile`, `removefile` and
1186 `dropfile` methods.
1165 `dropfile` methods.
1187
1166
1188 - `copymap` maps destination filenames to their source filename.
1167 - `copymap` maps destination filenames to their source filename.
1189
1168
1190 The dirstate also provides the following views onto the state:
1169 The dirstate also provides the following views onto the state:
1191
1170
1192 - `nonnormalset` is a set of the filenames that have state other
1171 - `nonnormalset` is a set of the filenames that have state other
1193 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1172 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1194
1173
1195 - `otherparentset` is a set of the filenames that are marked as coming
1174 - `otherparentset` is a set of the filenames that are marked as coming
1196 from the second parent when the dirstate is currently being merged.
1175 from the second parent when the dirstate is currently being merged.
1197
1176
1198 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1177 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1199 form that they appear as in the dirstate.
1178 form that they appear as in the dirstate.
1200
1179
1201 - `dirfoldmap` is a dict mapping normalized directory names to the
1180 - `dirfoldmap` is a dict mapping normalized directory names to the
1202 denormalized form that they appear as in the dirstate.
1181 denormalized form that they appear as in the dirstate.
1203 """
1182 """
1204
1183
1205 def __init__(self, ui, opener, root):
1184 def __init__(self, ui, opener, root):
1206 self._ui = ui
1185 self._ui = ui
1207 self._opener = opener
1186 self._opener = opener
1208 self._root = root
1187 self._root = root
1209 self._filename = 'dirstate'
1188 self._filename = 'dirstate'
1210
1189
1211 self._parents = None
1190 self._parents = None
1212 self._dirtyparents = False
1191 self._dirtyparents = False
1213
1192
1214 # for consistent view between _pl() and _read() invocations
1193 # for consistent view between _pl() and _read() invocations
1215 self._pendingmode = None
1194 self._pendingmode = None
1216
1195
1217 @propertycache
1196 @propertycache
1218 def _map(self):
1197 def _map(self):
1219 self._map = {}
1198 self._map = {}
1220 self.read()
1199 self.read()
1221 return self._map
1200 return self._map
1222
1201
1223 @propertycache
1202 @propertycache
1224 def copymap(self):
1203 def copymap(self):
1225 self.copymap = {}
1204 self.copymap = {}
1226 self._map
1205 self._map
1227 return self.copymap
1206 return self.copymap
1228
1207
1229 def clear(self):
1208 def clear(self):
1230 self._map.clear()
1209 self._map.clear()
1231 self.copymap.clear()
1210 self.copymap.clear()
1232 self.setparents(nullid, nullid)
1211 self.setparents(nullid, nullid)
1233 util.clearcachedproperty(self, "_dirs")
1212 util.clearcachedproperty(self, "_dirs")
1234 util.clearcachedproperty(self, "_alldirs")
1213 util.clearcachedproperty(self, "_alldirs")
1235 util.clearcachedproperty(self, "filefoldmap")
1214 util.clearcachedproperty(self, "filefoldmap")
1236 util.clearcachedproperty(self, "dirfoldmap")
1215 util.clearcachedproperty(self, "dirfoldmap")
1237 util.clearcachedproperty(self, "nonnormalset")
1216 util.clearcachedproperty(self, "nonnormalset")
1238 util.clearcachedproperty(self, "otherparentset")
1217 util.clearcachedproperty(self, "otherparentset")
1239
1218
1240 def items(self):
1219 def items(self):
1241 return self._map.iteritems()
1220 return self._map.iteritems()
1242
1221
1243 # forward for python2,3 compat
1222 # forward for python2,3 compat
1244 iteritems = items
1223 iteritems = items
1245
1224
1246 def __len__(self):
1225 def __len__(self):
1247 return len(self._map)
1226 return len(self._map)
1248
1227
1249 def __iter__(self):
1228 def __iter__(self):
1250 return iter(self._map)
1229 return iter(self._map)
1251
1230
1252 def get(self, key, default=None):
1231 def get(self, key, default=None):
1253 return self._map.get(key, default)
1232 return self._map.get(key, default)
1254
1233
1255 def __contains__(self, key):
1234 def __contains__(self, key):
1256 return key in self._map
1235 return key in self._map
1257
1236
1258 def __getitem__(self, key):
1237 def __getitem__(self, key):
1259 return self._map[key]
1238 return self._map[key]
1260
1239
1261 def keys(self):
1240 def keys(self):
1262 return self._map.keys()
1241 return self._map.keys()
1263
1242
1264 def preload(self):
1243 def preload(self):
1265 """Loads the underlying data, if it's not already loaded"""
1244 """Loads the underlying data, if it's not already loaded"""
1266 self._map
1245 self._map
1267
1246
1268 def addfile(self, f, oldstate, state, mode, size, mtime):
1247 def addfile(self, f, oldstate, state, mode, size, mtime):
1269 """Add a tracked file to the dirstate."""
1248 """Add a tracked file to the dirstate."""
1270 if oldstate in "?r" and r"_dirs" in self.__dict__:
1249 if oldstate in "?r" and r"_dirs" in self.__dict__:
1271 self._dirs.addpath(f)
1250 self._dirs.addpath(f)
1272 if oldstate == "?" and r"_alldirs" in self.__dict__:
1251 if oldstate == "?" and r"_alldirs" in self.__dict__:
1273 self._alldirs.addpath(f)
1252 self._alldirs.addpath(f)
1274 self._map[f] = dirstatetuple(state, mode, size, mtime)
1253 self._map[f] = dirstatetuple(state, mode, size, mtime)
1275 if state != 'n' or mtime == -1:
1254 if state != 'n' or mtime == -1:
1276 self.nonnormalset.add(f)
1255 self.nonnormalset.add(f)
1277 if size == -2:
1256 if size == -2:
1278 self.otherparentset.add(f)
1257 self.otherparentset.add(f)
1279
1258
1280 def removefile(self, f, oldstate, size):
1259 def removefile(self, f, oldstate, size):
1281 """
1260 """
1282 Mark a file as removed in the dirstate.
1261 Mark a file as removed in the dirstate.
1283
1262
1284 The `size` parameter is used to store sentinel values that indicate
1263 The `size` parameter is used to store sentinel values that indicate
1285 the file's previous state. In the future, we should refactor this
1264 the file's previous state. In the future, we should refactor this
1286 to be more explicit about what that state is.
1265 to be more explicit about what that state is.
1287 """
1266 """
1288 if oldstate not in "?r" and r"_dirs" in self.__dict__:
1267 if oldstate not in "?r" and r"_dirs" in self.__dict__:
1289 self._dirs.delpath(f)
1268 self._dirs.delpath(f)
1290 if oldstate == "?" and r"_alldirs" in self.__dict__:
1269 if oldstate == "?" and r"_alldirs" in self.__dict__:
1291 self._alldirs.addpath(f)
1270 self._alldirs.addpath(f)
1292 if r"filefoldmap" in self.__dict__:
1271 if r"filefoldmap" in self.__dict__:
1293 normed = util.normcase(f)
1272 normed = util.normcase(f)
1294 self.filefoldmap.pop(normed, None)
1273 self.filefoldmap.pop(normed, None)
1295 self._map[f] = dirstatetuple('r', 0, size, 0)
1274 self._map[f] = dirstatetuple('r', 0, size, 0)
1296 self.nonnormalset.add(f)
1275 self.nonnormalset.add(f)
1297
1276
1298 def dropfile(self, f, oldstate):
1277 def dropfile(self, f, oldstate):
1299 """
1278 """
1300 Remove a file from the dirstate. Returns True if the file was
1279 Remove a file from the dirstate. Returns True if the file was
1301 previously recorded.
1280 previously recorded.
1302 """
1281 """
1303 exists = self._map.pop(f, None) is not None
1282 exists = self._map.pop(f, None) is not None
1304 if exists:
1283 if exists:
1305 if oldstate != "r" and r"_dirs" in self.__dict__:
1284 if oldstate != "r" and r"_dirs" in self.__dict__:
1306 self._dirs.delpath(f)
1285 self._dirs.delpath(f)
1307 if r"_alldirs" in self.__dict__:
1286 if r"_alldirs" in self.__dict__:
1308 self._alldirs.delpath(f)
1287 self._alldirs.delpath(f)
1309 if r"filefoldmap" in self.__dict__:
1288 if r"filefoldmap" in self.__dict__:
1310 normed = util.normcase(f)
1289 normed = util.normcase(f)
1311 self.filefoldmap.pop(normed, None)
1290 self.filefoldmap.pop(normed, None)
1312 self.nonnormalset.discard(f)
1291 self.nonnormalset.discard(f)
1313 return exists
1292 return exists
1314
1293
1315 def clearambiguoustimes(self, files, now):
1294 def clearambiguoustimes(self, files, now):
1316 for f in files:
1295 for f in files:
1317 e = self.get(f)
1296 e = self.get(f)
1318 if e is not None and e[0] == 'n' and e[3] == now:
1297 if e is not None and e[0] == 'n' and e[3] == now:
1319 self._map[f] = dirstatetuple(e[0], e[1], e[2], -1)
1298 self._map[f] = dirstatetuple(e[0], e[1], e[2], -1)
1320 self.nonnormalset.add(f)
1299 self.nonnormalset.add(f)
1321
1300
1322 def nonnormalentries(self):
1301 def nonnormalentries(self):
1323 '''Compute the nonnormal dirstate entries from the dmap'''
1302 '''Compute the nonnormal dirstate entries from the dmap'''
1324 try:
1303 try:
1325 return parsers.nonnormalotherparententries(self._map)
1304 return parsers.nonnormalotherparententries(self._map)
1326 except AttributeError:
1305 except AttributeError:
1327 nonnorm = set()
1306 nonnorm = set()
1328 otherparent = set()
1307 otherparent = set()
1329 for fname, e in self._map.iteritems():
1308 for fname, e in self._map.iteritems():
1330 if e[0] != 'n' or e[3] == -1:
1309 if e[0] != 'n' or e[3] == -1:
1331 nonnorm.add(fname)
1310 nonnorm.add(fname)
1332 if e[0] == 'n' and e[2] == -2:
1311 if e[0] == 'n' and e[2] == -2:
1333 otherparent.add(fname)
1312 otherparent.add(fname)
1334 return nonnorm, otherparent
1313 return nonnorm, otherparent
1335
1314
1336 @propertycache
1315 @propertycache
1337 def filefoldmap(self):
1316 def filefoldmap(self):
1338 """Returns a dictionary mapping normalized case paths to their
1317 """Returns a dictionary mapping normalized case paths to their
1339 non-normalized versions.
1318 non-normalized versions.
1340 """
1319 """
1341 try:
1320 try:
1342 makefilefoldmap = parsers.make_file_foldmap
1321 makefilefoldmap = parsers.make_file_foldmap
1343 except AttributeError:
1322 except AttributeError:
1344 pass
1323 pass
1345 else:
1324 else:
1346 return makefilefoldmap(self._map, util.normcasespec,
1325 return makefilefoldmap(self._map, util.normcasespec,
1347 util.normcasefallback)
1326 util.normcasefallback)
1348
1327
1349 f = {}
1328 f = {}
1350 normcase = util.normcase
1329 normcase = util.normcase
1351 for name, s in self._map.iteritems():
1330 for name, s in self._map.iteritems():
1352 if s[0] != 'r':
1331 if s[0] != 'r':
1353 f[normcase(name)] = name
1332 f[normcase(name)] = name
1354 f['.'] = '.' # prevents useless util.fspath() invocation
1333 f['.'] = '.' # prevents useless util.fspath() invocation
1355 return f
1334 return f
1356
1335
1357 def hastrackeddir(self, d):
1336 def hastrackeddir(self, d):
1358 """
1337 """
1359 Returns True if the dirstate contains a tracked (not removed) file
1338 Returns True if the dirstate contains a tracked (not removed) file
1360 in this directory.
1339 in this directory.
1361 """
1340 """
1362 return d in self._dirs
1341 return d in self._dirs
1363
1342
1364 def hasdir(self, d):
1343 def hasdir(self, d):
1365 """
1344 """
1366 Returns True if the dirstate contains a file (tracked or removed)
1345 Returns True if the dirstate contains a file (tracked or removed)
1367 in this directory.
1346 in this directory.
1368 """
1347 """
1369 return d in self._alldirs
1348 return d in self._alldirs
1370
1349
1371 @propertycache
1350 @propertycache
1372 def _dirs(self):
1351 def _dirs(self):
1373 return util.dirs(self._map, 'r')
1352 return util.dirs(self._map, 'r')
1374
1353
1375 @propertycache
1354 @propertycache
1376 def _alldirs(self):
1355 def _alldirs(self):
1377 return util.dirs(self._map)
1356 return util.dirs(self._map)
1378
1357
1379 def _opendirstatefile(self):
1358 def _opendirstatefile(self):
1380 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1359 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
1381 if self._pendingmode is not None and self._pendingmode != mode:
1360 if self._pendingmode is not None and self._pendingmode != mode:
1382 fp.close()
1361 fp.close()
1383 raise error.Abort(_('working directory state may be '
1362 raise error.Abort(_('working directory state may be '
1384 'changed parallelly'))
1363 'changed parallelly'))
1385 self._pendingmode = mode
1364 self._pendingmode = mode
1386 return fp
1365 return fp
1387
1366
1388 def parents(self):
1367 def parents(self):
1389 if not self._parents:
1368 if not self._parents:
1390 try:
1369 try:
1391 fp = self._opendirstatefile()
1370 fp = self._opendirstatefile()
1392 st = fp.read(40)
1371 st = fp.read(40)
1393 fp.close()
1372 fp.close()
1394 except IOError as err:
1373 except IOError as err:
1395 if err.errno != errno.ENOENT:
1374 if err.errno != errno.ENOENT:
1396 raise
1375 raise
1397 # File doesn't exist, so the current state is empty
1376 # File doesn't exist, so the current state is empty
1398 st = ''
1377 st = ''
1399
1378
1400 l = len(st)
1379 l = len(st)
1401 if l == 40:
1380 if l == 40:
1402 self._parents = st[:20], st[20:40]
1381 self._parents = st[:20], st[20:40]
1403 elif l == 0:
1382 elif l == 0:
1404 self._parents = [nullid, nullid]
1383 self._parents = [nullid, nullid]
1405 else:
1384 else:
1406 raise error.Abort(_('working directory state appears '
1385 raise error.Abort(_('working directory state appears '
1407 'damaged!'))
1386 'damaged!'))
1408
1387
1409 return self._parents
1388 return self._parents
1410
1389
1411 def setparents(self, p1, p2):
1390 def setparents(self, p1, p2):
1412 self._parents = (p1, p2)
1391 self._parents = (p1, p2)
1413 self._dirtyparents = True
1392 self._dirtyparents = True
1414
1393
1415 def read(self):
1394 def read(self):
1416 # ignore HG_PENDING because identity is used only for writing
1395 # ignore HG_PENDING because identity is used only for writing
1417 self.identity = util.filestat.frompath(
1396 self.identity = util.filestat.frompath(
1418 self._opener.join(self._filename))
1397 self._opener.join(self._filename))
1419
1398
1420 try:
1399 try:
1421 fp = self._opendirstatefile()
1400 fp = self._opendirstatefile()
1422 try:
1401 try:
1423 st = fp.read()
1402 st = fp.read()
1424 finally:
1403 finally:
1425 fp.close()
1404 fp.close()
1426 except IOError as err:
1405 except IOError as err:
1427 if err.errno != errno.ENOENT:
1406 if err.errno != errno.ENOENT:
1428 raise
1407 raise
1429 return
1408 return
1430 if not st:
1409 if not st:
1431 return
1410 return
1432
1411
1433 if util.safehasattr(parsers, 'dict_new_presized'):
1412 if util.safehasattr(parsers, 'dict_new_presized'):
1434 # Make an estimate of the number of files in the dirstate based on
1413 # Make an estimate of the number of files in the dirstate based on
1435 # its size. From a linear regression on a set of real-world repos,
1414 # its size. From a linear regression on a set of real-world repos,
1436 # all over 10,000 files, the size of a dirstate entry is 85
1415 # all over 10,000 files, the size of a dirstate entry is 85
1437 # bytes. The cost of resizing is significantly higher than the cost
1416 # bytes. The cost of resizing is significantly higher than the cost
1438 # of filling in a larger presized dict, so subtract 20% from the
1417 # of filling in a larger presized dict, so subtract 20% from the
1439 # size.
1418 # size.
1440 #
1419 #
1441 # This heuristic is imperfect in many ways, so in a future dirstate
1420 # This heuristic is imperfect in many ways, so in a future dirstate
1442 # format update it makes sense to just record the number of entries
1421 # format update it makes sense to just record the number of entries
1443 # on write.
1422 # on write.
1444 self._map = parsers.dict_new_presized(len(st) / 71)
1423 self._map = parsers.dict_new_presized(len(st) / 71)
1445
1424
1446 # Python's garbage collector triggers a GC each time a certain number
1425 # Python's garbage collector triggers a GC each time a certain number
1447 # of container objects (the number being defined by
1426 # of container objects (the number being defined by
1448 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1427 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1449 # for each file in the dirstate. The C version then immediately marks
1428 # for each file in the dirstate. The C version then immediately marks
1450 # them as not to be tracked by the collector. However, this has no
1429 # them as not to be tracked by the collector. However, this has no
1451 # effect on when GCs are triggered, only on what objects the GC looks
1430 # effect on when GCs are triggered, only on what objects the GC looks
1452 # into. This means that O(number of files) GCs are unavoidable.
1431 # into. This means that O(number of files) GCs are unavoidable.
1453 # Depending on when in the process's lifetime the dirstate is parsed,
1432 # Depending on when in the process's lifetime the dirstate is parsed,
1454 # this can get very expensive. As a workaround, disable GC while
1433 # this can get very expensive. As a workaround, disable GC while
1455 # parsing the dirstate.
1434 # parsing the dirstate.
1456 #
1435 #
1457 # (we cannot decorate the function directly since it is in a C module)
1436 # (we cannot decorate the function directly since it is in a C module)
1458 parse_dirstate = util.nogc(parsers.parse_dirstate)
1437 parse_dirstate = util.nogc(parsers.parse_dirstate)
1459 p = parse_dirstate(self._map, self.copymap, st)
1438 p = parse_dirstate(self._map, self.copymap, st)
1460 if not self._dirtyparents:
1439 if not self._dirtyparents:
1461 self.setparents(*p)
1440 self.setparents(*p)
1462
1441
1463 # Avoid excess attribute lookups by fast pathing certain checks
1442 # Avoid excess attribute lookups by fast pathing certain checks
1464 self.__contains__ = self._map.__contains__
1443 self.__contains__ = self._map.__contains__
1465 self.__getitem__ = self._map.__getitem__
1444 self.__getitem__ = self._map.__getitem__
1466 self.get = self._map.get
1445 self.get = self._map.get
1467
1446
1468 def write(self, st, now):
1447 def write(self, st, now):
1469 st.write(parsers.pack_dirstate(self._map, self.copymap,
1448 st.write(parsers.pack_dirstate(self._map, self.copymap,
1470 self.parents(), now))
1449 self.parents(), now))
1471 st.close()
1450 st.close()
1472 self._dirtyparents = False
1451 self._dirtyparents = False
1473 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1452 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1474
1453
1475 @propertycache
1454 @propertycache
1476 def nonnormalset(self):
1455 def nonnormalset(self):
1477 nonnorm, otherparents = self.nonnormalentries()
1456 nonnorm, otherparents = self.nonnormalentries()
1478 self.otherparentset = otherparents
1457 self.otherparentset = otherparents
1479 return nonnorm
1458 return nonnorm
1480
1459
1481 @propertycache
1460 @propertycache
1482 def otherparentset(self):
1461 def otherparentset(self):
1483 nonnorm, otherparents = self.nonnormalentries()
1462 nonnorm, otherparents = self.nonnormalentries()
1484 self.nonnormalset = nonnorm
1463 self.nonnormalset = nonnorm
1485 return otherparents
1464 return otherparents
1486
1465
1487 @propertycache
1466 @propertycache
1488 def identity(self):
1467 def identity(self):
1489 self._map
1468 self._map
1490 return self.identity
1469 return self.identity
1491
1470
1492 @propertycache
1471 @propertycache
1493 def dirfoldmap(self):
1472 def dirfoldmap(self):
1494 f = {}
1473 f = {}
1495 normcase = util.normcase
1474 normcase = util.normcase
1496 for name in self._dirs:
1475 for name in self._dirs:
1497 f[normcase(name)] = name
1476 f[normcase(name)] = name
1498 return f
1477 return f
General Comments 0
You need to be logged in to leave comments. Login now